Diffstat (limited to 'packages/microsoft.dotnet.buildtools/2.1.0-rc1-03006-01/lib/PerfTesting.targets')
-rwxr-xr-x  packages/microsoft.dotnet.buildtools/2.1.0-rc1-03006-01/lib/PerfTesting.targets  104
1 file changed, 104 insertions(+), 0 deletions(-)
diff --git a/packages/microsoft.dotnet.buildtools/2.1.0-rc1-03006-01/lib/PerfTesting.targets b/packages/microsoft.dotnet.buildtools/2.1.0-rc1-03006-01/lib/PerfTesting.targets
new file mode 100755
index 0000000000..2ee7e7163a
--- /dev/null
+++ b/packages/microsoft.dotnet.buildtools/2.1.0-rc1-03006-01/lib/PerfTesting.targets
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <!-- Perf Analysis NuGet package paths -->
+ <PropertyGroup>
+ <PerformanceType Condition="'$(PerformanceType)'==''">Profile</PerformanceType>
+ <TraceEventPackage>Microsoft.Diagnostics.Tracing.TraceEvent\$(TraceEventPackageVersion)</TraceEventPackage>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <TraceEventNativePath Include="$(PackagesDir)\$(TraceEventPackage)\lib\native\**\*.*" />
+ </ItemGroup>
+
+ <Target Name="ValidatePerformanceRunType" Condition="'$(Performance)'=='true'" BeforeTargets="GenerateTestExecutionScripts">
+ <Error Condition="'$(PerformanceType)'!='Diagnostic' AND '$(PerformanceType)'!='Profile'" Text="Invalid Performance Type value specified: $(PerformanceType)" />
+ </Target>
+
+ <Target Name="PublishPerfRunner" Condition="'$(Performance)'=='true'" BeforeTargets="RunTestsForProject">
+ <Copy SourceFiles="@(TraceEventNativePath)" DestinationFiles="@(TraceEventNativePath->'$(StartWorkingDirectory)\%(RecursiveDir)%(Filename)%(Extension)')" />
+ </Target>
+
+ <!-- Set platform specific values. -->
+ <PropertyGroup Condition="'$(TargetOS)'=='Linux'">
+ <PerfTestCommandDotnetExecutable>$RUNTIME_PATH/dotnet</PerfTestCommandDotnetExecutable>
+ <BenchviewDir>$(ToolsDir)Microsoft.BenchView.JSONFormat/tools</BenchviewDir>
+ <PythonCommand>python3.5</PythonCommand>
+ <CliExitErrorCommand>exit 1</CliExitErrorCommand>
+ </PropertyGroup>
+
+ <PropertyGroup Condition="'$(TargetOS)'=='Windows_NT'">
+ <PerfTestCommandDotnetExecutable>%RUNTIME_PATH%\dotnet.exe</PerfTestCommandDotnetExecutable>
+ <BenchviewDir>$(ToolsDir)Microsoft.BenchView.JSONFormat\tools</BenchviewDir>
+ <PythonCommand>py.exe</PythonCommand>
+ <CliExitErrorCommand>EXIT /B 1</CliExitErrorCommand>
+ </PropertyGroup>
+
+ <!-- Sets the flags for the performance counters to be collected in this run. -->
+ <PropertyGroup Condition="'$(PerformanceType)'=='Profile'">
+ <!-- Collect, per benchmark, execution time (stopwatch). -->
+ <CollectFlags>stopwatch</CollectFlags>
+ <RunId>Perf-Profile</RunId>
+ </PropertyGroup>
+
+ <PropertyGroup Condition="'$(PerformanceType)'=='Diagnostic'">
+ <!-- Collect, per benchmark, the following metrics:
+ 1. Allocated bytes in execution thread,
+ 2. Any user defined Clr events (through the xUnit Performance Api attributes),
+ 3. CPU Usage (Utilization by Process, Stack),
+ 4. CPU Performance Counters (Pmc Rollovers)
+ -->
+ <CollectFlags>default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi</CollectFlags>
+ <RunId>Perf-Diagnostic</RunId>
+ </PropertyGroup>
+
+ <PropertyGroup>
+ <PerfRunnerCommand>$(PerfTestCommandDotnetExecutable) PerfRunner.exe --perf:runid $(RunId) --perf:collect $(CollectFlags) || $(CliExitErrorCommand)</PerfRunnerCommand>
+ <MeasurementPyCommand>$(PythonCommand) "$(BenchviewDir)/measurement.py" xunit "$(RunId)-$(AssemblyName).xml" --better desc --drop-first-value --append -o "$(ProjectDir)measurement.json" || $(CliExitErrorCommand)</MeasurementPyCommand>
+ </PropertyGroup>
+
+ <!-- Build the commands to be appended to the generated RunTest.[cmd|sh] script. -->
+ <ItemGroup>
+ <PerfTestCommandLines Include="$(PerfRunnerCommand)" />
+ </ItemGroup>
+
+ <ItemGroup Condition="'$(TargetOS)'=='Windows_NT' and '$(LogToBenchview)' == 'true'">
+ <PerfTestCommandLines Include="if exist &quot;$(RunId)-$(AssemblyName).xml&quot; (" />
+ <PerfTestCommandLines Include="$(MeasurementPyCommand)" />
+ <PerfTestCommandLines Include=")" />
+ </ItemGroup>
+
+ <ItemGroup Condition="'$(TargetOS)'=='Linux' and '$(LogToBenchview)' == 'true'">
+ <PerfTestCommandLines Include="if [ -f &quot;$(RunId)-$(AssemblyName).xml&quot; ]; then" />
+ <PerfTestCommandLines Include="$(MeasurementPyCommand)" />
+ <PerfTestCommandLines Include="fi" />
+ </ItemGroup>
+
+ <!-- Optimizations to configure Xunit for performance -->
+ <ItemGroup Condition="'$(IncludePerformanceTests)' == 'true'">
+ <AssemblyInfoUsings Include="using Microsoft.Xunit.Performance%3B" />
+ <AssemblyInfoLines Include="[assembly:OptimizeForBenchmarks]" />
+ </ItemGroup>
+
+ <Target Name="UploadToBenchview" Condition="'$(LogToBenchview)' == 'true'" AfterTargets="TestAllProjects">
+ <PropertyGroup>
+ <SubmissionPyCommand>$(PythonCommand) "$(BenchviewDir)/submission.py" "$(ProjectDir)measurement.json" --build "$(ProjectDir)build.json" --machine-data "$(ProjectDir)machinedata.json" --metadata "$(ProjectDir)submission-metadata.json" --group "CoreFx" --type "$(BenchviewRuntype)" --config-name "$(ConfigurationGroup)" --config Configuration "$(ConfigurationGroup)" --config OS "$(TargetOS)" --config "RunType" "$(PerformanceType)" -arch "$(Platform)" --machinepool "PerfSnake" -o "$(ProjectDir)submission.json" || $(CliExitErrorCommand)</SubmissionPyCommand>
+ <UploadPyCommand>$(PythonCommand) "$(BenchviewDir)/upload.py" "$(ProjectDir)submission.json" --container corefx || $(CliExitErrorCommand)</UploadPyCommand>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <BenchviewCalls Include="echo $(SubmissionPyCommand)"/>
+ <BenchviewCalls Include="$(SubmissionPyCommand)"/>
+
+ <BenchviewCalls Include="echo $(UploadPyCommand)"/>
+ <BenchviewCalls Include="$(UploadPyCommand)"/>
+ </ItemGroup>
+
+ <Exec Command="%(BenchviewCalls.Identity)"/>
+ </Target>
+
+ <Target Name="WarnForDebugPerfConfiguration"
+ BeforeTargets="RunTestsForProject"
+ Condition="'$(Performance)' == 'true' and !$(ConfigurationGroup.ToLower().Contains('release'))">
+ <Warning Text="You are running performance tests in a configuration other than Release. Your results may be unreliable." />
+ </Target>
+</Project>
\ No newline at end of file
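
For reference, a rough sketch of the lines the PerfTestCommandLines items above would contribute to the generated RunTests script once the properties expand, assuming TargetOS=Linux, PerformanceType=Profile, and LogToBenchview=true; the assembly name System.Example.Performance.Tests and the $TOOLS_DIR/$PROJECT_DIR paths are hypothetical placeholders standing in for $(ToolsDir) and $(ProjectDir):

    # Expansion of $(PerfRunnerCommand): run the perf harness with the Profile run id and stopwatch collection.
    $RUNTIME_PATH/dotnet PerfRunner.exe --perf:runid Perf-Profile --perf:collect stopwatch || exit 1
    # Expansion of the Linux guard plus $(MeasurementPyCommand): convert the xunit results to BenchView measurement JSON.
    if [ -f "Perf-Profile-System.Example.Performance.Tests.xml" ]; then
        python3.5 "$TOOLS_DIR/Microsoft.BenchView.JSONFormat/tools/measurement.py" xunit "Perf-Profile-System.Example.Performance.Tests.xml" --better desc --drop-first-value --append -o "$PROJECT_DIR/measurement.json" || exit 1
    fi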