blob: 2ee7e7163ae4d08f8a551b9d9e598000cce2f20d (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
|
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<!-- Perf Analysis NuGet package paths -->
<PropertyGroup>
<!-- Default to 'Profile' when the caller does not specify a PerformanceType;
     ValidatePerformanceRunType below only accepts 'Profile' or 'Diagnostic'. -->
<PerformanceType Condition="'$(PerformanceType)'==''">Profile</PerformanceType>
<!-- Relative package-folder path (<id>\<version>); combined with $(PackagesDir) below. -->
<TraceEventPackage>Microsoft.Diagnostics.Tracing.TraceEvent\$(TraceEventPackageVersion)</TraceEventPackage>
</PropertyGroup>
<ItemGroup>
<!-- Every native binary shipped under the TraceEvent package's lib\native tree;
     the PublishPerfRunner target copies these into $(StartWorkingDirectory). -->
<TraceEventNativePath Include="$(PackagesDir)\$(TraceEventPackage)\lib\native\**\*.*" />
</ItemGroup>
<!-- Fails the build before the test-execution scripts are generated when an
     unsupported $(PerformanceType) value is supplied; only 'Diagnostic' and
     'Profile' are accepted. Runs only for performance runs ($(Performance)=='true'). -->
<Target Name="ValidatePerformanceRunType" Condition="'$(Performance)'=='true'" BeforeTargets="GenerateTestExecutionScripts">
<Error Condition="'$(PerformanceType)'!='Diagnostic' AND '$(PerformanceType)'!='Profile'" Text="Invalid Performance Type value specified: $(PerformanceType)" />
</Target>
<!-- Copies the TraceEvent native binaries (gathered in @(TraceEventNativePath)
     above) into the test working directory, preserving the package's directory
     layout, so the perf runner can load them at test time. -->
<Target Name ="PublishPerfRunner" Condition="'$(Performance)'=='true'" BeforeTargets="RunTestsForProject">
<Copy SourceFiles="@(TraceEventNativePath)" DestinationFiles="@(TraceEventNativePath->'$(StartWorkingDirectory)\%(RecursiveDir)%(Filename)%(Extension)')" />
</Target>
<!-- Set platform specific values. -->
<PropertyGroup Condition="'$(TargetOS)'=='Linux'">
<!-- $RUNTIME_PATH is a shell variable expanded by the generated RunTests.sh, not by MSBuild. -->
<PerfTestCommandDotnetExecutable>$RUNTIME_PATH/dotnet</PerfTestCommandDotnetExecutable>
<BenchviewDir>$(ToolsDir)Microsoft.BenchView.JSONFormat/tools</BenchviewDir>
<!-- NOTE(review): pinned to the python3.5 binary name — assumes that exact
     interpreter is on PATH of the Linux test machines; confirm before changing. -->
<PythonCommand>python3.5</PythonCommand>
<CliExitErrorCommand>exit 1</CliExitErrorCommand>
</PropertyGroup>
<PropertyGroup Condition="'$(TargetOS)'=='Windows_NT'">
<!-- %RUNTIME_PATH% is a cmd.exe variable expanded by the generated RunTests.cmd. -->
<PerfTestCommandDotnetExecutable>%RUNTIME_PATH%\dotnet.exe</PerfTestCommandDotnetExecutable>
<BenchviewDir>$(ToolsDir)Microsoft.BenchView.JSONFormat\tools</BenchviewDir>
<!-- The Windows py.exe launcher picks the installed Python version. -->
<PythonCommand>py.exe</PythonCommand>
<CliExitErrorCommand>EXIT /B 1</CliExitErrorCommand>
</PropertyGroup>
<!-- Sets the flags for the performance counters to be collected in this run. -->
<PropertyGroup Condition="'$(PerformanceType)'=='Profile'">
<!-- Collect, per benchmark, execution time (stopwatch). -->
<CollectFlags>stopwatch</CollectFlags>
<!-- $(RunId) prefixes the xUnit result file name ("$(RunId)-$(AssemblyName).xml" below). -->
<RunId>Perf-Profile</RunId>
</PropertyGroup>
<PropertyGroup Condition="'$(PerformanceType)'=='Diagnostic'">
<!-- Collect, per benchmark, the following metrics:
1. Allocated bytes in execution thread,
2. Any user defined Clr events (through the xUnit Performance Api attributes),
3. CPU Usage (Utilization by Process, Stack),
4. CPU Performance Counters (Pmc Rollovers)
-->
<CollectFlags>default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi</CollectFlags>
<RunId>Perf-Diagnostic</RunId>
</PropertyGroup>
<PropertyGroup>
<!-- Literal command lines emitted into the generated RunTests script: run the
     perf harness, then (optionally) convert its xUnit XML output into a BenchView
     measurement.json. '|| $(CliExitErrorCommand)' aborts the script on failure,
     using the platform-specific exit command chosen above. -->
<PerfRunnerCommand>$(PerfTestCommandDotnetExecutable) PerfRunner.exe --perf:runid $(RunId) --perf:collect $(CollectFlags) || $(CliExitErrorCommand)</PerfRunnerCommand>
<MeasurementPyCommand>$(PythonCommand) "$(BenchviewDir)/measurement.py" xunit "$(RunId)-$(AssemblyName).xml" --better desc --drop-first-value --append -o "$(ProjectDir)measurement.json" || $(CliExitErrorCommand)</MeasurementPyCommand>
</PropertyGroup>
<!-- Build the commands to be appended to the generated RunTest.[cmd|sh] script. -->
<ItemGroup>
<!-- Always run the perf harness itself. -->
<PerfTestCommandLines Include="$(PerfRunnerCommand)" />
</ItemGroup>
<!-- When uploading to BenchView, wrap the measurement conversion in a
     platform-appropriate existence check so the script does not fail when the
     harness produced no result file. The inner quotes around the file name must
     be XML-escaped (&quot;) — a raw '"' inside an attribute value is not
     well-formed XML; MSBuild un-escapes the entity back to '"' when it emits
     the script line. -->
<ItemGroup Condition="'$(TargetOS)'=='Windows_NT' and '$(LogToBenchview)' == 'true'">
<PerfTestCommandLines Include="if exist &quot;$(RunId)-$(AssemblyName).xml&quot; (" />
<PerfTestCommandLines Include="$(MeasurementPyCommand)" />
<PerfTestCommandLines Include=")" />
</ItemGroup>
<ItemGroup Condition="'$(TargetOS)'=='Linux' and '$(LogToBenchview)' == 'true'">
<PerfTestCommandLines Include="if [ -f &quot;$(RunId)-$(AssemblyName).xml&quot; ]; then" />
<PerfTestCommandLines Include="$(MeasurementPyCommand)" />
<PerfTestCommandLines Include="fi" />
</ItemGroup>
<!-- Optimizations to configure Xunit for performance -->
<ItemGroup Condition="'$(IncludePerformanceTests)' == 'true'">
<!-- %3B is the MSBuild escape for ';' (the item-list separator), so the emitted
     AssemblyInfo line ends with a literal semicolon. -->
<AssemblyInfoUsings Include="using Microsoft.Xunit.Performance%3B" />
<AssemblyInfoLines Include="[assembly:OptimizeForBenchmarks]" />
</ItemGroup>
<!-- After all projects' tests have run, submit the accumulated measurement.json
     to BenchView (submission.py builds submission.json; upload.py pushes it to
     the 'corefx' container). Each command is echoed before it is executed so the
     build log shows the exact invocation. Only runs when LogToBenchview=='true'. -->
<Target Name="UploadToBenchview" Condition="'$(LogToBenchview)' == 'true'" AfterTargets="TestAllProjects">
<PropertyGroup>
<SubmissionPyCommand>$(PythonCommand) "$(BenchviewDir)/submission.py" "$(ProjectDir)measurement.json" --build "$(ProjectDir)build.json" --machine-data "$(ProjectDir)machinedata.json" --metadata "$(ProjectDir)submission-metadata.json" --group "CoreFx" --type "$(BenchviewRuntype)" --config-name "$(ConfigurationGroup)" --config Configuration "$(ConfigurationGroup)" --config OS "$(TargetOS)" --config "RunType" "$(PerformanceType)" -arch "$(Platform)" --machinepool "PerfSnake" -o "$(ProjectDir)submission.json" || $(CliExitErrorCommand)</SubmissionPyCommand>
<UploadPyCommand>$(PythonCommand) "$(BenchviewDir)/upload.py" "$(ProjectDir)submission.json" --container corefx || $(CliExitErrorCommand)</UploadPyCommand>
</PropertyGroup>
<ItemGroup>
<BenchviewCalls Include="echo $(SubmissionPyCommand)"/>
<BenchviewCalls Include="$(SubmissionPyCommand)"/>
<BenchviewCalls Include="echo $(UploadPyCommand)"/>
<BenchviewCalls Include="$(UploadPyCommand)"/>
</ItemGroup>
<!-- Exec batches over @(BenchviewCalls), running one command per item in order. -->
<Exec Command="%(BenchviewCalls.Identity)"/>
</Target>
<!-- Emits a (non-fatal) warning when performance tests are about to run in a
     configuration whose ConfigurationGroup does not contain 'release'
     (case-insensitive), since non-Release numbers are unreliable. -->
<Target Name="WarnForDebugPerfConfiguration"
BeforeTargets="RunTestsForProject"
Condition="'$(Performance)' == 'true' and !$(ConfigurationGroup.ToLower().Contains('release'))">
<Warning Text="You are running performance tests in a configuration other than Release. Your results may be unreliable." />
</Target>
</Project>
|