author     José Rivero <jorive@microsoft.com>       2017-06-01 08:56:27 -0700
committer  GitHub <noreply@github.com>               2017-06-01 08:56:27 -0700
commit     1d2e6f8b131af5c4f684c35418ee6c14b417b316 (patch)
tree       8b43165285f001b663dee24baf044904e9b3bf26
parent     beaefc2188a4cf46c99e522f4ed0b57507b07633 (diff)
Updating version of xUnit-Performance-Api (#11987)
- This update contains a fix for this issue: https://github.com/Microsoft/xunit-performance/issues/230
- Consolidate the API version into two places: `dependencies.props` and `PerfHarness.csproj`
- Move the performance log files to the root directory, where Jenkins can archive them
- Fix the console output so the script steps can be followed live on the Jenkins job and an exact repro step can be obtained if needed
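For context on the version consolidation below: the new MSBuild properties added to `dependencies.props` become the single value every test project references, while `PerfHarness.csproj` keeps a literal version as the second location. A minimal sketch of the pattern, assuming the consuming projects import `dependencies.props` directly or through a shared props file (the surrounding project boilerplate here is illustrative only):

    <!-- dependencies.props: declare each version once -->
    <PropertyGroup>
      <XUnitPerformanceApiVersion>1.0.0-beta-build0007</XUnitPerformanceApiVersion>
      <XUnitConsoleNetCoreVersion>1.0.2-prerelease-00177</XUnitConsoleNetCoreVersion>
    </PropertyGroup>

    <!-- any consuming .csproj/.depproj: reference the property instead of a literal version -->
    <ItemGroup>
      <PackageReference Include="xunit.performance.api">
        <Version>$(XUnitPerformanceApiVersion)</Version>
      </PackageReference>
      <PackageReference Include="xunit.console.netcore">
        <Version>$(XUnitConsoleNetCoreVersion)</Version>
      </PackageReference>
    </ItemGroup>

With this in place, bumping `XUnitPerformanceApiVersion` in `dependencies.props` updates every consuming project in one edit; only `PerfHarness.csproj` still needs a matching manual bump.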
-rw-r--r--  dependencies.props                                                    13
-rw-r--r--  perf.groovy                                                            6
-rw-r--r--  tests/scripts/run-xunit-perf.cmd                                      74
-rwxr-xr-x  tests/scripts/run-xunit-perf.sh                                        6
-rw-r--r--  tests/src/Common/PerfHarness/PerfHarness.csproj                        4
-rw-r--r--  tests/src/Common/external/external.depproj                            12
-rw-r--r--  tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj         12
-rw-r--r--  tests/src/JIT/config/benchmark+serialize/benchmark+serialize.csproj   12
-rw-r--r--  tests/src/JIT/config/benchmark/benchmark.csproj                       14
-rw-r--r--  tests/src/performance/performance.csproj                              12
10 files changed, 90 insertions, 75 deletions
diff --git a/dependencies.props b/dependencies.props
index 6945f62f55..77f5c7930e 100644
--- a/dependencies.props
+++ b/dependencies.props
@@ -15,6 +15,9 @@
<PropertyGroup>
<CoreClrPackageVersion>2.1.0-preview1-25327-01</CoreClrPackageVersion>
<XunitPackageVersion>2.2.0-beta2-build3300</XunitPackageVersion>
+ <XUnitConsoleNetCoreVersion>1.0.2-prerelease-00177</XUnitConsoleNetCoreVersion>
+ <XUnitPerformanceApiVersion>1.0.0-beta-build0007</XUnitPerformanceApiVersion>
+ <MicrosoftDiagnosticsTracingLibraryVersion>1.0.3-alpha-experimental</MicrosoftDiagnosticsTracingLibraryVersion>
</PropertyGroup>
<!-- Package dependency verification/auto-upgrade configuration. -->
@@ -78,8 +81,16 @@
<Version>1.0.0-alpha-build0040</Version>
</StaticDependency>
+ <XUnitPerformanceApiDependency Include="xunit.performance.api" />
+ <XUnitPerformanceApiDependency Include="xunit.performance.core" />
+ <XUnitPerformanceApiDependency Include="xunit.performance.execution" />
+ <XUnitPerformanceApiDependency Include="xunit.performance.metrics" />
+ <StaticDependency Include="@(XUnitPerformanceApiDependency)">
+ <Version>$(XUnitPerformanceApiVersion)</Version>
+ </StaticDependency>
+
<StaticDependency Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</StaticDependency>
<DependencyBuildInfo Include="@(StaticDependency)">
diff --git a/perf.groovy b/perf.groovy
index 125ed7b062..9be47c0b3c 100644
--- a/perf.groovy
+++ b/perf.groovy
@@ -105,12 +105,12 @@ def static getOSGroup(def os) {
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
- // Run with just stopwatch
+ // Run with just stopwatch: Profile=Off
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -nowarmup -runtype ${runType} -scenarioTest -group ILLink")
- // Run with the full set of counters enabled
+ // Run with the full set of counters enabled: Profile=On
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
batchFile("tests\\scripts\\run-xunit-perf.cmd -arch ${arch} -configuration ${configuration} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\"")
}
@@ -124,6 +124,7 @@ def static getOSGroup(def os) {
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('Perf-*.xml')
archiveSettings.addFiles('Perf-*.etl')
+ archiveSettings.addFiles('Perf-*.log')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
@@ -288,6 +289,7 @@ def static getOSGroup(def os) {
// Save machinedata.json to /artifact/bin/ Jenkins dir
def archiveSettings = new ArchivalSettings()
+ archiveSettings.addFiles('Perf-*.log')
archiveSettings.addFiles('Perf-*.xml')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
diff --git a/tests/scripts/run-xunit-perf.cmd b/tests/scripts/run-xunit-perf.cmd
index 7b1bd3e9b0..e223a3bda9 100644
--- a/tests/scripts/run-xunit-perf.cmd
+++ b/tests/scripts/run-xunit-perf.cmd
@@ -6,7 +6,6 @@
@if defined _echo echo on
setlocal ENABLEDELAYEDEXPANSION
-setlocal
set ERRORLEVEL=
set BENCHVIEW_RUN_TYPE=local
set CORECLR_REPO=%CD%
@@ -45,7 +44,6 @@ setlocal
rem find and stage the tests
set /A "LV_FAILURES=0"
for /R %CORECLR_PERF% %%T in (*.%TEST_FILE_EXT%) do (
- rem Skip known failures
call :run_benchmark %%T || (
set /A "LV_FAILURES+=1"
)
@@ -73,13 +71,11 @@ setlocal
set BENCHDIR=%~p1
rem copy benchmark and any input files
- call :run_cmd xcopy /s %~1 . >> %RUNLOG% || exit /b 1
+ call :run_cmd xcopy /sy %~1 . >> %RUNLOG% || exit /b 1
if exist "%BENCHDIR%*.txt" (
- call :run_cmd xcopy /s %BENCHDIR%*.txt . >> %RUNLOG% || exit /b 1
+ call :run_cmd xcopy /sy %BENCHDIR%*.txt . >> %RUNLOG% || exit /b 1
)
- set CORE_ROOT=%CORECLR_REPO%\sandbox
-
rem setup additional environment variables
if DEFINED TEST_ENV (
if EXIST "%TEST_ENV%" (
@@ -87,13 +83,26 @@ setlocal
)
)
- set BENCHNAME_LOG_FILE_NAME=%BENCHNAME%.log
+ echo/
+ echo/ ----------
+ echo/ Running %BENCHNAME%
+ echo/ ----------
+
+ rem CORE_ROOT environment variable is used by some benchmarks such as Roslyn / CscBench.
+ set CORE_ROOT=%CORECLR_REPO%\sandbox
+
+ set LV_RUNID=Perf-%ETW_COLLECTION%
+ set BENCHNAME_LOG_FILE_NAME=%LV_RUNID%-%BENCHNAME%.log
+ set LV_CMD=
if defined IS_SCENARIO_TEST (
- call :run_cmd corerun.exe "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid Perf 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1
+ set "LV_CMD=corerun.exe "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid "%LV_RUNID%""
) else (
- call :run_cmd %STABILITY_PREFIX% corerun.exe PerfHarness.dll "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid Perf --perf:collect %COLLECTION_FLAGS% 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1
+ set "LV_CMD=%STABILITY_PREFIX% corerun.exe PerfHarness.dll "%CORECLR_REPO%\sandbox\%BENCHNAME%.%TEST_FILE_EXT%" --perf:runid "%LV_RUNID%" --perf:collect %COLLECTION_FLAGS%"
)
+ call :print_to_console $ !LV_CMD!
+ call :run_cmd !LV_CMD! 1>"%BENCHNAME_LOG_FILE_NAME%" 2>&1
+
IF %ERRORLEVEL% NEQ 0 (
call :print_error corerun.exe exited with %ERRORLEVEL% code.
if exist "%BENCHNAME_LOG_FILE_NAME%" type "%BENCHNAME_LOG_FILE_NAME%"
@@ -103,14 +112,13 @@ setlocal
rem optionally generate results for benchview
if exist "%BENCHVIEW_PATH%" (
call :generate_results_for_benchview || exit /b 1
- )
+ )
rem Save off the results to the root directory for recovery later in Jenkins
- IF EXIST "Perf-*%BENCHNAME%.xml" (
- call :run_cmd copy "Perf-*%BENCHNAME%.xml" "%CORECLR_REPO%\Perf-%BENCHNAME%-%ETW_COLLECTION%.xml" || exit /b 1
- )
- IF EXIST "Perf-*%BENCHNAME%.etl" (
- call :run_cmd copy "Perf-*%BENCHNAME%.etl" "%CORECLR_REPO%\Perf-%BENCHNAME%-%ETW_COLLECTION%.etl" || exit /b 1
+ for %%e in (xml etl log) do (
+ IF EXIST ".\%LV_RUNID%-%BENCHNAME%.%%e" (
+ call :run_cmd xcopy /vy ".\%LV_RUNID%-%BENCHNAME%.%%e" .. || exit /b 1
+ )
)
exit /b 0
@@ -219,7 +227,7 @@ rem Sets the test architecture.
rem ****************************************************************************
set TEST_ARCH=%TEST_ARCHITECTURE%
exit /b 0
-
+
:verify_benchview_tools
rem ****************************************************************************
rem Verifies that the path to the benchview tools is correct.
@@ -231,7 +239,7 @@ rem ****************************************************************************
)
)
exit /b 0
-
+
:verify_core_overlay
rem ****************************************************************************
rem Verify that the Core_Root folder exist.
@@ -243,7 +251,7 @@ rem ****************************************************************************
)
exit /b 0
- :set_collection_config
+:set_collection_config
rem ****************************************************************************
rem Set's the config based on the providers used for collection
rem ****************************************************************************
@@ -254,7 +262,7 @@ rem ****************************************************************************
)
exit /b 0
-
+
:set_perf_run_log
rem ****************************************************************************
rem Sets the script's output log file.
@@ -271,7 +279,7 @@ rem ****************************************************************************
rem Creates the sandbox folder used by the script to copy binaries locally,
rem and execute benchmarks.
rem ****************************************************************************
- if exist sandbox rd /s /q sandbox
+ if exist sandbox rmdir /s /q sandbox
if exist sandbox call :print_error Failed to remove the sandbox folder& exit /b 1
if not exist sandbox mkdir sandbox
if not exist sandbox call :print_error Failed to create the sandbox folder& exit /b 1
@@ -293,7 +301,7 @@ rem ****************************************************************************
set LV_MEASUREMENT_ARGS=%LV_MEASUREMENT_ARGS% --append
for /f %%f in ('dir /b Perf-*%BENCHNAME%.xml 2^>nul') do (
- call :run_cmd py.exe "%BENCHVIEW_PATH%\measurement.py" %LV_MEASUREMENT_ARGS% %%f
+ call :run_cmd py.exe "%BENCHVIEW_PATH%\measurement.py" %LV_MEASUREMENT_ARGS% %%f
IF !ERRORLEVEL! NEQ 0 (
call :print_error Failed to generate BenchView measurement data.
@@ -322,7 +330,7 @@ setlocal
set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --machinepool "PerfSnake"
call :run_cmd py.exe "%BENCHVIEW_PATH%\submission.py" measurement.json %LV_SUBMISSION_ARGS%
-
+
IF %ERRORLEVEL% NEQ 0 (
call :print_error Creating BenchView submission data failed.
exit /b 1
@@ -360,7 +368,7 @@ rem ****************************************************************************
echo -collectionFlags This is used to specify what collectoin flags get passed to the performance
echo harness that is doing the test running. If this is not specified we only use stopwatch.
echo Other flags are "default", which is the whatever the test being run specified, "CacheMisses",
- echo "BranchMispredictions", and "InstructionsRetired".
+ echo "BranchMispredictions", and "InstructionsRetired".
exit /b %ERRORLEVEL%
:print_error
@@ -368,19 +376,17 @@ rem ****************************************************************************
rem Function wrapper that unifies how errors are output by the script.
rem Functions output to the standard error.
rem ****************************************************************************
- echo [%DATE%][%TIME:~0,-3%][ERROR] %* 1>&2
+ call :print_to_console [ERROR] %* 1>&2
exit /b %ERRORLEVEL%
:print_to_console
rem ****************************************************************************
-rem Sends text to the console screen, no matter what (even when the script's
-rem output is redirected). This can be useful to provide information on where
-rem the script is executing.
+rem Sends text to the console screen. This can be useful to provide
+rem information on where the script is executing.
rem ****************************************************************************
- if defined _debug (
- echo [%DATE%][%TIME:~0,-3%] %* >CON
- )
- echo [%DATE%][%TIME:~0,-3%] %*
+ echo/
+ echo/%USERNAME%@%COMPUTERNAME% "%CD%"
+ echo/[%DATE%][%TIME:~0,-3%] %*
exit /b %ERRORLEVEL%
:run_cmd
@@ -396,9 +402,3 @@ rem ****************************************************************************
call :print_to_console $ %*
call %*
exit /b %ERRORLEVEL%
-
-:skip_failures
-rem ****************************************************************************
-rem Skip known failures
-rem ****************************************************************************
- exit /b 0
diff --git a/tests/scripts/run-xunit-perf.sh b/tests/scripts/run-xunit-perf.sh
index 2797174f32..e6758e59fb 100755
--- a/tests/scripts/run-xunit-perf.sh
+++ b/tests/scripts/run-xunit-perf.sh
@@ -340,7 +340,7 @@ for testcase in ${tests[@]}; do
echo "----------"
echo " Running $testname"
echo "----------"
- run_command $stabilityPrefix ./corerun PerfHarness.dll $test --perf:runid Perf --perf:collect $collectionflags || exit 1
+ run_command $stabilityPrefix ./corerun PerfHarness.dll $test --perf:runid Perf --perf:collect $collectionflags 1>"Perf-$filename.log" 2>&1 || exit 1
if [ -d "$BENCHVIEW_TOOLS_PATH" ]; then
run_command python3.5 "$BENCHVIEW_TOOLS_PATH/measurement.py" xunit "Perf-$filename.xml" --better desc $hasWarmupRun --append || {
echo [ERROR] Failed to generate BenchView data;
@@ -349,6 +349,10 @@ for testcase in ${tests[@]}; do
fi
# Rename file to be archived by Jenkins.
+ mv -f "Perf-$filename.log" "$CORECLR_REPO/Perf-$filename-$perfCollection.log" || {
+ echo [ERROR] Failed to move "Perf-$filename.log" to "$CORECLR_REPO".
+ exit 1;
+ }
mv -f "Perf-$filename.xml" "$CORECLR_REPO/Perf-$filename-$perfCollection.xml" || {
echo [ERROR] Failed to move "Perf-$filename.xml" to "$CORECLR_REPO".
exit 1;
diff --git a/tests/src/Common/PerfHarness/PerfHarness.csproj b/tests/src/Common/PerfHarness/PerfHarness.csproj
index a15e8022ae..39eff59c12 100644
--- a/tests/src/Common/PerfHarness/PerfHarness.csproj
+++ b/tests/src/Common/PerfHarness/PerfHarness.csproj
@@ -6,9 +6,7 @@
</PropertyGroup>
<ItemGroup>
- <PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0006</Version>
- </PackageReference>
+ <PackageReference Include="xunit.performance.api" Version="1.0.0-beta-build0007" />
</ItemGroup>
</Project>
\ No newline at end of file
diff --git a/tests/src/Common/external/external.depproj b/tests/src/Common/external/external.depproj
index 464d27e019..51b36208b6 100644
--- a/tests/src/Common/external/external.depproj
+++ b/tests/src/Common/external/external.depproj
@@ -22,19 +22,19 @@
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
- <Version>1.0.3-alpha-experimental</Version>
+ <Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Newtonsoft.Json">
<Version>9.0.1</Version>
@@ -43,7 +43,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
diff --git a/tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj b/tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj
index 27af386633..9155703cd3 100644
--- a/tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj
+++ b/tests/src/JIT/config/benchmark+roslyn/benchmark+roslyn.csproj
@@ -11,19 +11,19 @@
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
- <Version>1.0.3-alpha-experimental</Version>
+ <Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -80,7 +80,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
diff --git a/tests/src/JIT/config/benchmark+serialize/benchmark+serialize.csproj b/tests/src/JIT/config/benchmark+serialize/benchmark+serialize.csproj
index 20accfe300..36181432d2 100644
--- a/tests/src/JIT/config/benchmark+serialize/benchmark+serialize.csproj
+++ b/tests/src/JIT/config/benchmark+serialize/benchmark+serialize.csproj
@@ -8,19 +8,19 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
- <Version>1.0.3-alpha-experimental</Version>
+ <Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -80,7 +80,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
diff --git a/tests/src/JIT/config/benchmark/benchmark.csproj b/tests/src/JIT/config/benchmark/benchmark.csproj
index e5285c1fec..f739151181 100644
--- a/tests/src/JIT/config/benchmark/benchmark.csproj
+++ b/tests/src/JIT/config/benchmark/benchmark.csproj
@@ -8,19 +8,19 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0003</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
- <Version>1.0.0-beta-build0003</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
- <Version>1.0.0-beta-build0003</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
- <Version>1.0.0-beta-build0003</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
- <Version>1.0.3-alpha-experimental</Version>
+ <Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -95,7 +95,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>
@@ -110,5 +110,5 @@
</PropertyGroup>
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.targets))\dir.targets" />
<Target Name="Build"
- DependsOnTargets="ResolveReferences" />
+ DependsOnTargets="ResolveReferences" />
</Project>
\ No newline at end of file
diff --git a/tests/src/performance/performance.csproj b/tests/src/performance/performance.csproj
index 5a6ca1279f..8133535f45 100644
--- a/tests/src/performance/performance.csproj
+++ b/tests/src/performance/performance.csproj
@@ -8,19 +8,19 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.performance.api">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.core">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.execution">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.performance.metrics">
- <Version>1.0.0-beta-build0006</Version>
+ <Version>$(XUnitPerformanceApiVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent">
- <Version>1.0.3-alpha-experimental</Version>
+ <Version>$(MicrosoftDiagnosticsTracingLibraryVersion)</Version>
</PackageReference>
<PackageReference Include="Microsoft.NETCore.Platforms">
<Version>2.0.0-preview2-25302-03</Version>
@@ -92,7 +92,7 @@
<Version>$(XunitPackageVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.console.netcore">
- <Version>1.0.2-prerelease-00177</Version>
+ <Version>$(XUnitConsoleNetCoreVersion)</Version>
</PackageReference>
<PackageReference Include="xunit.runner.utility">
<Version>$(XunitPackageVersion)</Version>