author    Michelle McDaniel <adiaaida@gmail.com>    2018-01-19 14:29:45 -0800
committer GitHub <noreply@github.com>    2018-01-19 14:29:45 -0800
commit    e435c5048c44978208c2a0dc45444a18415be272 (patch)
tree      e41b79f8dcd355dca2d49b1229364a2e5c68a41e /perf.groovy
parent    35af7047534f420cb098f496837d1d24a9aedb78 (diff)
Convert perf.groovy to use run-xunit-perf.py (#15881)
* Convert perf.groovy to use run-xunit-perf.py

This also removes run-xunit-perf.cmd and run-xunit-perf.sh. The change moves us to using build-test.sh to generate the test layout (i.e., pull down corefx), which fixes #12143.
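For reference, the shape of the change is the same on both platforms: the per-OS shell/batch wrappers are replaced by one Python runner. A minimal sketch of the new invocations, with variable values as they appear in the hunks below (illustrative, not exhaustive):

    // Windows jobs call the script through the `py` launcher (first hunk below):
    batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")
    // Linux jobs call it with python3 (the @@ -315 hunk below):
    shell("python3 ./tests/scripts/run-xunit-perf.py -testBinLoc bin/tests/Windows_NT.${architecture}.${configuration}/JIT/Performance/CodeQuality ${runXUnitCommonArgs}")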
Diffstat (limited to 'perf.groovy')
-rw-r--r--  perf.groovy  |  42
1 file changed, 17 insertions(+), 25 deletions(-)
diff --git a/perf.groovy b/perf.groovy
index 0efcaba19f..5497316398 100644
--- a/perf.groovy
+++ b/perf.groovy
@@ -79,16 +79,16 @@ def static getOSGroup(def os) {
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
- def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\""
+ def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH /AFFINITY 0x2\""
// Run with just stopwatch: Profile=Off
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library")
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")
// Run with the full set of counters enabled: Profile=On
if (opt_level != 'min_opt') {
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
}
}
}
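The extra backslashes in the -stabilityPrefix argument above fix a quoting bug: the quotes around the START window title must survive both Groovy string interpolation and Windows argument splitting. A minimal sketch of the two quoting layers (plain Groovy, not from the commit):

    // Groovy strips one layer of escaping; what remains is what the batch step sees.
    def oldPrefix = "-stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B\""
    // The inner quotes close the outer pair early, truncating the prefix to "START ".
    assert oldPrefix == '-stabilityPrefix "START "CORECLR_PERF_RUN" /B"'

    def newPrefix = "-stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B\""
    // Now \" reaches the command line intact, so the whole prefix stays one argument
    // and START gets "CORECLR_PERF_RUN" as its window title.
    assert newPrefix == '-stabilityPrefix "START \\"CORECLR_PERF_RUN\\" /B"'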
@@ -266,11 +266,12 @@ def static getFullPerfJobName(def project, def os, def isPR) {
def newBuildJob = job(fullBuildJobName) {
steps {
shell("./build.sh verbose ${architecture} ${configuration}")
+ shell("./build-test.sh generatelayoutonly ${architecture} ${configuration}")
}
}
Utilities.setMachineAffinity(newBuildJob, os, 'latest-or-auto')
Utilities.standardJobSetup(newBuildJob, project, isPR, "*/${branch}")
- Utilities.addArchival(newBuildJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**")
+ Utilities.addArchival(newBuildJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so,bin/tests/**", "bin/Product/**/.nuget/**")
}
@@ -302,9 +303,12 @@ def static getFullPerfJobName(def project, def os, def isPR) {
def osGroup = getOSGroup(os)
def runType = isPR ? 'private' : 'rolling'
def benchViewName = isPR ? 'coreclr private \$BenchviewCommitName' : 'coreclr rolling \$GIT_BRANCH_WITHOUT_ORIGIN \$GIT_COMMIT'
+ def uploadString = '-uploadToBenchview'
+
+ def runXUnitCommonArgs = "-arch ${architecture} -os Ubuntu16.04 -configuration ${configuration} -stabilityPrefix \"taskset 0x00000002 nice --adjustment=-10\" -generateBenchviewData \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\" ${uploadString} -runtype ${runType} -outputdir \"\${WORKSPACE}/bin/sandbox_logs\""
steps {
- shell("./tests/scripts/perf-prep.sh")
+ shell("./tests/scripts/perf-prep.sh --nocorefx")
shell("./init-tools.sh")
copyArtifacts(fullBuildJobName) {
includePatterns("bin/**")
@@ -315,24 +319,12 @@ def static getFullPerfJobName(def project, def os, def isPR) {
shell("GIT_BRANCH_WITHOUT_ORIGIN=\$(echo \$GIT_BRANCH | sed \"s/[^/]*\\/\\(.*\\)/\\1 /\")\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py\" --name \" ${benchViewName} \" --user-email \"dotnet-bot@microsoft.com\"\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/build.py\" git --branch \$GIT_BRANCH_WITHOUT_ORIGIN --type ${runType}")
- shell("""./tests/scripts/run-xunit-perf.sh \\
- --testRootDir=\"\${WORKSPACE}/bin/tests/Windows_NT.${architecture}.${configuration}\" \\
- --testNativeBinDir=\"\${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration}/tests\" \\
- --coreClrBinDir=\"\${WORKSPACE}/bin/Product/${osGroup}.${architecture}.${configuration}\" \\
- --mscorlibDir=\"\${WORKSPACE}/bin/Product/${osGroup}.${architecture}.${configuration}\" \\
- --coreFxBinDir=\"\${WORKSPACE}/corefx\" \\
- --runType=\"${runType}\" \\
- --benchViewOS=\"${os}\" \\
- --generatebenchviewdata=\"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\" \\
- --stabilityPrefix=\"taskset 0x00000002 nice --adjustment=-10\" \\
- --uploadToBenchview""")
- shell("mkdir -p bin/toArchive/sandbox/Logs/")
- shell("rsync -a bin/sandbox/Logs/Perf-*.* bin/toArchive/sandbox/Logs/")
+ shell("""python3 ./tests/scripts/run-xunit-perf.py -testBinLoc bin/tests/Windows_NT.${architecture}.${configuration}/JIT/Performance/CodeQuality ${runXUnitCommonArgs}""")
}
}
def archiveSettings = new ArchivalSettings()
- archiveSettings.addFiles('bin/toArchive/**')
+ archiveSettings.addFiles('bin/sandbox_logs/**')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
@@ -590,14 +582,14 @@ parallel(
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
- def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH\" -scenarioTest"
+ def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH\" -scenarioTest"
// Profile=Off
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios")
// Profile=On
if (opt_level != 'min_opt') {
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios -collectionFlags BranchMispredictions+CacheMisses+InstructionRetired")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios -collectionFlags BranchMispredictions+CacheMisses+InstructionRetired")
}
}
}
@@ -790,7 +782,7 @@ parallel(
def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -scenarioTest"
// Scenario: ILLink
- batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -group ILLink -nowarmup")
+ batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -group ILLink -nowarmup")
}
}