summaryrefslogtreecommitdiff
path: root/src/pal/src/misc/sysinfo.cpp
diff options
context:
space:
mode:
authorJan Vorlicek <janvorli@microsoft.com>2017-10-03 22:09:43 +0200
committerGitHub <noreply@github.com>2017-10-03 22:09:43 +0200
commit08d39ddf02c81c99bd49c19b808c855235cbabdc (patch)
treef1314991120946c183cd58490db9219cbce596ca /src/pal/src/misc/sysinfo.cpp
parent4cfd557c2d1e80123d5a37aaea00636ddcc7be49 (diff)
downloadcoreclr-08d39ddf02c81c99bd49c19b808c855235cbabdc.tar.gz
coreclr-08d39ddf02c81c99bd49c19b808c855235cbabdc.tar.bz2
coreclr-08d39ddf02c81c99bd49c19b808c855235cbabdc.zip
Fix build with latest Xcode on OSX (#14282)
* Fix build with latest Xcode on OSX. The latest Xcode 9 cannot successfully build CoreCLR PAL. There are several issues. First, it complains that min / max macros cannot be defined in C++ code, since they would collide with the std::min and std::max functions. Second, some of the headers that PAL includes pull in declarations of several template classes that we explicitly define in PAL and also the new operator declaration. To fix that, I have undefined the min and max macros for PAL and replaced their usage by the std::min / max functions. I have also removed the manual declaration of the colliding template classes and new operator and added inclusion of the proper C++ headers instead. The PAL was including non-pal safemath.h and to make this change compatible with it, I have added definition of USE_STL that makes safemath.h include type_traits from STL instead of our local trimmed copy. I have also removed some dead code that I have discovered during the process. Fixes #14279. * Fix build on ARM32 and very recent GLIBCXX.
Diffstat (limited to 'src/pal/src/misc/sysinfo.cpp')
-rw-r--r-- src/pal/src/misc/sysinfo.cpp | 21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/src/pal/src/misc/sysinfo.cpp b/src/pal/src/misc/sysinfo.cpp
index a06f4b75cb..cb5dda62a8 100644
--- a/src/pal/src/misc/sysinfo.cpp
+++ b/src/pal/src/misc/sysinfo.cpp
@@ -81,6 +81,7 @@ Revision History:
#include "pal/dbgmsg.h"
+#include <algorithm>
SET_DEFAULT_DEBUG_CHANNEL(MISC);
@@ -437,16 +438,16 @@ PAL_GetLogicalProcessorCacheSizeFromOS()
size_t cacheSize = 0;
#ifdef _SC_LEVEL1_DCACHE_SIZE
- cacheSize = max(cacheSize, sysconf(_SC_LEVEL1_DCACHE_SIZE));
+ cacheSize = std::max(cacheSize, (size_t)sysconf(_SC_LEVEL1_DCACHE_SIZE));
#endif
#ifdef _SC_LEVEL2_CACHE_SIZE
- cacheSize = max(cacheSize, sysconf(_SC_LEVEL2_CACHE_SIZE));
+ cacheSize = std::max(cacheSize, (size_t)sysconf(_SC_LEVEL2_CACHE_SIZE));
#endif
#ifdef _SC_LEVEL3_CACHE_SIZE
- cacheSize = max(cacheSize, sysconf(_SC_LEVEL3_CACHE_SIZE));
+ cacheSize = std::max(cacheSize, (size_t)sysconf(_SC_LEVEL3_CACHE_SIZE));
#endif
#ifdef _SC_LEVEL4_CACHE_SIZE
- cacheSize = max(cacheSize, sysconf(_SC_LEVEL4_CACHE_SIZE));
+ cacheSize = std::max(cacheSize, (size_t)sysconf(_SC_LEVEL4_CACHE_SIZE));
#endif
#if defined(_ARM64_)
@@ -455,15 +456,15 @@ PAL_GetLogicalProcessorCacheSizeFromOS()
size_t size;
if(ReadMemoryValueFromFile("/sys/devices/system/cpu/cpu0/cache/index0/size", &size))
- cacheSize = max(cacheSize, size);
+ cacheSize = std::max(cacheSize, size);
if(ReadMemoryValueFromFile("/sys/devices/system/cpu/cpu0/cache/index1/size", &size))
- cacheSize = max(cacheSize, size);
+ cacheSize = std::max(cacheSize, size);
if(ReadMemoryValueFromFile("/sys/devices/system/cpu/cpu0/cache/index2/size", &size))
- cacheSize = max(cacheSize, size);
+ cacheSize = std::max(cacheSize, size);
if(ReadMemoryValueFromFile("/sys/devices/system/cpu/cpu0/cache/index3/size", &size))
- cacheSize = max(cacheSize, size);
+ cacheSize = std::max(cacheSize, size);
if(ReadMemoryValueFromFile("/sys/devices/system/cpu/cpu0/cache/index4/size", &size))
- cacheSize = max(cacheSize, size);
+ cacheSize = std::max(cacheSize, size);
}
if(cacheSize == 0)
@@ -488,7 +489,7 @@ PAL_GetLogicalProcessorCacheSizeFromOS()
// Assume L3$/CPU grows linearly from 256K to 1.5M/CPU as logicalCPUs grows from 2 to 12 CPUs
DWORD logicalCPUs = PAL_GetLogicalCpuCountFromOS();
- cacheSize = logicalCPUs*min(1536, max(256, logicalCPUs*128))*1024;
+ cacheSize = logicalCPUs*std::min(1536, std::max(256, logicalCPUs*128))*1024;
}
#endif