summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCyd Haselton <chaselton@gmail.com>2017-08-11 15:14:36 -0500
committerJan Kotas <jkotas@microsoft.com>2017-08-11 13:14:36 -0700
commit78298734845ffbf3cdd75736ebad71dfa1856aba (patch)
tree01e70f87e556af9466829d71f74a26a59a8f95e2
parent1a55fa5b9881c256078090f0d2ad4961c96c2865 (diff)
downloadcoreclr-78298734845ffbf3cdd75736ebad71dfa1856aba.tar.gz
coreclr-78298734845ffbf3cdd75736ebad71dfa1856aba.tar.bz2
coreclr-78298734845ffbf3cdd75736ebad71dfa1856aba.zip
Changes that didn't make it to PR#12982 (#13055)
* Changes that didn't make it to PR #12982. * Recommended changes to volatile.h: added two templates to cast away volatility, needed due to clang 3.8 changes. * Update runtime.Linux.Microsoft.NETCore.Runtime.CoreCLR.props: fixed an erroneous extra Platform conditional line.
-rw-r--r--cross/android/arm64/toolchain.cmake4
-rw-r--r--src/inc/volatile.h19
2 files changed, 22 insertions, 1 deletion
diff --git a/cross/android/arm64/toolchain.cmake b/cross/android/arm64/toolchain.cmake
index 60f8c78511..29415899c1 100644
--- a/cross/android/arm64/toolchain.cmake
+++ b/cross/android/arm64/toolchain.cmake
@@ -22,6 +22,10 @@ find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
add_compile_options(--sysroot=${CROSS_ROOTFS})
add_compile_options(-fPIE)
+## Needed for Android or bionic specific conditionals
+add_compile_options(-D__ANDROID__)
+add_compile_options(-D__BIONIC__)
+
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
diff --git a/src/inc/volatile.h b/src/inc/volatile.h
index 5aa0e50866..ecf9ffe427 100644
--- a/src/inc/volatile.h
+++ b/src/inc/volatile.h
@@ -106,6 +106,19 @@
#define VOLATILE_MEMORY_BARRIER()
#endif // __GNUC__
+template<typename T>
+struct RemoveVolatile
+{
+ typedef T type;
+};
+
+template<typename T>
+struct RemoveVolatile<volatile T>
+{
+ typedef T type;
+};
+
+
//
// VolatileLoad loads a T from a pointer to T. It is guaranteed that this load will not be optimized
// away by the compiler, and that any operation that occurs after this load, in program order, will
@@ -113,6 +126,10 @@
// this is the case for most aligned scalar data types. If you need atomic loads or stores, you need
// to consult the compiler and CPU manuals to find which circumstances allow atomicity.
//
+// Starting with version 3.8, clang errors out on initializing a value of type int * from a volatile int *. To fix this, we add two templates to cast away volatility.
+// Helper structures for casting away volatility
+
+
template<typename T>
inline
T VolatileLoad(T const * pt)
@@ -125,7 +142,7 @@ T VolatileLoad(T const * pt)
static const unsigned lockFreeAtomicSizeMask = (1 << 1) | (1 << 2) | (1 << 4) | (1 << 8);
if((1 << sizeof(T)) & lockFreeAtomicSizeMask)
{
- __atomic_load((T volatile const *)pt, &val, __ATOMIC_ACQUIRE);
+ __atomic_load((T const *)pt, const_cast<typename RemoveVolatile<T>::type *>(&val), __ATOMIC_ACQUIRE);
}
else
{