summary | refs | log | tree | commit | diff
path: root/src/jit
diff options
context:
space:
mode:
Diffstat (limited to 'src/jit')
-rw-r--r--  src/jit/codegen.h        | 2
-rw-r--r--  src/jit/codegencommon.cpp | 7
-rw-r--r--  src/jit/compiler.hpp     | 9
-rw-r--r--  src/jit/emit.cpp         | 6
-rw-r--r--  src/jit/emitarm.cpp      | 2
-rw-r--r--  src/jit/lclvars.cpp      | 5
-rw-r--r--  src/jit/target.h         | 8
7 files changed, 29 insertions, 10 deletions
diff --git a/src/jit/codegen.h b/src/jit/codegen.h
index 5a086e99fe..f38b4dc13a 100644
--- a/src/jit/codegen.h
+++ b/src/jit/codegen.h
@@ -361,7 +361,7 @@ protected:
void genMov32RelocatableDisplacement(BasicBlock* block, regNumber reg);
void genMov32RelocatableDataLabel(unsigned value, regNumber reg);
- void genMov32RelocatableImmediate(emitAttr size, unsigned value, regNumber reg);
+ void genMov32RelocatableImmediate(emitAttr size, size_t value, regNumber reg);
bool genUsedPopToReturn; // True if we use the pop into PC to return,
// False if we didn't and must branch to LR to return.
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index 4a479af777..72a91d9a0e 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -5685,7 +5685,8 @@ void CodeGen::genCheckUseBlockInit()
{
// Var is completely on the stack, in the legacy JIT case, or
// on the stack at entry, in the RyuJIT case.
- initStkLclCnt += (unsigned)roundUp(compiler->lvaLclSize(varNum)) / sizeof(int);
+ initStkLclCnt +=
+ (unsigned)roundUp(compiler->lvaLclSize(varNum), TARGET_POINTER_SIZE) / sizeof(int);
}
}
else
@@ -5716,7 +5717,7 @@ void CodeGen::genCheckUseBlockInit()
{
varDsc->lvMustInit = true;
- initStkLclCnt += (unsigned)roundUp(compiler->lvaLclSize(varNum)) / sizeof(int);
+ initStkLclCnt += (unsigned)roundUp(compiler->lvaLclSize(varNum), TARGET_POINTER_SIZE) / sizeof(int);
}
continue;
@@ -6621,7 +6622,7 @@ void CodeGen::genMov32RelocatableDataLabel(unsigned value, regNumber reg)
*
* Move of relocatable immediate to register
*/
-void CodeGen::genMov32RelocatableImmediate(emitAttr size, unsigned value, regNumber reg)
+void CodeGen::genMov32RelocatableImmediate(emitAttr size, size_t value, regNumber reg)
{
_ASSERTE(EA_IS_RELOC(size));
diff --git a/src/jit/compiler.hpp b/src/jit/compiler.hpp
index ab3f32e548..2b9f3aa94b 100644
--- a/src/jit/compiler.hpp
+++ b/src/jit/compiler.hpp
@@ -1436,6 +1436,15 @@ inline void GenTree::SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate)
assert(GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL || GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_LARGE);
assert(GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL || (gtDebugFlags & GTF_DEBUG_NODE_LARGE));
+#if defined(_HOST_64BIT_) && !defined(_TARGET_64BIT_)
+ if (gtOper == GT_CNS_LNG && oper == GT_CNS_INT)
+ {
+ // When casting from LONG to INT, we need to force cast of the value,
+ // if the host architecture represents INT and LONG with the same data size.
+ gtLngCon.gtLconVal = (INT64)(INT32)gtLngCon.gtLconVal;
+ }
+#endif // defined(_HOST_64BIT_) && !defined(_TARGET_64BIT_)
+
SetOperRaw(oper);
#ifdef DEBUG
diff --git a/src/jit/emit.cpp b/src/jit/emit.cpp
index 5c70e08e37..7a1d106f17 100644
--- a/src/jit/emit.cpp
+++ b/src/jit/emit.cpp
@@ -5460,8 +5460,8 @@ void emitter::emitOutputDataSec(dataSecDsc* sec, BYTE* dst)
JITDUMP(" section %u, size %u, block absolute addr\n", secNum++, dscSize);
assert(dscSize && dscSize % TARGET_POINTER_SIZE == 0);
- size_t numElems = dscSize / TARGET_POINTER_SIZE;
- BYTE** bDst = (BYTE**)dst;
+ size_t numElems = dscSize / TARGET_POINTER_SIZE;
+ target_size_t* bDst = (target_size_t*)dst;
for (unsigned i = 0; i < numElems; i++)
{
BasicBlock* block = ((BasicBlock**)dsc->dsCont)[i];
@@ -5475,7 +5475,7 @@ void emitter::emitOutputDataSec(dataSecDsc* sec, BYTE* dst)
#ifdef _TARGET_ARM_
target = (BYTE*)((size_t)target | 1); // Or in thumb bit
#endif
- bDst[i] = target;
+ bDst[i] = (target_size_t)target;
if (emitComp->opts.compReloc)
{
emitRecordRelocation(&(bDst[i]), target, IMAGE_REL_BASED_HIGHLOW);
diff --git a/src/jit/emitarm.cpp b/src/jit/emitarm.cpp
index bcf0cb4f33..ca5575a470 100644
--- a/src/jit/emitarm.cpp
+++ b/src/jit/emitarm.cpp
@@ -1673,7 +1673,7 @@ void emitter::emitIns_R(instruction ins, emitAttr attr, regNumber reg)
*/
void emitter::emitIns_R_I(
- instruction ins, emitAttr attr, regNumber reg, int imm, insFlags flags /* = INS_FLAGS_DONT_CARE */)
+ instruction ins, emitAttr attr, regNumber reg, ssize_t imm, insFlags flags /* = INS_FLAGS_DONT_CARE */)
{
insFormat fmt = IF_NONE;
diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp
index de1266ea65..82c37d6bb8 100644
--- a/src/jit/lclvars.cpp
+++ b/src/jit/lclvars.cpp
@@ -4970,7 +4970,8 @@ void Compiler::lvaAssignVirtualFrameOffsetsToArgs()
argLcls++;
// Early out if we can. If size is 8 and base reg is 2, then the mask is 0x1100
- tempMask |= ((((1 << (roundUp(argSize) / REGSIZE_BYTES))) - 1) << lvaTable[preSpillLclNum].lvArgReg);
+ tempMask |= ((((1 << (roundUp(argSize, TARGET_POINTER_SIZE) / REGSIZE_BYTES))) - 1)
+ << lvaTable[preSpillLclNum].lvArgReg);
if (tempMask == preSpillMask)
{
// We won't encounter more pre-spilled registers,
@@ -7334,7 +7335,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData*
// Change the variable to a TYP_BLK
if (varType != TYP_BLK)
{
- varDsc->lvExactSize = (unsigned)(roundUp(padding + pComp->lvaLclSize(lclNum)));
+ varDsc->lvExactSize = (unsigned)(roundUp(padding + pComp->lvaLclSize(lclNum), TARGET_POINTER_SIZE));
varDsc->lvType = TYP_BLK;
pComp->lvaSetVarAddrExposed(lclNum);
}
diff --git a/src/jit/target.h b/src/jit/target.h
index 453f8f9e6c..397ecbdb00 100644
--- a/src/jit/target.h
+++ b/src/jit/target.h
@@ -2394,6 +2394,14 @@ C_ASSERT((RBM_INT_CALLEE_SAVED & RBM_FPBASE) == RBM_NONE);
#endif
/*****************************************************************************/
+#ifdef _TARGET_64BIT_
+typedef unsigned __int64 target_size_t;
+#else
+typedef unsigned int target_size_t;
+#endif
+
+C_ASSERT(sizeof(target_size_t) == TARGET_POINTER_SIZE);
+
/*****************************************************************************/
#endif // _TARGET_H_
/*****************************************************************************/