author     rearnsha <rearnsha@138bc75d-0d04-0410-961f-82ee72b054a4>	2018-07-31 17:36:09 +0000
committer  Dongkyun Son <dongkyun.s@samsung.com>	2019-02-06 15:53:54 +0000
commit     233eedd720a30dc4552a3310792871e8156e8076
tree       e136a5637597ce7237d3153103cfaeea9cf3889d
parent     dc1279d6da8e406489871229a4778e55f4177bef
AArch64 - disable CB[N]Z TB[N]Z when tracking speculation
The CB[N]Z and TB[N]Z instructions do not expose the comparison through
the condition code flags. This makes it impossible to track speculative
execution through such a branch. We can handle this relatively easily
by simply disabling the patterns in this case.
A side effect of this is that the split patterns for the atomic operations
also need to avoid generating these instructions. They mostly have simple
fallbacks for this already.
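To make that fallback concrete, here is a condensed sketch of the pattern the
patch repeats three times in aarch64.c below.  `cond' and `label' stand in for
the splitter-local value being tested and the branch target, so this is
illustrative rather than compilable on its own:

    rtx x;
    if (aarch64_track_speculation)
      {
	/* Emit an explicit compare (CMP cond, #0) so the result is
	   visible in the CC register, where the speculation-tracking
	   code can observe it.  */
	rtx cc_reg = aarch64_gen_compare_reg (NE, cond, const0_rtx);
	x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
      }
    else
      /* Bare comparison against zero; this is what would normally be
	 matched as a CB[N]Z.  */
      x = gen_rtx_NE (VOIDmode, cond, const0_rtx);

    /* Conditional branch to `label', marked as unlikely taken.  */
    x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
			      gen_rtx_LABEL_REF (Pmode, label), pc_rtx);
    aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));

The extra compare is only emitted on the -mtrack-speculation path; default
code generation for these splitters is unchanged.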
* config/aarch64/aarch64.md (cb<optab><mode>1): Disable when
aarch64_track_speculation is true.
(tb<optab><mode>1): Likewise.
* config/aarch64/aarch64.c (aarch64_split_compare_regs): Do not
generate CB[N]Z when tracking speculation.
(aarch64_split_compare_and_swap): Likewise.
(aarch64_split_atomic_op): Likewise.
(backported eaf891585b16ef84046ae73a01bb1c7189c682e9)
Change-Id: I0d0a090ef7bcf074b9e4d6ee115e71faa157962f
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@263172 138bc75d-0d04-0410-961f-82ee72b054a4
 gcc/ChangeLog                 | 10
 gcc/config/aarch64/aarch64.c  | 33
 gcc/config/aarch64/aarch64.md |  6
3 files changed, 43 insertions, 6 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 017782351db..384627cb3c1 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,15 @@
 2018-07-31  Richard Earnshaw  <rearnsha@arm.com>
 
+	* config/aarch64/aarch64.md (cb<optab><mode>1): Disable when
+	aarch64_track_speculation is true.
+	(tb<optab><mode>1): Likewise.
+	* config/aarch64/aarch64.c (aarch64_split_compare_regs): Do not
+	generate CB[N]Z when tracking speculation.
+	(aarch64_split_compare_and_swap): Likewise.
+	(aarch64_split_atomic_op): Likewise.
+
+2018-07-31  Richard Earnshaw  <rearnsha@arm.com>
+
 	* config/aarch64/aarch64.opt (mtrack-speculation): New target option.
 
 2018-07-31  Richard Earnshaw  <rearnsha@arm.com>
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 795033faeae..975617f5f31 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -11650,7 +11650,16 @@ aarch64_split_compare_and_swap (rtx operands[])
 
   if (strong_zero_p)
     {
-      x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
+      if (aarch64_track_speculation)
+	{
+	  /* Emit an explicit compare instruction, so that we can correctly
+	     track the condition codes.  */
+	  rtx cc_reg = aarch64_gen_compare_reg (NE, rval, const0_rtx);
+	  x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
+	}
+      else
+	x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
+
       x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
 				gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
       aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
@@ -11668,7 +11677,16 @@
 
   if (!is_weak)
     {
-      x = gen_rtx_NE (VOIDmode, scratch, const0_rtx);
+      if (aarch64_track_speculation)
+	{
+	  /* Emit an explicit compare instruction, so that we can correctly
+	     track the condition codes.  */
+	  rtx cc_reg = aarch64_gen_compare_reg (NE, scratch, const0_rtx);
+	  x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
+	}
+      else
+	x = gen_rtx_NE (VOIDmode, scratch, const0_rtx);
+
       x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
 				gen_rtx_LABEL_REF (Pmode, label1), pc_rtx);
       aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
@@ -12004,7 +12022,16 @@ aarch64_split_atomic_op (enum rtx_code code, rtx old_out, rtx new_out, rtx mem,
   aarch64_emit_store_exclusive (mode, cond, mem,
 				gen_lowpart (mode, new_out), model_rtx);
 
-  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
+  if (aarch64_track_speculation)
+    {
+      /* Emit an explicit compare instruction, so that we can correctly
+	 track the condition codes.  */
+      rtx cc_reg = aarch64_gen_compare_reg (NE, cond, const0_rtx);
+      x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
+    }
+  else
+    x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
+
   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, gen_rtx_LABEL_REF (Pmode, label),
 			    pc_rtx);
   aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 6a03262cc06..bc8d736f595 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -597,7 +597,7 @@
 				(const_int 0))
 			   (label_ref (match_operand 1 "" ""))
 			   (pc)))]
-  ""
+  "!aarch64_track_speculation"
   {
     if (get_attr_length (insn) == 8)
       return aarch64_gen_far_branch (operands, 1, "Lcb", "<inv_cb>\\t%<w>0, ");
@@ -627,7 +627,7 @@
 	     (label_ref (match_operand 2 "" ""))
 	     (pc)))
    (clobber (reg:CC CC_REGNUM))]
-  ""
+  "!aarch64_track_speculation"
   {
     if (get_attr_length (insn) == 8)
       {
@@ -663,7 +663,7 @@
 			   (label_ref (match_operand 1 "" ""))
 			   (pc)))
    (clobber (reg:CC CC_REGNUM))]
-  ""
+  "!aarch64_track_speculation"
   {
     if (get_attr_length (insn) == 8)
       {