|
|
67287f |
2014-02-20 Richard Henderson <rth@redhat.com>
|
|
|
67287f |
|
|
|
67287f |
PR c++/60272
|
|
|
67287f |
* builtins.c (expand_builtin_atomic_compare_exchange): Conditionalize
|
|
|
67287f |
on failure the store back into EXPECT. Always make a new pseudo for
|
|
|
67287f |
OLDVAL.
|
|
|
67287f |
|
|
|
67287f |
* cas_n.c (libat_compare_exchange): Conditionalize on failure
|
|
|
67287f |
the store back to EPTR.
|
|
|
67287f |
|
|
|
67287f |
--- gcc/builtins.c (revision 207972)
|
|
|
67287f |
+++ gcc/builtins.c (revision 207973)
|
|
|
67287f |
@@ -5350,7 +5350,7 @@ static rtx
|
|
|
67287f |
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
|
|
|
67287f |
rtx target)
|
|
|
67287f |
{
|
|
|
67287f |
- rtx expect, desired, mem, oldval;
|
|
|
67287f |
+ rtx expect, desired, mem, oldval, label;
|
|
|
67287f |
enum memmodel success, failure;
|
|
|
67287f |
tree weak;
|
|
|
67287f |
bool is_weak;
|
|
|
67287f |
@@ -5388,14 +5388,26 @@ expand_builtin_atomic_compare_exchange (
|
|
|
67287f |
if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
|
|
|
67287f |
is_weak = true;
|
|
|
67287f |
|
|
|
67287f |
- oldval = expect;
|
|
|
67287f |
- if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
|
|
|
67287f |
- &oldval, mem, oldval, desired,
|
|
|
67287f |
+ if (target == const0_rtx)
|
|
|
67287f |
+ target = NULL;
|
|
|
67287f |
+
|
|
|
67287f |
+  /* Lest the rtl backend create a race condition with an improper store
|
|
|
67287f |
+ to memory, always create a new pseudo for OLDVAL. */
|
|
|
67287f |
+ oldval = NULL;
|
|
|
67287f |
+
|
|
|
67287f |
+ if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
|
|
|
67287f |
is_weak, success, failure))
|
|
|
67287f |
return NULL_RTX;
|
|
|
67287f |
|
|
|
67287f |
- if (oldval != expect)
|
|
|
67287f |
- emit_move_insn (expect, oldval);
|
|
|
67287f |
+ /* Conditionally store back to EXPECT, lest we create a race condition
|
|
|
67287f |
+ with an improper store to memory. */
|
|
|
67287f |
+ /* ??? With a rearrangement of atomics at the gimple level, we can handle
|
|
|
67287f |
+ the normal case where EXPECT is totally private, i.e. a register. At
|
|
|
67287f |
+ which point the store can be unconditional. */
|
|
|
67287f |
+ label = gen_label_rtx ();
|
|
|
67287f |
+ emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
|
|
|
67287f |
+ emit_move_insn (expect, oldval);
|
|
|
67287f |
+ emit_label (label);
|
|
|
67287f |
|
|
|
67287f |
return target;
|
|
|
67287f |
}
|
|
|
67287f |
--- libatomic/cas_n.c (revision 207972)
|
|
|
67287f |
+++ libatomic/cas_n.c (revision 207973)
|
|
|
67287f |
@@ -51,10 +51,9 @@ SIZE(libat_compare_exchange) (UTYPE *mpt
|
|
|
67287f |
#if !DONE && N <= WORDSIZE && defined(atomic_compare_exchange_w)
|
|
|
67287f |
bool
|
|
|
67287f |
SIZE(libat_compare_exchange) (UTYPE *mptr, UTYPE *eptr, UTYPE newval,
|
|
|
67287f |
- int smodel, int fmodel UNUSED)
|
|
|
67287f |
+ int smodel, int fmodel)
|
|
|
67287f |
{
|
|
|
67287f |
UWORD mask, shift, weval, woldval, wnewval, t, *wptr;
|
|
|
67287f |
- bool ret = false;
|
|
|
67287f |
|
|
|
67287f |
pre_barrier (smodel);
|
|
|
67287f |
|
|
|
67287f |
@@ -82,12 +81,13 @@ SIZE(libat_compare_exchange) (UTYPE *mpt
|
|
|
67287f |
}
|
|
|
67287f |
while (!atomic_compare_exchange_w (wptr, &woldval, t, true,
|
|
|
67287f |
__ATOMIC_RELAXED, __ATOMIC_RELAXED));
|
|
|
67287f |
- ret = true;
|
|
|
67287f |
+ post_barrier (smodel);
|
|
|
67287f |
+ return true;
|
|
|
67287f |
+
|
|
|
67287f |
failure:
|
|
|
67287f |
*eptr = woldval >> shift;
|
|
|
67287f |
-
|
|
|
67287f |
- post_barrier (smodel);
|
|
|
67287f |
- return ret;
|
|
|
67287f |
+ post_barrier (fmodel);
|
|
|
67287f |
+ return false;
|
|
|
67287f |
}
|
|
|
67287f |
|
|
|
67287f |
#define DONE 1
|
|
|
67287f |
@@ -102,18 +102,17 @@ SIZE(libat_compare_exchange) (UTYPE *mpt
|
|
|
67287f |
{
|
|
|
67287f |
UTYPE oldval;
|
|
|
67287f |
UWORD magic;
|
|
|
67287f |
- bool ret = false;
|
|
|
67287f |
+ bool ret;
|
|
|
67287f |
|
|
|
67287f |
pre_seq_barrier (smodel);
|
|
|
67287f |
magic = protect_start (mptr);
|
|
|
67287f |
|
|
|
67287f |
oldval = *mptr;
|
|
|
67287f |
- if (oldval == *eptr)
|
|
|
67287f |
- {
|
|
|
67287f |
- *mptr = newval;
|
|
|
67287f |
- ret = true;
|
|
|
67287f |
- }
|
|
|
67287f |
- *eptr = oldval;
|
|
|
67287f |
+ ret = (oldval == *eptr);
|
|
|
67287f |
+ if (ret)
|
|
|
67287f |
+ *mptr = newval;
|
|
|
67287f |
+ else
|
|
|
67287f |
+ *eptr = oldval;
|
|
|
67287f |
|
|
|
67287f |
protect_end (mptr, magic);
|
|
|
67287f |
post_seq_barrier (smodel);
|