author     rth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>   2012-01-10 04:14:09 +0000
committer  rth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>   2012-01-10 04:14:09 +0000
commit     e1b93706d7bfb87bb5cf39a422aa94c0322d0740 (patch)
tree       4beb1f30ce9aaa086052f1a9b009b27fe336a1e0 /gcc/config/arm/sync.md
parent     b2abbcafff02c0bbc41e66a1ef7da55f04577e94 (diff)
download   gcc-e1b93706d7bfb87bb5cf39a422aa94c0322d0740.tar.gz
arm: Convert to atomic optabs.
* config/arm/arm.c (arm_gen_compare_reg): Add scratch argument;
use it if reload_completed.
(arm_legitimize_sync_memory, arm_emit, arm_insn_count, arm_count,
arm_output_asm_insn, arm_process_output_memory_barrier,
arm_output_memory_barrier, arm_ldrex_suffix, arm_output_ldrex,
arm_output_strex, arm_output_it, arm_output_op2, arm_output_op3,
arm_output_sync_loop, arm_get_sync_operand, FETCH_SYNC_OPERAND,
arm_process_output_sync_insn, arm_output_sync_insn,
arm_sync_loop_insns, arm_call_generator, arm_expand_sync): Remove.
(arm_pre_atomic_barrier, arm_post_atomic_barrier): New.
(arm_emit_load_exclusive, arm_emit_store_exclusive): New.
(emit_unlikely_jump): New.
(arm_expand_compare_and_swap, arm_split_compare_and_swap): New.
(arm_split_atomic_op): New.
* config/arm/arm-protos.h: Update.
* config/arm/arm.h (enum arm_sync_generator_tag): Remove.
(struct arm_sync_generator): Remove.
* config/arm/arm.md (VUNSPEC_SYNC_COMPARE_AND_SWAP, VUNSPEC_SYNC_LOCK,
VUNSPEC_SYNC_OP, VUNSPEC_SYNC_NEW_OP, VUNSPEC_SYNC_OLD_OP): Remove.
(VUNSPEC_ATOMIC_CAS, VUNSPEC_ATOMIC_XCHG, VUNSPEC_ATOMIC_OP): New.
(VUNSPEC_LL, VUNSPEC_SC): New.
(sync_result, sync_memory, sync_required_value, sync_new_value,
sync_t1, sync_t2, sync_release_barrier, sync_op): Remove.
(attr length): Don't use arm_sync_loop_insns.
(cbranch_cc, cstore_cc): Update call to arm_gen_compare_reg.
(movsfcc, movdfcc): Likewise.
* config/arm/constraints.md (Ua): New.
* config/arm/predicates.md (mem_noofs_operand): New.
* config/arm/sync.md (sync_compare_and_swap<QHSD>,
sync_lock_test_and_set<QHSD>): Remove.
(sync_clobber, sync_t2_reqd): Remove.
(sync_<syncop><QHSD>, sync_nand<QHSD>): Remove.
(sync_new_<syncop><QHSD>, sync_new_nand<QHSD>): Remove.
(sync_old_<syncop><QHSD>, sync_old_nand<QHSD>): Remove.
(arm_sync_compare_and_swap<SIDI>): Remove.
(arm_sync_compare_and_swap<NARROW>): Remove.
(arm_sync_lock_test_and_set<SIDI>): Remove.
(arm_sync_lock_test_and_set<NARROW>): Remove.
(arm_sync_new_<syncop><SIDI>): Remove.
(arm_sync_new_<syncop><NARROW>): Remove.
(arm_sync_new_nand<SIDI>): Remove.
(arm_sync_new_nand<NARROW>): Remove.
(arm_sync_old_<syncop><SIDI>): Remove.
(arm_sync_old_<syncop><NARROW>): Remove.
(arm_sync_old_nand<SIDI>): Remove.
(arm_sync_old_nand<NARROW>): Remove.
(*memory_barrier): Merge arm_output_memory_barrier.
(atomic_compare_and_swap<QHSD>): New.
(atomic_compare_and_swap<NARROW>_1): New.
(atomic_compare_and_swap<SIDI>_1): New.
(atomic_exchange<QHSD>): New.
(cas_cmp_operand, cas_cmp_str): New.
(atomic_op_operand, atomic_op_str): New.
(atomic_<syncop><QHSD>, atomic_nand<QHSD>): New.
(atomic_fetch_<syncop><QHSD>, atomic_fetch_nand<QHSD>): New.
(atomic_<syncop>_fetch<QHSD>, atomic_nand_fetch<QHSD>): New.
(arm_load_exclusive<NARROW>): New.
(arm_load_exclusivesi, arm_load_exclusivedi): New.
(arm_store_exclusive<QHSD>): New.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@183050 138bc75d-0d04-0410-961f-82ee72b054a4
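The conversion means sync.md now feeds the generic __atomic optabs instead of the old __sync expanders, so the same ldrex/strex patterns back both the legacy __sync_* builtins and the C11-style atomic interface. A minimal sketch of the kind of source these optabs serve, assuming an ARMv7 target such as -march=armv7-a (the function names here are illustrative and not part of the patch):

/* Illustrative only: each builtin below reaches one of the new
   expanders (atomic_fetch_<op><mode>, atomic_exchange<mode>,
   atomic_compare_and_swap<mode>) when the <sync_predtab> conditions
   hold, e.g. when compiling for ARMv7.  */

#include <stdatomic.h>
#include <stdbool.h>

int
fetch_and_add (atomic_int *p, int v)
{
  /* Goes through atomic_fetch_<op><mode>: a ldrex/add/strex loop.  */
  return atomic_fetch_add_explicit (p, v, memory_order_seq_cst);
}

int
exchange (atomic_int *p, int v)
{
  /* Goes through atomic_exchange<mode>.  */
  return atomic_exchange_explicit (p, v, memory_order_seq_cst);
}

bool
cas (atomic_int *p, int expected, int desired)
{
  /* Goes through atomic_compare_and_swap<mode>; per the patterns below,
     the boolean result is carried in the condition codes (CC_REGNUM).  */
  return atomic_compare_exchange_strong_explicit
    (p, &expected, desired, memory_order_seq_cst, memory_order_seq_cst);
}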
Diffstat (limited to 'gcc/config/arm/sync.md')
-rw-r--r--   gcc/config/arm/sync.md   670
1 file changed, 264 insertions, 406 deletions
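In the new scheme, each atomic_* insn in the diff below emits "#" and is split after reload by arm_split_atomic_op into a load-exclusive/store-exclusive retry loop bracketed by barriers. A rough hand-written equivalent of that loop for a 32-bit seq_cst fetch-and-add, using GCC inline assembly (a sketch only, assuming an ARMv7 target with DMB; register allocation and scheduling are the compiler's business):

/* Not part of the patch: an approximation of the sequence
   arm_split_atomic_op generates for a seq_cst SImode fetch-and-add.  */
static inline int
fetch_add_si (int *mem, int val)
{
  int old, newval, failed;
  __asm__ __volatile__ (
    "   dmb     sy\n"              /* arm_pre_atomic_barrier */
    "1: ldrex   %0, [%4]\n"        /* old = *mem, claim exclusive monitor */
    "   add     %1, %0, %5\n"      /* newval = old + val */
    "   strex   %2, %1, [%4]\n"    /* failed = 0 iff the store succeeded */
    "   teq     %2, #0\n"
    "   bne     1b\n"              /* lost the reservation: retry */
    "   dmb     sy"                /* arm_post_atomic_barrier */
    : "=&r" (old), "=&r" (newval), "=&r" (failed), "+Q" (*mem)
    : "r" (mem), "r" (val)
    : "memory", "cc");
  return old;
}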
diff --git a/gcc/config/arm/sync.md b/gcc/config/arm/sync.md index 40ee93c35d1..96de0f37d8d 100644 --- a/gcc/config/arm/sync.md +++ b/gcc/config/arm/sync.md @@ -1,5 +1,5 @@ ;; Machine description for ARM processor synchronization primitives. -;; Copyright (C) 2010 Free Software Foundation, Inc. +;; Copyright (C) 2010, 2012 Free Software Foundation, Inc. ;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com) ;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org) ;; @@ -19,11 +19,20 @@ ;; along with GCC; see the file COPYING3. If not see ;; <http://www.gnu.org/licenses/>. */ -;; ARMV6 introduced ldrex and strex instruction. These instruction -;; access SI width data. In order to implement synchronization -;; primitives for the narrower QI and HI modes we insert appropriate -;; AND/OR sequences into the synchronization loop to mask out the -;; relevant component of an SI access. +(define_mode_attr sync_predtab + [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER") + (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER") + (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER") + (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN + && TARGET_HAVE_MEMORY_BARRIER")]) + +(define_code_iterator syncop [plus minus ior xor and]) + +(define_code_attr sync_optab + [(ior "ior") (xor "xor") (and "and") (plus "add") (minus "sub")]) + +(define_mode_attr sync_sfx + [(QI "b") (HI "h") (SI "") (DI "d")]) (define_expand "memory_barrier" [(set (match_dup 0) @@ -34,463 +43,312 @@ MEM_VOLATILE_P (operands[0]) = 1; }) - -(define_mode_attr sync_predtab [(SI "TARGET_HAVE_LDREX && - TARGET_HAVE_MEMORY_BARRIER") - (QI "TARGET_HAVE_LDREXBH && - TARGET_HAVE_MEMORY_BARRIER") - (HI "TARGET_HAVE_LDREXBH && - TARGET_HAVE_MEMORY_BARRIER") - (DI "TARGET_HAVE_LDREXD && - ARM_DOUBLEWORD_ALIGN && - TARGET_HAVE_MEMORY_BARRIER")]) - -(define_expand "sync_compare_and_swap<mode>" - [(set (match_operand:QHSD 0 "s_register_operand") - (unspec_volatile:QHSD [(match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand") - (match_operand:QHSD 3 "s_register_operand")] - VUNSPEC_SYNC_COMPARE_AND_SWAP))] - "<sync_predtab>" - { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omrn; - generator.u.omrn = gen_arm_sync_compare_and_swap<mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], - operands[2], operands[3]); - DONE; - }) - -(define_expand "sync_lock_test_and_set<mode>" - [(match_operand:QHSD 0 "s_register_operand") - (match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand")] - "<sync_predtab>" +(define_insn "*memory_barrier" + [(set (match_operand:BLK 0 "" "") + (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))] + "TARGET_HAVE_MEMORY_BARRIER" { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_lock_test_and_set<mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], NULL, - operands[2]); - DONE; - }) - -(define_code_iterator syncop [plus minus ior xor and]) - -(define_code_attr sync_optab [(ior "ior") - (xor "xor") - (and "and") - (plus "add") - (minus "sub")]) + if (TARGET_HAVE_DMB) + { + /* Note we issue a system level barrier. We should consider issuing + a inner shareabilty zone barrier here instead, ie. "DMB ISH". */ + /* ??? Differentiate based on SEQ_CST vs less strict? 
*/ + return "dmb\tsy"; + } -(define_code_attr sync_clobber [(ior "=&r") - (and "=&r") - (xor "X") - (plus "X") - (minus "X")]) + if (TARGET_HAVE_DMB_MCR) + return "mcr\tp15, 0, r0, c7, c10, 5"; -(define_code_attr sync_t2_reqd [(ior "4") - (and "4") - (xor "*") - (plus "*") - (minus "*")]) - -(define_expand "sync_<sync_optab><mode>" - [(match_operand:QHSD 0 "memory_operand") - (match_operand:QHSD 1 "s_register_operand") - (syncop:QHSD (match_dup 0) (match_dup 1))] - "<sync_predtab>" - { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_new_<sync_optab><mode>; - arm_expand_sync (<MODE>mode, &generator, NULL, operands[0], NULL, - operands[1]); - DONE; - }) + gcc_unreachable (); + } + [(set_attr "length" "4") + (set_attr "conds" "unconditional") + (set_attr "predicable" "no")]) -(define_expand "sync_nand<mode>" - [(match_operand:QHSD 0 "memory_operand") - (match_operand:QHSD 1 "s_register_operand") - (not:QHSD (and:QHSD (match_dup 0) (match_dup 1)))] +(define_expand "atomic_compare_and_swap<mode>" + [(match_operand:SI 0 "s_register_operand" "") ;; bool out + (match_operand:QHSD 1 "s_register_operand" "") ;; val out + (match_operand:QHSD 2 "mem_noofs_operand" "") ;; memory + (match_operand:QHSD 3 "general_operand" "") ;; expected + (match_operand:QHSD 4 "s_register_operand" "") ;; desired + (match_operand:SI 5 "const_int_operand") ;; is_weak + (match_operand:SI 6 "const_int_operand") ;; mod_s + (match_operand:SI 7 "const_int_operand")] ;; mod_f "<sync_predtab>" - { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_new_nand<mode>; - arm_expand_sync (<MODE>mode, &generator, NULL, operands[0], NULL, - operands[1]); - DONE; - }) +{ + arm_expand_compare_and_swap (operands); + DONE; +}) -(define_expand "sync_new_<sync_optab><mode>" - [(match_operand:QHSD 0 "s_register_operand") - (match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand") - (syncop:QHSD (match_dup 1) (match_dup 2))] +(define_insn_and_split "atomic_compare_and_swap<mode>_1" + [(set (reg:CC_Z CC_REGNUM) ;; bool out + (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS)) + (set (match_operand:SI 0 "s_register_operand" "=&r") ;; val out + (zero_extend:SI + (match_operand:NARROW 1 "mem_noofs_operand" "+Ua"))) ;; memory + (set (match_dup 1) + (unspec_volatile:NARROW + [(match_operand:SI 2 "arm_add_operand" "rIL") ;; expected + (match_operand:NARROW 3 "s_register_operand" "r") ;; desired + (match_operand:SI 4 "const_int_operand") ;; is_weak + (match_operand:SI 5 "const_int_operand") ;; mod_s + (match_operand:SI 6 "const_int_operand")] ;; mod_f + VUNSPEC_ATOMIC_CAS)) + (clobber (match_scratch:SI 7 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_new_<sync_optab><mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], - NULL, operands[2]); + arm_split_compare_and_swap (operands); DONE; }) -(define_expand "sync_new_nand<mode>" - [(match_operand:QHSD 0 "s_register_operand") - (match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand") - (not:QHSD (and:QHSD (match_dup 1) (match_dup 2)))] - "<sync_predtab>" - { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_new_nand<mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], - 
NULL, operands[2]); - DONE; - }); +(define_mode_attr cas_cmp_operand + [(SI "arm_add_operand") (DI "cmpdi_operand")]) +(define_mode_attr cas_cmp_str + [(SI "rIL") (DI "rDi")]) -(define_expand "sync_old_<sync_optab><mode>" - [(match_operand:QHSD 0 "s_register_operand") - (match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand") - (syncop:QHSD (match_dup 1) (match_dup 2))] +(define_insn_and_split "atomic_compare_and_swap<mode>_1" + [(set (reg:CC_Z CC_REGNUM) ;; bool out + (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS)) + (set (match_operand:SIDI 0 "s_register_operand" "=&r") ;; val out + (match_operand:SIDI 1 "mem_noofs_operand" "+Ua")) ;; memory + (set (match_dup 1) + (unspec_volatile:SIDI + [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect + (match_operand:SIDI 3 "s_register_operand" "r") ;; desired + (match_operand:SI 4 "const_int_operand") ;; is_weak + (match_operand:SI 5 "const_int_operand") ;; mod_s + (match_operand:SI 6 "const_int_operand")] ;; mod_f + VUNSPEC_ATOMIC_CAS)) + (clobber (match_scratch:SI 7 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_old_<sync_optab><mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], - NULL, operands[2]); + arm_split_compare_and_swap (operands); DONE; }) -(define_expand "sync_old_nand<mode>" - [(match_operand:QHSD 0 "s_register_operand") - (match_operand:QHSD 1 "memory_operand") - (match_operand:QHSD 2 "s_register_operand") - (not:QHSD (and:QHSD (match_dup 1) (match_dup 2)))] +(define_insn_and_split "atomic_exchange<mode>" + [(set (match_operand:QHSD 0 "s_register_operand" "=&r") ;; output + (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")) ;; memory + (set (match_dup 1) + (unspec_volatile:QHSD + [(match_operand:QHSD 2 "s_register_operand" "r") ;; input + (match_operand:SI 3 "const_int_operand" "")] ;; model + VUNSPEC_ATOMIC_XCHG)) + (clobber (reg:CC CC_REGNUM)) + (clobber (match_scratch:SI 4 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - struct arm_sync_generator generator; - generator.op = arm_sync_generator_omn; - generator.u.omn = gen_arm_sync_old_nand<mode>; - arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], - NULL, operands[2]); + arm_split_atomic_op (SET, operands[0], NULL, operands[1], + operands[2], operands[3], operands[4]); DONE; }) -(define_insn "arm_sync_compare_and_swap<mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (unspec_volatile:SIDI - [(match_operand:SIDI 1 "arm_sync_memory_operand" "+Q") - (match_operand:SIDI 2 "s_register_operand" "r") - (match_operand:SIDI 3 "s_register_operand" "r")] - VUNSPEC_SYNC_COMPARE_AND_SWAP)) - (set (match_dup 1) (unspec_volatile:SIDI [(match_dup 2)] - VUNSPEC_SYNC_COMPARE_AND_SWAP)) - (set (reg:CC CC_REGNUM) (unspec_volatile:CC [(match_dup 1)] - VUNSPEC_SYNC_COMPARE_AND_SWAP)) - ] - "<sync_predtab>" - { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_required_value" "2") - (set_attr "sync_new_value" "3") - (set_attr "sync_t1" "0") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) +(define_mode_attr atomic_op_operand + [(QI "reg_or_int_operand") + (HI "reg_or_int_operand") + (SI "reg_or_int_operand") + (DI "s_register_operand")]) -(define_insn "arm_sync_compare_and_swap<mode>" - [(set (match_operand:SI 0 
"s_register_operand" "=&r") - (zero_extend:SI - (unspec_volatile:NARROW - [(match_operand:NARROW 1 "arm_sync_memory_operand" "+Q") - (match_operand:SI 2 "s_register_operand" "r") - (match_operand:SI 3 "s_register_operand" "r")] - VUNSPEC_SYNC_COMPARE_AND_SWAP))) - (set (match_dup 1) (unspec_volatile:NARROW [(match_dup 2)] - VUNSPEC_SYNC_COMPARE_AND_SWAP)) - (set (reg:CC CC_REGNUM) (unspec_volatile:CC [(match_dup 1)] - VUNSPEC_SYNC_COMPARE_AND_SWAP)) - ] - "<sync_predtab>" - { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_required_value" "2") - (set_attr "sync_new_value" "3") - (set_attr "sync_t1" "0") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) +(define_mode_attr atomic_op_str + [(QI "rn") (HI "rn") (SI "rn") (DI "r")]) -(define_insn "arm_sync_lock_test_and_set<mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (match_operand:SIDI 1 "arm_sync_memory_operand" "+Q")) - (set (match_dup 1) - (unspec_volatile:SIDI [(match_operand:SIDI 2 "s_register_operand" "r")] - VUNSPEC_SYNC_LOCK)) +(define_insn_and_split "atomic_<sync_optab><mode>" + [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua") + (unspec_volatile:QHSD + [(syncop:QHSD (match_dup 0) + (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")) + (match_operand:SI 2 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:QHSD 3 "=&r")) + (clobber (match_scratch:SI 4 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_release_barrier" "no") - (set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0], + operands[1], operands[2], operands[4]); + DONE; + }) -(define_insn "arm_sync_lock_test_and_set<mode>" - [(set (match_operand:SI 0 "s_register_operand" "=&r") - (zero_extend:SI (match_operand:NARROW 1 "arm_sync_memory_operand" "+Q"))) - (set (match_dup 1) - (unspec_volatile:NARROW [(match_operand:SI 2 "s_register_operand" "r")] - VUNSPEC_SYNC_LOCK)) +(define_insn_and_split "atomic_nand<mode>" + [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua") + (unspec_volatile:QHSD + [(not:QHSD + (and:QHSD (match_dup 0) + (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))) + (match_operand:SI 2 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:QHSD 3 "=&r")) + (clobber (match_scratch:SI 4 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_release_barrier" "no") - (set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (NOT, NULL, operands[3], operands[0], + operands[1], operands[2], operands[4]); + DONE; + }) -(define_insn "arm_sync_new_<sync_optab><mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (unspec_volatile:SIDI [(syncop:SIDI - (match_operand:SIDI 1 "arm_sync_memory_operand" "+Q") - (match_operand:SIDI 2 "s_register_operand" "r")) - ] 
- VUNSPEC_SYNC_NEW_OP)) +(define_insn_and_split "atomic_fetch_<sync_optab><mode>" + [(set (match_operand:QHSD 0 "s_register_operand" "=&r") + (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")) (set (match_dup 1) - (unspec_volatile:SIDI [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_NEW_OP)) + (unspec_volatile:QHSD + [(syncop:QHSD (match_dup 1) + (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")) + (match_operand:SI 3 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:QHSD 4 "=&r")) + (clobber (match_scratch:SI 5 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "sync_op" "<sync_optab>") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1], + operands[2], operands[3], operands[5]); + DONE; + }) -(define_insn "arm_sync_new_<sync_optab><mode>" - [(set (match_operand:SI 0 "s_register_operand" "=&r") - (unspec_volatile:SI [(syncop:SI - (zero_extend:SI - (match_operand:NARROW 1 "arm_sync_memory_operand" "+Q")) - (match_operand:SI 2 "s_register_operand" "r")) - ] - VUNSPEC_SYNC_NEW_OP)) +(define_insn_and_split "atomic_fetch_nand<mode>" + [(set (match_operand:QHSD 0 "s_register_operand" "=&r") + (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")) (set (match_dup 1) - (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_NEW_OP)) + (unspec_volatile:QHSD + [(not:QHSD + (and:QHSD (match_dup 1) + (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))) + (match_operand:SI 3 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:QHSD 4 "=&r")) + (clobber (match_scratch:SI 5 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "sync_op" "<sync_optab>") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (NOT, operands[0], operands[4], operands[1], + operands[2], operands[3], operands[5]); + DONE; + }) -(define_insn "arm_sync_new_nand<mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (unspec_volatile:SIDI [(not:SIDI (and:SIDI - (match_operand:SIDI 1 "arm_sync_memory_operand" "+Q") - (match_operand:SIDI 2 "s_register_operand" "r"))) - ] - VUNSPEC_SYNC_NEW_OP)) +(define_insn_and_split "atomic_<sync_optab>_fetch<mode>" + [(set (match_operand:QHSD 0 "s_register_operand" "=&r") + (syncop:QHSD + (match_operand:QHSD 1 "mem_noofs_operand" "+Ua") + (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))) (set (match_dup 1) - (unspec_volatile:SIDI [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_NEW_OP)) + (unspec_volatile:QHSD + [(match_dup 1) (match_dup 2) + (match_operand:SI 3 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:SI 4 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr 
"sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "sync_op" "nand") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1], + operands[2], operands[3], operands[4]); + DONE; + }) -(define_insn "arm_sync_new_nand<mode>" - [(set (match_operand:SI 0 "s_register_operand" "=&r") - (unspec_volatile:SI - [(not:SI - (and:SI - (zero_extend:SI - (match_operand:NARROW 1 "arm_sync_memory_operand" "+Q")) - (match_operand:SI 2 "s_register_operand" "r"))) - ] VUNSPEC_SYNC_NEW_OP)) +(define_insn_and_split "atomic_nand_fetch<mode>" + [(set (match_operand:QHSD 0 "s_register_operand" "=&r") + (not:QHSD + (and:QHSD + (match_operand:QHSD 1 "mem_noofs_operand" "+Ua") + (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))) (set (match_dup 1) - (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_NEW_OP)) + (unspec_volatile:QHSD + [(match_dup 1) (match_dup 2) + (match_operand:SI 3 "const_int_operand")] ;; model + VUNSPEC_ATOMIC_OP)) (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r"))] + (clobber (match_scratch:SI 4 "=&r"))] "<sync_predtab>" + "#" + "&& reload_completed" + [(const_int 0)] { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "0") - (set_attr "sync_t2" "3") - (set_attr "sync_op" "nand") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + arm_split_atomic_op (NOT, NULL, operands[0], operands[1], + operands[2], operands[3], operands[4]); + DONE; + }) -(define_insn "arm_sync_old_<sync_optab><mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (unspec_volatile:SIDI [(syncop:SIDI - (match_operand:SIDI 1 "arm_sync_memory_operand" "+Q") - (match_operand:SIDI 2 "s_register_operand" "r")) - ] - VUNSPEC_SYNC_OLD_OP)) - (set (match_dup 1) - (unspec_volatile:SIDI [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_OLD_OP)) - (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SIDI 3 "=&r")) - (clobber (match_scratch:SI 4 "<sync_clobber>"))] - "<sync_predtab>" - { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "3") - (set_attr "sync_t2" "<sync_t2_reqd>") - (set_attr "sync_op" "<sync_optab>") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) +(define_insn "arm_load_exclusive<mode>" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (zero_extend:SI + (unspec_volatile:NARROW + [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")] + VUNSPEC_LL)))] + "TARGET_HAVE_LDREXBH" + "ldrex<sync_sfx>%?\t%0, %C1" + [(set_attr "predicable" "yes")]) -(define_insn "arm_sync_old_<sync_optab><mode>" - [(set (match_operand:SI 0 "s_register_operand" "=&r") - (unspec_volatile:SI [(syncop:SI - (zero_extend:SI - (match_operand:NARROW 1 "arm_sync_memory_operand" "+Q")) - (match_operand:SI 2 "s_register_operand" "r")) - ] - VUNSPEC_SYNC_OLD_OP)) - (set (match_dup 1) - (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_OLD_OP)) - (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r")) - (clobber (match_scratch:SI 4 "<sync_clobber>"))] - "<sync_predtab>" - { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "3") - (set_attr "sync_t2" 
"<sync_t2_reqd>") - (set_attr "sync_op" "<sync_optab>") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) +(define_insn "arm_load_exclusivesi" + [(set (match_operand:SI 0 "s_register_operand" "=r") + (unspec_volatile:SI + [(match_operand:SI 1 "mem_noofs_operand" "Ua")] + VUNSPEC_LL))] + "TARGET_HAVE_LDREX" + "ldrex%?\t%0, %C1" + [(set_attr "predicable" "yes")]) -(define_insn "arm_sync_old_nand<mode>" - [(set (match_operand:SIDI 0 "s_register_operand" "=&r") - (unspec_volatile:SIDI [(not:SIDI (and:SIDI - (match_operand:SIDI 1 "arm_sync_memory_operand" "+Q") - (match_operand:SIDI 2 "s_register_operand" "r"))) - ] - VUNSPEC_SYNC_OLD_OP)) - (set (match_dup 1) - (unspec_volatile:SIDI [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_OLD_OP)) - (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SIDI 3 "=&r")) - (clobber (match_scratch:SI 4 "=&r"))] - "<sync_predtab>" +(define_insn "arm_load_exclusivedi" + [(set (match_operand:DI 0 "s_register_operand" "=r") + (unspec_volatile:DI + [(match_operand:DI 1 "mem_noofs_operand" "Ua")] + VUNSPEC_LL))] + "TARGET_HAVE_LDREXD" { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "3") - (set_attr "sync_t2" "4") - (set_attr "sync_op" "nand") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) + rtx target = operands[0]; + /* The restrictions on target registers in ARM mode are that the two + registers are consecutive and the first one is even; Thumb is + actually more flexible, but DI should give us this anyway. + Note that the 1st register always gets the lowest word in memory. */ + gcc_assert ((REGNO (target) & 1) == 0); + operands[2] = gen_rtx_REG (SImode, REGNO (target) + 1); + return "ldrexd%?\t%0, %2, %C1"; + } + [(set_attr "predicable" "yes")]) -(define_insn "arm_sync_old_nand<mode>" +(define_insn "arm_store_exclusive<mode>" [(set (match_operand:SI 0 "s_register_operand" "=&r") - (unspec_volatile:SI [(not:SI (and:SI - (zero_extend:SI - (match_operand:NARROW 1 "arm_sync_memory_operand" "+Q")) - (match_operand:SI 2 "s_register_operand" "r"))) - ] - VUNSPEC_SYNC_OLD_OP)) - (set (match_dup 1) - (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] - VUNSPEC_SYNC_OLD_OP)) - (clobber (reg:CC CC_REGNUM)) - (clobber (match_scratch:SI 3 "=&r")) - (clobber (match_scratch:SI 4 "=&r"))] + (unspec_volatile:SI [(const_int 0)] VUNSPEC_SC)) + (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua") + (unspec_volatile:QHSD + [(match_operand:QHSD 2 "s_register_operand" "r")] + VUNSPEC_SC))] "<sync_predtab>" { - return arm_output_sync_insn (insn, operands); - } - [(set_attr "sync_result" "0") - (set_attr "sync_memory" "1") - (set_attr "sync_new_value" "2") - (set_attr "sync_t1" "3") - (set_attr "sync_t2" "4") - (set_attr "sync_op" "nand") - (set_attr "conds" "clob") - (set_attr "predicable" "no")]) - -(define_insn "*memory_barrier" - [(set (match_operand:BLK 0 "" "") - (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))] - "TARGET_HAVE_MEMORY_BARRIER" - { - return arm_output_memory_barrier (operands); + if (<MODE>mode == DImode) + { + rtx value = operands[2]; + /* The restrictions on target registers in ARM mode are that the two + registers are consecutive and the first one is even; Thumb is + actually more flexible, but DI should give us this anyway. + Note that the 1st register always gets the lowest word in memory. 
*/ + gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2); + operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1); + return "strexd%?\t%0, %2, %3, %C1"; + } + return "strex<sync_sfx>%?\t%0, %2, %C1"; } - [(set_attr "length" "4") - (set_attr "conds" "unconditional") - (set_attr "predicable" "no")]) - + [(set_attr "predicable" "yes")]) |
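For reference, the compare-and-swap splitter follows the same shape: arm_split_compare_and_swap loads exclusively, compares against the expected value, bails out on mismatch, and otherwise retries the exclusive store until it succeeds; for DImode, the ldrexd/strexd forms operate on a consecutive even/odd register pair in ARM mode, as the comments in the patterns above note. A hand-written sketch of the SImode strong case, again only an approximation under the same ARMv7 assumption:

/* Not part of the patch: roughly the loop arm_split_compare_and_swap
   produces for a strong SImode compare-and-swap with seq_cst ordering
   on both the success and failure paths.  */
static inline int
cas_si (int *mem, int expected, int desired)
{
  int old, failed;
  __asm__ __volatile__ (
    "   dmb     sy\n"
    "1: ldrex   %0, [%3]\n"        /* old = *mem */
    "   cmp     %0, %4\n"          /* matches expected?  */
    "   bne     2f\n"              /* no: leave memory untouched */
    "   strex   %1, %5, [%3]\n"    /* try to install desired */
    "   cmp     %1, #0\n"
    "   bne     1b\n"              /* reservation lost: retry */
    "2: dmb     sy"
    : "=&r" (old), "=&r" (failed), "+Q" (*mem)
    : "r" (mem), "r" (expected), "r" (desired)
    : "memory", "cc");
  return old;                      /* success iff old == expected */
}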