author     jgreenhalgh <jgreenhalgh@138bc75d-0d04-0410-961f-82ee72b054a4>    2014-05-16 09:03:19 +0000
committer  jgreenhalgh <jgreenhalgh@138bc75d-0d04-0410-961f-82ee72b054a4>    2014-05-16 09:03:19 +0000
commit     8982100415a14a5d4b8c0c8795eb82f8985c239b (patch)
tree       8ad9e1dd69424fcfccdd21f460bdc53468639ae2
parent     81d1eb9df766795dd1d5439c65eb61cbee824b72 (diff)
download   gcc-8982100415a14a5d4b8c0c8795eb82f8985c239b.tar.gz
[AArch64 costs 11/18] Improve costs for rotate and shift operations.
* config/aarch64/aarch64.c (aarch64_rtx_costs): Improve costs for
rotates and shifts.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@210503 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/ChangeLog                |  6
-rw-r--r--  gcc/config/aarch64/aarch64.c | 56
2 files changed, 53 insertions, 9 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 38a66e730cd..a31e4a3a1dd 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,6 +1,12 @@
 2014-05-16  James Greenhalgh  <james.greenhalgh@arm.com>
 	    Philipp Tomsich  <philipp.tomsich@theobroma-systems.com>
 
+	* config/aarch64/aarch64.c (aarch64_rtx_costs): Improve costs for
+	rotates and shifts.
+
+2014-05-16  James Greenhalgh  <james.greenhalgh@arm.com>
+	    Philipp Tomsich  <philipp.tomsich@theobroma-systems.com>
+
 	* config/aarch64/aarch64.c (aarch64_rtx_costs): Cost ZERO_EXTEND
 	and SIGN_EXTEND better.
 
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 76ef7ce89bf..34084a91323 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -5339,21 +5339,59 @@ cost_minus:
 	*cost += extra_cost->alu.extend;
       return false;
 
+    case ASHIFT:
+      op0 = XEXP (x, 0);
+      op1 = XEXP (x, 1);
+
+      if (CONST_INT_P (op1))
+        {
+	  /* LSL (immediate), UBMF, UBFIZ and friends.  These are all
+	     aliases.  */
+	  if (speed)
+	    *cost += extra_cost->alu.shift;
+
+	  /* We can incorporate zero/sign extend for free.  */
+	  if (GET_CODE (op0) == ZERO_EXTEND
+	      || GET_CODE (op0) == SIGN_EXTEND)
+	    op0 = XEXP (op0, 0);
+
+	  *cost += rtx_cost (op0, ASHIFT, 0, speed);
+	  return true;
+        }
+      else
+        {
+	  /* LSLV.  */
+	  if (speed)
+	    *cost += extra_cost->alu.shift_reg;
+
+	  return false;  /* All arguments need to be in registers.  */
+        }
+
     case ROTATE:
-      if (!CONST_INT_P (XEXP (x, 1)))
-	*cost += COSTS_N_INSNS (2);
-      /* Fall through.  */
     case ROTATERT:
     case LSHIFTRT:
-    case ASHIFT:
     case ASHIFTRT:
+      op0 = XEXP (x, 0);
+      op1 = XEXP (x, 1);
 
-      /* Shifting by a register often takes an extra cycle.  */
-      if (speed && !CONST_INT_P (XEXP (x, 1)))
-	*cost += extra_cost->alu.arith_shift_reg;
+      if (CONST_INT_P (op1))
+        {
+	  /* ASR (immediate) and friends.  */
+	  if (speed)
+	    *cost += extra_cost->alu.shift;
 
-      *cost += rtx_cost (XEXP (x, 0), ASHIFT, 0, speed);
-      return true;
+	  *cost += rtx_cost (op0, (enum rtx_code) code, 0, speed);
+	  return true;
+        }
+      else
+        {
+
+	  /* ASR (register) and friends.  */
+	  if (speed)
+	    *cost += extra_cost->alu.shift_reg;
+
+	  return false;  /* All arguments need to be in registers.  */
+        }
 
     case HIGH:
       if (!CONSTANT_P (XEXP (x, 0)))
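
For reference, the sketch below restates the cost rule the patch encodes, outside of GCC's RTL machinery: shifts and rotates by an immediate are charged the cheaper alu.shift cost, shifts by a register the alu.shift_reg cost, and a zero/sign extend feeding an immediate left shift adds nothing because the bitfield-move forms absorb it. This is an illustrative sketch only; the struct and function names (shift_cost_model, cost_shift) are made up here and are not part of the commit, which works inside aarch64_rtx_costs on RTL expressions and the generic extra_cost tables shown in the diff above.

/* Minimal standalone sketch of the shift-costing rule added by this patch.
   Names below are hypothetical; example cost numbers are arbitrary.  */
#include <stdbool.h>
#include <stdio.h>

struct shift_cost_model
{
  int shift;      /* Cost of LSL/ASR/ROR with an immediate shift amount.   */
  int shift_reg;  /* Cost of LSLV/ASRV/RORV with a register shift amount.  */
};

/* Return the extra cost of a shift or rotate.  Immediate amounts use the
   alu.shift-style cost; register amounts use the alu.shift_reg-style cost.
   A zero/sign extend folded into an immediate left shift is free, because
   UBFIZ/SBFIZ and friends perform the extend as part of the shift.  */
static int
cost_shift (const struct shift_cost_model *m, bool amount_is_const,
	    bool operand_is_extend)
{
  if (amount_is_const)
    {
      /* LSL (immediate), UBFM, UBFIZ and friends: the extend, if any,
	 is absorbed and does not add to the cost.  */
      (void) operand_is_extend;
      return m->shift;
    }

  /* LSLV/ASRV/RORV: shifting by a register.  */
  return m->shift_reg;
}

int
main (void)
{
  struct shift_cost_model m = { .shift = 1, .shift_reg = 2 };

  printf ("x << 3       -> cost %d\n", cost_shift (&m, true, false));
  printf ("ext (x) << 3 -> cost %d\n", cost_shift (&m, true, true));
  printf ("x << n       -> cost %d\n", cost_shift (&m, false, false));
  return 0;
}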