author      wilco <wilco@138bc75d-0d04-0410-961f-82ee72b054a4>    2016-09-01 11:34:49 +0000
committer   wilco <wilco@138bc75d-0d04-0410-961f-82ee72b054a4>    2016-09-01 11:34:49 +0000
commit      156b069e85e47a97b3c4193584e9385ee500ad29 (patch)
tree        b02de5909c79bd9673638eac207a1c2a2d817c11 /gcc
parent      1c02615cb22c38bd5529b1e236603324b9ae8553 (diff)
download    gcc-156b069e85e47a97b3c4193584e9385ee500ad29.tar.gz
This patch adds the legitimize_address_displacement hook so that stack accesses
with large offsets are split into a more efficient sequence.  Unaligned and
TI/TFmode accesses use a 256-byte range, byte and halfword accesses use a 4KB
range, and wider accesses use a 16KB range to maximise the available addressing
range and increase opportunities to share the base address.

int f(int x)
{
  int arr[8192];
  arr[4096] = 0;
  arr[6000] = 0;
  arr[7000] = 0;
  arr[8191] = 0;
  return arr[x];
}

Now generates:

        sub     sp, sp, #32768
        add     x1, sp, 16384
        str     wzr, [x1]
        str     wzr, [x1, 7616]
        str     wzr, [x1, 11616]
        str     wzr, [x1, 16380]
        ldr     w0, [sp, w0, sxtw 2]
        add     sp, sp, 32768
        ret

gcc/
	* config/aarch64/aarch64.c (aarch64_legitimize_address_displacement):
	New function.
	(TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT): Define.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@239923 138bc75d-0d04-0410-961f-82ee72b054a4
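As an aside, the base/offset split described above can be illustrated with a
minimal standalone C sketch (not part of the patch, and not GCC code): the
names split_displacement, mode_size and ti_tf are invented for this example
and stand in for the rtx operands, GET_MODE_SIZE (mode) and the TImode/TFmode
check that the real hook uses.  Running it reproduces the offsets seen in the
generated assembly above.

#include <stdio.h>

/* Mirror the mask selection used by the new hook: a 4KB range for 1- and
   2-byte accesses, a 16KB range otherwise, and a 256-byte range for
   TI/TFmode or unaligned displacements.  */
static void
split_displacement (long disp, int mode_size, int ti_tf)
{
  long mask = mode_size < 4 ? 0xfff : 0x3fff;

  if (ti_tf || (disp & (mode_size - 1)) != 0)
    mask = 0xff;

  printf ("disp %ld -> base %ld + offset %ld\n",
          disp, disp & ~mask, disp & mask);
}

int
main (void)
{
  split_displacement (4096 * 4, 4, 0);  /* arr[4096]: base 16384, offset 0      */
  split_displacement (6000 * 4, 4, 0);  /* arr[6000]: base 16384, offset 7616   */
  split_displacement (7000 * 4, 4, 0);  /* arr[7000]: base 16384, offset 11616  */
  split_displacement (8191 * 4, 4, 0);  /* arr[8191]: base 16384, offset 16380  */
  return 0;
}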
Diffstat (limited to 'gcc')
-rw-r--r--    gcc/ChangeLog    6
-rw-r--r--    gcc/config/aarch64/aarch64.c    22
2 files changed, 28 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index bf5c54901e7..dca92035308 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2016-09-01 Wilco Dijkstra <wdijkstr@arm.com>
+
+ * config/aarch64/aarch64.c (aarch64_legitimize_address_displacement):
+ New function.
+ (TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT): Define.
+
2016-09-01 Kyrylo Tkachov <kyrylo.tkachov@arm.com>

	* config/aarch64/aarch64.md (*ands<mode>_compare0): New pattern.
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index e813d66b40a..5efad462f11 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -4191,6 +4191,24 @@ aarch64_legitimate_address_p (machine_mode mode, rtx x,
return aarch64_classify_address (&addr, x, mode, outer_code, strict_p);
}

+/* Split an out-of-range address displacement into a base and offset.
+ Use 4KB range for 1- and 2-byte accesses and a 16KB range otherwise
+ to increase opportunities for sharing the base address of different sizes.
+ For TI/TFmode and unaligned accesses use a 256-byte range. */
+static bool
+aarch64_legitimize_address_displacement (rtx *disp, rtx *off, machine_mode mode)
+{
+ HOST_WIDE_INT mask = GET_MODE_SIZE (mode) < 4 ? 0xfff : 0x3fff;
+
+ if (mode == TImode || mode == TFmode
+     || (INTVAL (*disp) & (GET_MODE_SIZE (mode) - 1)) != 0)
+ mask = 0xff;
+
+ *off = GEN_INT (INTVAL (*disp) & ~mask);
+ *disp = GEN_INT (INTVAL (*disp) & mask);
+ return true;
+}
+
/* Return TRUE if rtx X is immediate constant 0.0 */
bool
aarch64_float_const_zero_rtx_p (rtx x)
@@ -14135,6 +14153,10 @@ aarch64_optab_supported_p (int op, machine_mode mode1, machine_mode,
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P aarch64_legitimate_constant_p

+#undef TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT
+#define TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT \
+ aarch64_legitimize_address_displacement
+
#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE aarch64_libgcc_cmp_return_mode