summaryrefslogtreecommitdiff
path: root/gcc/config/avr
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/config/avr')
-rw-r--r--gcc/config/avr/avr-c.c40
-rw-r--r--gcc/config/avr/avr-devices.c36
-rw-r--r--gcc/config/avr/avr-log.c95
-rw-r--r--gcc/config/avr/avr-protos.h25
-rw-r--r--gcc/config/avr/avr.c2840
-rw-r--r--gcc/config/avr/avr.h28
-rw-r--r--gcc/config/avr/avr.md831
-rw-r--r--gcc/config/avr/predicates.md12
8 files changed, 3009 insertions, 898 deletions
diff --git a/gcc/config/avr/avr-c.c b/gcc/config/avr/avr-c.c
index 55de2d7fe4b..fd03b361b5e 100644
--- a/gcc/config/avr/avr-c.c
+++ b/gcc/config/avr/avr-c.c
@@ -18,6 +18,7 @@
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
+/* Not included in avr.c since this requires C front end. */
#include "config.h"
#include "system.h"
@@ -27,8 +28,23 @@
#include "cpplib.h"
#include "tree.h"
#include "c-family/c-common.h"
+#include "langhooks.h"
+
+
+/* Implement `REGISTER_TARGET_PRAGMAS'. */
+
+void
+avr_register_target_pragmas (void)
+{
+ c_register_addr_space ("__pgm", ADDR_SPACE_PGM);
+ c_register_addr_space ("__pgm1", ADDR_SPACE_PGM1);
+ c_register_addr_space ("__pgm2", ADDR_SPACE_PGM2);
+ c_register_addr_space ("__pgm3", ADDR_SPACE_PGM3);
+ c_register_addr_space ("__pgm4", ADDR_SPACE_PGM4);
+ c_register_addr_space ("__pgm5", ADDR_SPACE_PGM5);
+ c_register_addr_space ("__pgmx", ADDR_SPACE_PGMX);
+}
-/* Not included in avr.c since this requires C front end. */
/* Worker function for TARGET_CPU_CPP_BUILTINS. */
@@ -90,6 +106,26 @@ avr_cpu_cpp_builtins (struct cpp_reader *pfile)
cpp_define (pfile, "__AVR_ERRATA_SKIP_JMP_CALL__");
}
+ cpp_define_formatted (pfile, "__AVR_SFR_OFFSET__=0x%x",
+ avr_current_arch->sfr_offset);
+
+ /* Define builtin macros so that the user can easily query if or if not
+ non-generic address spaces (and which) are supported.
+ This is only supported for C. For C++, a language extension is needed
+ (as mentioned in ISO/IEC DTR 18037; Annex F.2) which is not
+ implemented in GCC up to now. */
+
+ if (!strcmp (lang_hooks.name, "GNU C"))
+ {
+ cpp_define (pfile, "__PGM=__pgm");
+ cpp_define (pfile, "__PGM1=__pgm1");
+ cpp_define (pfile, "__PGM2=__pgm2");
+ cpp_define (pfile, "__PGM3=__pgm3");
+ cpp_define (pfile, "__PGM4=__pgm4");
+ cpp_define (pfile, "__PGM5=__pgm5");
+ cpp_define (pfile, "__PGMX=__pgmx");
+ }
+
/* Define builtin macros so that the user can
easily query if or if not a specific builtin
is available. */
@@ -100,6 +136,8 @@ avr_cpu_cpp_builtins (struct cpp_reader *pfile)
cpp_define (pfile, "__BUILTIN_AVR_WDR");
cpp_define (pfile, "__BUILTIN_AVR_SLEEP");
cpp_define (pfile, "__BUILTIN_AVR_SWAP");
+ cpp_define (pfile, "__BUILTIN_AVR_MAP8");
+ cpp_define (pfile, "__BUILTIN_AVR_MAP16");
cpp_define (pfile, "__BUILTIN_AVR_DELAY_CYCLES");
cpp_define (pfile, "__BUILTIN_AVR_FMUL");
diff --git a/gcc/config/avr/avr-devices.c b/gcc/config/avr/avr-devices.c
index d0dda8536af..8d6e947f129 100644
--- a/gcc/config/avr/avr-devices.c
+++ b/gcc/config/avr/avr-devices.c
@@ -23,20 +23,30 @@
#include "coretypes.h"
#include "tm.h"
-/* List of all known AVR MCU architectures. */
+/* List of all known AVR MCU architectures.
+ Order as of enum avr_arch from avr.h. */
-const struct base_arch_s avr_arch_types[] = {
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */
- { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" },
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" },
- { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" },
- { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" },
- { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" },
- { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" },
- { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" },
- { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" },
- { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" },
- { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" }
+const struct base_arch_s
+avr_arch_types[] =
+{
+ /* unknown device specified */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, 32, 1, NULL, "avr2" },
+ /*
+ A M J LM E E E d S S O # F
+ S U M PO L L I a t F ff 6 l
+ M L P MV P P J - - t a R s 4 a
+ XW M M M a r e s
+ X P t t k h */
+ { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=1", "avr1" },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=2", "avr2" },
+ { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=25", "avr25" },
+ { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=3", "avr3" },
+ { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0x0060, 32, 2, "__AVR_ARCH__=31", "avr31" },
+ { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=35", "avr35" },
+ { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=4", "avr4" },
+ { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0x0060, 32, 1, "__AVR_ARCH__=5", "avr5" },
+ { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, 32, 2, "__AVR_ARCH__=51", "avr51" },
+ { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0x0060, 32, 4, "__AVR_ARCH__=6", "avr6" }
};
const struct mcu_type_s avr_mcu_types[] = {
diff --git a/gcc/config/avr/avr-log.c b/gcc/config/avr/avr-log.c
index cdeb6690fde..3c4bccfa282 100644
--- a/gcc/config/avr/avr-log.c
+++ b/gcc/config/avr/avr-log.c
@@ -49,6 +49,8 @@
C: enum rtx_code
m: enum machine_mode
R: enum reg_class
+ D: double_int (signed decimal)
+ X: double_int (unsigned hex)
L: insn list
H: location_t
@@ -59,6 +61,8 @@
F: caller (via __FUNCTION__)
P: Pass name and number
?: Print caller, current function and pass info
+ !: Ditto, but only print if in a pass with static pass number,
+ else return.
== same as printf ==
@@ -80,9 +84,9 @@ static void avr_log_vadump (FILE*, const char*, va_list);
/* As we have no variadic macros, avr_edump maps to a call to
avr_log_set_caller_e which saves __FUNCTION__ to avr_log_caller and
- returns a function pointer to avr_log_fdump_e. avr_fdump_e
+ returns a function pointer to avr_log_fdump_e. avr_log_fdump_e
gets the printf-like arguments and calls avr_log_vadump, the
- worker function. avr_fdump works the same way. */
+ worker function. avr_fdump works the same way. */
/* Provide avr_log_fdump_e/f so that avr_log_set_caller_e/_f can return
their address. */
@@ -133,6 +137,49 @@ avr_log_set_caller_f (const char *caller)
return avr_log_fdump_f;
}
+
+/* Copy-paste from double-int.c:double_int_split_digit (it's static there).
+ Splits last digit of *CST (taken as unsigned) in BASE and returns it. */
+
+static unsigned
+avr_double_int_pop_digit (double_int *cst, unsigned base)
+{
+ unsigned HOST_WIDE_INT resl, reml;
+ HOST_WIDE_INT resh, remh;
+
+ div_and_round_double (FLOOR_DIV_EXPR, true, cst->low, cst->high, base, 0,
+ &resl, &resh, &reml, &remh);
+ cst->high = resh;
+ cst->low = resl;
+
+ return reml;
+}
+
+
+/* Dump VAL as hex value to FILE. */
+
+static void
+avr_dump_double_int_hex (FILE *file, double_int val)
+{
+ unsigned digit[4];
+
+ digit[0] = avr_double_int_pop_digit (&val, 1 << 16);
+ digit[1] = avr_double_int_pop_digit (&val, 1 << 16);
+ digit[2] = avr_double_int_pop_digit (&val, 1 << 16);
+ digit[3] = avr_double_int_pop_digit (&val, 1 << 16);
+
+ fprintf (file, "0x");
+
+ if (digit[3] | digit[2])
+ fprintf (file, "%04x%04x", digit[3], digit[2]);
+
+ if (digit[3] | digit[2] | digit[1] | digit[0])
+ fprintf (file, "%04x%04x", digit[1], digit[0]);
+ else
+ fprintf (file, "0");
+}
+
+
/* Worker function implementing the %-codes and forwarding to
respective print/dump function. */
@@ -187,6 +234,14 @@ avr_log_vadump (FILE *file, const char *fmt, va_list ap)
fprintf (file, "%d", va_arg (ap, int));
break;
+ case 'D':
+ dump_double_int (file, va_arg (ap, double_int), false);
+ break;
+
+ case 'X':
+ avr_dump_double_int_hex (file, va_arg (ap, double_int));
+ break;
+
case 'x':
fprintf (file, "%x", va_arg (ap, int));
break;
@@ -249,7 +304,7 @@ avr_log_vadump (FILE *file, const char *fmt, va_list ap)
location_t loc = va_arg (ap, location_t);
if (BUILTINS_LOCATION == loc)
- fprintf (file, "<BUILTIN-LOCATION");
+ fprintf (file, "<BUILTIN-LOCATION>");
else if (UNKNOWN_LOCATION == loc)
fprintf (file, "<UNKNOWN-LOCATION>");
else
@@ -304,27 +359,43 @@ avr_log_vadump (FILE *file, const char *fmt, va_list ap)
void
avr_log_set_avr_log (void)
{
- if (avr_log_details)
+ bool all = TARGET_ALL_DEBUG != 0;
+
+ if (all || avr_log_details)
{
/* Adding , at beginning and end of string makes searching easier. */
char *str = (char*) alloca (3 + strlen (avr_log_details));
+ bool info;
str[0] = ',';
strcat (stpcpy (str+1, avr_log_details), ",");
-
-#define SET_DUMP_DETAIL(S) \
- avr_log.S = (TARGET_ALL_DEBUG \
- || NULL != strstr (str, "," #S ",") \
- || NULL != strstr (str, ",all,"))
- SET_DUMP_DETAIL (rtx_costs);
+ all |= NULL != strstr (str, ",all,");
+ info = NULL != strstr (str, ",?,");
+
+ if (info)
+ fprintf (stderr, "\n-mlog=");
+
+#define SET_DUMP_DETAIL(S) \
+ do { \
+ avr_log.S = (all || NULL != strstr (str, "," #S ",")); \
+ if (info) \
+ fprintf (stderr, #S ","); \
+ } while (0)
+
+ SET_DUMP_DETAIL (address_cost);
+ SET_DUMP_DETAIL (builtin);
+ SET_DUMP_DETAIL (constraints);
SET_DUMP_DETAIL (legitimate_address_p);
SET_DUMP_DETAIL (legitimize_address);
SET_DUMP_DETAIL (legitimize_reload_address);
- SET_DUMP_DETAIL (constraints);
- SET_DUMP_DETAIL (address_cost);
+ SET_DUMP_DETAIL (progmem);
+ SET_DUMP_DETAIL (rtx_costs);
#undef SET_DUMP_DETAIL
+
+ if (info)
+ fprintf (stderr, "?\n\n");
}
}
diff --git a/gcc/config/avr/avr-protos.h b/gcc/config/avr/avr-protos.h
index d3518335a00..bafd794a302 100644
--- a/gcc/config/avr/avr-protos.h
+++ b/gcc/config/avr/avr-protos.h
@@ -32,7 +32,9 @@ extern int avr_initial_elimination_offset (int from, int to);
extern int avr_simple_epilogue (void);
extern int avr_hard_regno_rename_ok (unsigned int, unsigned int);
extern rtx avr_return_addr_rtx (int count, rtx tem);
+extern void avr_register_target_pragmas (void);
extern bool avr_accumulate_outgoing_args (void);
+extern void avr_init_expanders (void);
#ifdef TREE_CODE
extern void avr_asm_output_aligned_decl_common (FILE*, const_tree, const char*, unsigned HOST_WIDE_INT, unsigned int, bool);
@@ -47,15 +49,8 @@ extern void init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
#endif /* TREE_CODE */
#ifdef RTX_CODE
-extern void asm_output_external_libcall (FILE *file, rtx symref);
extern const char *output_movqi (rtx insn, rtx operands[], int *l);
extern const char *output_movhi (rtx insn, rtx operands[], int *l);
-extern const char *out_movqi_r_mr (rtx insn, rtx op[], int *l);
-extern const char *out_movqi_mr_r (rtx insn, rtx op[], int *l);
-extern const char *out_movhi_r_mr (rtx insn, rtx op[], int *l);
-extern const char *out_movhi_mr_r (rtx insn, rtx op[], int *l);
-extern const char *out_movsi_r_mr (rtx insn, rtx op[], int *l);
-extern const char *out_movsi_mr_r (rtx insn, rtx op[], int *l);
extern const char *output_movsisf (rtx insn, rtx operands[], int *l);
extern const char *avr_out_tstsi (rtx, rtx*, int*);
extern const char *avr_out_tsthi (rtx, rtx*, int*);
@@ -84,6 +79,7 @@ extern bool avr_rotate_bytes (rtx operands[]);
extern void expand_prologue (void);
extern void expand_epilogue (bool);
+extern bool avr_emit_movmemhi (rtx*);
extern int avr_epilogue_uses (int regno);
extern int avr_starting_frame_offset (void);
@@ -94,6 +90,9 @@ extern const char* avr_out_bitop (rtx, rtx*, int*);
extern const char* avr_out_plus (rtx*, int*, int*);
extern const char* avr_out_plus_noclobber (rtx*, int*, int*);
extern const char* avr_out_addto_sp (rtx*, int*);
+extern const char* avr_out_xload (rtx, rtx*, int*);
+extern const char* avr_out_movmem (rtx, rtx*, int*);
+extern const char* avr_out_map_bits (rtx, rtx*, int*);
extern bool avr_popcount_each_byte (rtx, int, int);
extern int extra_constraint_Q (rtx x);
@@ -121,6 +120,10 @@ extern reg_class_t avr_mode_code_base_reg_class (enum machine_mode, addr_space_t
extern bool avr_regno_mode_code_ok_for_base_p (int, enum machine_mode, addr_space_t, RTX_CODE, RTX_CODE);
extern rtx avr_incoming_return_addr_rtx (void);
extern rtx avr_legitimize_reload_address (rtx*, enum machine_mode, int, int, int, int, rtx (*)(rtx,int));
+extern bool avr_mem_pgm_p (rtx);
+extern bool avr_mem_pgmx_p (rtx);
+extern bool avr_load_libgcc_p (rtx);
+extern bool avr_xload_libgcc_p (enum machine_mode);
#endif /* RTX_CODE */
#ifdef REAL_VALUE_TYPE
@@ -139,12 +142,14 @@ extern void avr_log_set_avr_log (void);
typedef struct
{
- unsigned rtx_costs :1;
+ unsigned address_cost :1;
+ unsigned builtin :1;
+ unsigned constraints :1;
unsigned legitimate_address_p :1;
unsigned legitimize_address :1;
unsigned legitimize_reload_address :1;
- unsigned constraints :1;
- unsigned address_cost :1;
+ unsigned progmem :1;
+ unsigned rtx_costs :1;
} avr_log_t;
extern avr_log_t avr_log;
diff --git a/gcc/config/avr/avr.c b/gcc/config/avr/avr.c
index 65d43b8ca07..bb4a08dad71 100644
--- a/gcc/config/avr/avr.c
+++ b/gcc/config/avr/avr.c
@@ -35,6 +35,7 @@
#include "tree.h"
#include "output.h"
#include "expr.h"
+#include "c-family/c-common.h"
#include "diagnostic-core.h"
#include "obstack.h"
#include "function.h"
@@ -54,11 +55,21 @@
/* Return true if STR starts with PREFIX and false, otherwise. */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
-#define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
+/* The 4 bits starting at SECTION_MACH_DEP are reverved to store
+ 1 + flash segment where progmem data is to be located.
+ For example, data with __pgm2 is stored as (1+2) * SECTION_MACH_DEP. */
+#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
/* Prototypes for local helper functions. */
+static const char* out_movqi_r_mr (rtx, rtx[], int*);
+static const char* out_movhi_r_mr (rtx, rtx[], int*);
+static const char* out_movsi_r_mr (rtx, rtx[], int*);
+static const char* out_movqi_mr_r (rtx, rtx[], int*);
+static const char* out_movhi_mr_r (rtx, rtx[], int*);
+static const char* out_movsi_mr_r (rtx, rtx[], int*);
+
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
@@ -84,12 +95,28 @@ static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
/* Allocate registers from r25 to r8 for parameters for function calls. */
#define FIRST_CUM_REG 26
+/* Implicit target register of LPM instruction (R0) */
+static GTY(()) rtx lpm_reg_rtx;
+
+/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
+static GTY(()) rtx lpm_addr_reg_rtx;
+
/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
static GTY(()) rtx tmp_reg_rtx;
/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;
+/* RAMPZ special function register */
+static GTY(()) rtx rampz_rtx;
+
+/* RTX containing the strings "" and "e", respectively */
+static GTY(()) rtx xstring_empty;
+static GTY(()) rtx xstring_e;
+
+/* RTXs for all general purpose registers as QImode */
+static GTY(()) rtx all_regs_rtx[32];
+
/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;
@@ -106,7 +133,17 @@ const struct mcu_type_s *avr_current_device;
static GTY(()) section *progmem_swtable_section;
/* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
-static GTY(()) section *progmem_section;
+static GTY(()) section *progmem_section[6];
+
+static const char * const progmem_section_prefix[6] =
+ {
+ ".progmem.data",
+ ".progmem1.data",
+ ".progmem2.data",
+ ".progmem3.data",
+ ".progmem4.data",
+ ".progmem5.data"
+ };
/* To track if code will use .bss and/or .data. */
bool avr_need_clear_bss_p = false;
@@ -172,9 +209,6 @@ bool avr_need_copy_data_p = false;
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
-#undef TARGET_LEGITIMIZE_ADDRESS
-#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
-
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
@@ -189,9 +223,6 @@ bool avr_need_copy_data_p = false;
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
-#undef TARGET_LEGITIMATE_ADDRESS_P
-#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
-
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
@@ -221,6 +252,24 @@ bool avr_need_copy_data_p = false;
#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
+#undef TARGET_ADDR_SPACE_SUBSET_P
+#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
+
+#undef TARGET_ADDR_SPACE_CONVERT
+#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
+
+#undef TARGET_ADDR_SPACE_ADDRESS_MODE
+#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
+
+#undef TARGET_ADDR_SPACE_POINTER_MODE
+#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
+
+#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
+#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
+
+#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
+#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
+
/* Custom function to replace string prefix.
@@ -323,9 +372,6 @@ avr_option_override (void)
avr_current_arch = &avr_arch_types[avr_current_device->arch];
avr_extra_arch_macro = avr_current_device->macro;
- tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
- zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
-
init_machine_status = avr_init_machine_status;
avr_log_set_avr_log();
@@ -339,6 +385,38 @@ avr_init_machine_status (void)
return ggc_alloc_cleared_machine_function ();
}
+
+/* Implement `INIT_EXPANDERS'. */
+/* The function works like a singleton. */
+
+void
+avr_init_expanders (void)
+{
+ int regno;
+
+ static bool done = false;
+
+ if (done)
+ return;
+ else
+ done = true;
+
+ for (regno = 0; regno < 32; regno ++)
+ all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
+
+ lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
+ tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
+ zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
+
+ lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
+
+ rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
+
+ xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
+ xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
+}
+
+
/* Return register class for register R. */
enum reg_class
@@ -384,6 +462,81 @@ avr_scalar_mode_supported_p (enum machine_mode mode)
}
+/* Return the segment number of pgm address space AS, i.e.
+ the 64k block it lives in.
+ Return -1 if unknown, i.e. 24-bit AS in flash.
+ Return -2 for anything else. */
+
+static int
+avr_pgm_segment (addr_space_t as)
+{
+ switch (as)
+ {
+ default: return -2;
+
+ case ADDR_SPACE_PGMX: return -1;
+ case ADDR_SPACE_PGM: return 0;
+ case ADDR_SPACE_PGM1: return 1;
+ case ADDR_SPACE_PGM2: return 2;
+ case ADDR_SPACE_PGM3: return 3;
+ case ADDR_SPACE_PGM4: return 4;
+ case ADDR_SPACE_PGM5: return 5;
+ }
+}
+
+
+/* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
+
+static bool
+avr_decl_pgm_p (tree decl)
+{
+ if (TREE_CODE (decl) != VAR_DECL
+ || TREE_TYPE (decl) == error_mark_node)
+ {
+ return false;
+ }
+
+ return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
+}
+
+
+/* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
+ address space and FALSE, otherwise. */
+
+static bool
+avr_decl_pgmx_p (tree decl)
+{
+ if (TREE_CODE (decl) != VAR_DECL
+ || TREE_TYPE (decl) == error_mark_node)
+ {
+ return false;
+ }
+
+ return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
+}
+
+
+/* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
+
+bool
+avr_mem_pgm_p (rtx x)
+{
+ return (MEM_P (x)
+ && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
+}
+
+
+/* Return TRUE if X is a MEM rtx located in the 24-bit Flash
+ address space and FALSE, otherwise. */
+
+bool
+avr_mem_pgmx_p (rtx x)
+{
+ return (MEM_P (x)
+ && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
+}
+
+
/* A helper for the subsequent function attribute used to dig for
attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
@@ -989,8 +1142,7 @@ expand_prologue (void)
&& TEST_HARD_REG_BIT (set, REG_Z)
&& TEST_HARD_REG_BIT (set, REG_Z + 1))
{
- emit_move_insn (tmp_reg_rtx,
- gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
+ emit_move_insn (tmp_reg_rtx, rampz_rtx);
emit_push_byte (TMP_REGNO, false);
}
@@ -1228,8 +1380,7 @@ expand_epilogue (bool sibcall_p)
&& TEST_HARD_REG_BIT (set, REG_Z + 1))
{
emit_pop_byte (TMP_REGNO);
- emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
- tmp_reg_rtx);
+ emit_move_insn (rampz_rtx, tmp_reg_rtx);
}
/* Restore SREG using tmp reg as scratch. */
@@ -1379,6 +1530,9 @@ avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
return ok;
}
+
+/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
+ now only a helper for avr_addr_space_legitimize_address. */
/* Attempts to replace X with a valid
memory address for an operand of mode MODE */
@@ -1641,8 +1795,9 @@ print_operand_address (FILE *file, rtx addr)
}
-/* Output X as assembler operand to file FILE. */
-
+/* Output X as assembler operand to file FILE.
+ For a description of supported %-codes, see top of avr.md. */
+
void
print_operand (FILE *file, rtx x, int code)
{
@@ -1661,6 +1816,31 @@ print_operand (FILE *file, rtx x, int code)
if (AVR_HAVE_EIJMP_EICALL)
fputc ('e', file);
}
+ else if (code == 't'
+ || code == 'T')
+ {
+ static int t_regno = -1;
+ static int t_nbits = -1;
+
+ if (REG_P (x) && t_regno < 0 && code == 'T')
+ {
+ t_regno = REGNO (x);
+ t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
+ }
+ else if (CONST_INT_P (x) && t_regno >= 0
+ && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
+ {
+ int bpos = INTVAL (x);
+
+ fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
+ if (code == 'T')
+ fprintf (file, ",%d", bpos % 8);
+
+ t_regno = -1;
+ }
+ else
+ fatal_insn ("operands to %T/%t must be reg + const_int:", x);
+ }
else if (REG_P (x))
{
if (x == zero_reg_rtx)
@@ -1668,16 +1848,39 @@ print_operand (FILE *file, rtx x, int code)
else
fprintf (file, reg_names[true_regnum (x) + abcd]);
}
- else if (GET_CODE (x) == CONST_INT)
- fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
- else if (GET_CODE (x) == MEM)
+ else if (CONST_INT_P (x))
+ {
+ HOST_WIDE_INT ival = INTVAL (x);
+
+ if ('i' != code)
+ fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
+ else if (low_io_address_operand (x, VOIDmode)
+ || high_io_address_operand (x, VOIDmode))
+ {
+ switch (ival)
+ {
+ case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
+ case SREG_ADDR: fprintf (file, "__SREG__"); break;
+ case SP_ADDR: fprintf (file, "__SP_L__"); break;
+ case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
+
+ default:
+ fprintf (file, HOST_WIDE_INT_PRINT_HEX,
+ ival - avr_current_arch->sfr_offset);
+ break;
+ }
+ }
+ else
+ fatal_insn ("bad address, not an I/O address:", x);
+ }
+ else if (MEM_P (x))
{
rtx addr = XEXP (x, 0);
if (code == 'm')
{
if (!CONSTANT_P (addr))
- fatal_insn ("bad address, not a constant):", addr);
+ fatal_insn ("bad address, not a constant:", addr);
/* Assembler template with m-code is data - not progmem section */
if (text_segment_operand (addr, VOIDmode))
if (warning (0, "accessing data memory with"
@@ -1688,6 +1891,10 @@ print_operand (FILE *file, rtx x, int code)
}
output_addr_const (file, addr);
}
+ else if (code == 'i')
+ {
+ print_operand (file, addr, 'i');
+ }
else if (code == 'o')
{
if (GET_CODE (addr) != PLUS)
@@ -1717,6 +1924,10 @@ print_operand (FILE *file, rtx x, int code)
else
print_operand_address (file, addr);
}
+ else if (code == 'i')
+ {
+ fatal_insn ("bad address, not an I/O address:", x);
+ }
else if (code == 'x')
{
/* Constant progmem address - like used in jmp or call */
@@ -1740,6 +1951,8 @@ print_operand (FILE *file, rtx x, int code)
REAL_VALUE_TO_TARGET_SINGLE (rv, val);
fprintf (file, "0x%lx", val);
}
+ else if (GET_CODE (x) == CONST_STRING)
+ fputs (XSTR (x, 0), file);
else if (code == 'j')
fputs (cond_string (GET_CODE (x)), file);
else if (code == 'k')
@@ -2177,6 +2390,436 @@ avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
/***********************************************************************
Functions for outputting various mov's for a various modes
************************************************************************/
+
+/* Return true if a value of mode MODE is read from flash by
+ __load_* function from libgcc. */
+
+bool
+avr_load_libgcc_p (rtx op)
+{
+ enum machine_mode mode = GET_MODE (op);
+ int n_bytes = GET_MODE_SIZE (mode);
+
+ return (n_bytes > 2
+ && !AVR_HAVE_LPMX
+ && avr_mem_pgm_p (op));
+}
+
+/* Return true if a value of mode MODE is read by __xload_* function. */
+
+bool
+avr_xload_libgcc_p (enum machine_mode mode)
+{
+ int n_bytes = GET_MODE_SIZE (mode);
+
+ return (n_bytes > 1
+ && avr_current_arch->n_segments > 1
+ && !AVR_HAVE_ELPMX);
+}
+
+
+/* Find an unused d-register to be used as scratch in INSN.
+ EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
+ is a register, skip all possible return values that overlap EXCLUDE.
+ The policy for the returned register is similar to that of
+ `reg_unused_after', i.e. the returned register may overlap the SET_DEST
+ of INSN.
+
+ Return a QImode d-register or NULL_RTX if nothing found. */
+
+static rtx
+avr_find_unused_d_reg (rtx insn, rtx exclude)
+{
+ int regno;
+ bool isr_p = (interrupt_function_p (current_function_decl)
+ || signal_function_p (current_function_decl));
+
+ for (regno = 16; regno < 32; regno++)
+ {
+ rtx reg = all_regs_rtx[regno];
+
+ if ((exclude
+ && reg_overlap_mentioned_p (exclude, reg))
+ || fixed_regs[regno])
+ {
+ continue;
+ }
+
+ /* Try non-live register */
+
+ if (!df_regs_ever_live_p (regno)
+ && (TREE_THIS_VOLATILE (current_function_decl)
+ || cfun->machine->is_OS_task
+ || cfun->machine->is_OS_main
+ || (!isr_p && call_used_regs[regno])))
+ {
+ return reg;
+ }
+
+ /* Any live register can be used if it is unused after.
+ Prologue/epilogue will care for it as needed. */
+
+ if (df_regs_ever_live_p (regno)
+ && reg_unused_after (insn, reg))
+ {
+ return reg;
+ }
+ }
+
+ return NULL_RTX;
+}
+
+
+/* Helper function for the next function in the case where only restricted
+ version of LPM instruction is available. */
+
+static const char*
+avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
+{
+ rtx dest = xop[0];
+ rtx addr = xop[1];
+ int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
+ int regno_dest;
+
+ regno_dest = REGNO (dest);
+
+ /* The implicit target register of LPM. */
+ xop[3] = lpm_reg_rtx;
+
+ switch (GET_CODE (addr))
+ {
+ default:
+ gcc_unreachable();
+
+ case REG:
+
+ gcc_assert (REG_Z == REGNO (addr));
+
+ switch (n_bytes)
+ {
+ default:
+ gcc_unreachable();
+
+ case 1:
+ avr_asm_len ("%4lpm", xop, plen, 1);
+
+ if (regno_dest != LPM_REGNO)
+ avr_asm_len ("mov %0,%3", xop, plen, 1);
+
+ return "";
+
+ case 2:
+ if (REGNO (dest) == REG_Z)
+ return avr_asm_len ("%4lpm" CR_TAB
+ "push %3" CR_TAB
+ "adiw %2,1" CR_TAB
+ "%4lpm" CR_TAB
+ "mov %B0,%3" CR_TAB
+ "pop %A0", xop, plen, 6);
+
+ avr_asm_len ("%4lpm" CR_TAB
+ "mov %A0,%3" CR_TAB
+ "adiw %2,1" CR_TAB
+ "%4lpm" CR_TAB
+ "mov %B0,%3", xop, plen, 5);
+
+ if (!reg_unused_after (insn, addr))
+ avr_asm_len ("sbiw %2,1", xop, plen, 1);
+
+ break; /* 2 */
+ }
+
+ break; /* REG */
+
+ case POST_INC:
+
+ gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
+ && n_bytes <= 4);
+
+ if (regno_dest == LPM_REGNO)
+ avr_asm_len ("%4lpm" CR_TAB
+ "adiw %2,1", xop, plen, 2);
+ else
+ avr_asm_len ("%4lpm" CR_TAB
+ "mov %A0,%3" CR_TAB
+ "adiw %2,1", xop, plen, 3);
+
+ if (n_bytes >= 2)
+ avr_asm_len ("%4lpm" CR_TAB
+ "mov %B0,%3" CR_TAB
+ "adiw %2,1", xop, plen, 3);
+
+ if (n_bytes >= 3)
+ avr_asm_len ("%4lpm" CR_TAB
+ "mov %C0,%3" CR_TAB
+ "adiw %2,1", xop, plen, 3);
+
+ if (n_bytes >= 4)
+ avr_asm_len ("%4lpm" CR_TAB
+ "mov %D0,%3" CR_TAB
+ "adiw %2,1", xop, plen, 3);
+
+ break; /* POST_INC */
+
+ } /* switch CODE (addr) */
+
+ return "";
+}
+
+
+/* If PLEN == NULL: Ouput instructions to load a value from a memory location
+ OP[1] in AS1 to register OP[0].
+ If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
+ Return "". */
+
+static const char*
+avr_out_lpm (rtx insn, rtx *op, int *plen)
+{
+ rtx xop[6];
+ rtx dest = op[0];
+ rtx src = SET_SRC (single_set (insn));
+ rtx addr;
+ int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
+ int regno_dest;
+ int segment;
+
+ if (plen)
+ *plen = 0;
+
+ if (MEM_P (dest))
+ {
+ warning (0, "writing to address space %qs not supported",
+ c_addr_space_name (MEM_ADDR_SPACE (dest)));
+
+ return "";
+ }
+
+ addr = XEXP (src, 0);
+
+ segment = avr_pgm_segment (MEM_ADDR_SPACE (src));
+
+ gcc_assert (REG_P (dest)
+ && ((segment >= 0
+ && (REG_P (addr) || POST_INC == GET_CODE (addr)))
+ || (GET_CODE (addr) == LO_SUM && segment == -1)));
+
+ if (segment == -1)
+ {
+ /* We are called from avr_out_xload because someone wrote
+ __pgmx on a device with just one flash segment. */
+
+ addr = XEXP (addr, 1);
+ }
+
+ xop[0] = dest;
+ xop[1] = addr;
+ xop[2] = lpm_addr_reg_rtx;
+ xop[4] = xstring_empty;
+ xop[5] = tmp_reg_rtx;
+
+ regno_dest = REGNO (dest);
+
+ /* Cut down segment number to a number the device actually
+ supports. We do this late to preserve the address space's
+ name for diagnostics. */
+
+ segment %= avr_current_arch->n_segments;
+
+ /* Set RAMPZ as needed. */
+
+ if (segment)
+ {
+ xop[4] = GEN_INT (segment);
+
+ if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
+ xop[3])
+ {
+ avr_asm_len ("ldi %3,%4" CR_TAB
+ "out __RAMPZ__,%3", xop, plen, 2);
+ }
+ else if (segment == 1)
+ {
+ avr_asm_len ("clr %5" CR_TAB
+ "inc %5" CR_TAB
+ "out __RAMPZ__,%5", xop, plen, 3);
+ }
+ else
+ {
+ avr_asm_len ("mov %5,%2" CR_TAB
+ "ldi %2,%4" CR_TAB
+ "out __RAMPZ__,%2" CR_TAB
+ "mov %2,%5", xop, plen, 4);
+ }
+
+ xop[4] = xstring_e;
+ }
+
+ if ((segment == 0 && !AVR_HAVE_LPMX)
+ || (segment != 0 && !AVR_HAVE_ELPMX))
+ {
+ return avr_out_lpm_no_lpmx (insn, xop, plen);
+ }
+
+ switch (GET_CODE (addr))
+ {
+ default:
+ gcc_unreachable();
+
+ case REG:
+
+ gcc_assert (REG_Z == REGNO (addr));
+
+ switch (n_bytes)
+ {
+ default:
+ gcc_unreachable();
+
+ case 1:
+ return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
+
+ case 2:
+ if (REGNO (dest) == REG_Z)
+ return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
+ "%4lpm %B0,%a2" CR_TAB
+ "mov %A0,%5", xop, plen, 3);
+ else
+ {
+ avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
+ "%4lpm %B0,%a2", xop, plen, 2);
+
+ if (!reg_unused_after (insn, addr))
+ avr_asm_len ("sbiw %2,1", xop, plen, 1);
+ }
+
+ break; /* 2 */
+
+ case 3:
+
+ avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
+ "%4lpm %B0,%a2+" CR_TAB
+ "%4lpm %C0,%a2", xop, plen, 3);
+
+ if (!reg_unused_after (insn, addr))
+ avr_asm_len ("sbiw %2,2", xop, plen, 1);
+
+ break; /* 3 */
+
+ case 4:
+
+ avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
+ "%4lpm %B0,%a2+", xop, plen, 2);
+
+ if (REGNO (dest) == REG_Z - 2)
+ return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
+ "%4lpm %C0,%a2" CR_TAB
+ "mov %D0,%5", xop, plen, 3);
+ else
+ {
+ avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
+ "%4lpm %D0,%a2", xop, plen, 2);
+
+ if (!reg_unused_after (insn, addr))
+ avr_asm_len ("sbiw %2,3", xop, plen, 1);
+ }
+
+ break; /* 4 */
+ } /* n_bytes */
+
+ break; /* REG */
+
+ case POST_INC:
+
+ gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
+ && n_bytes <= 4);
+
+ avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
+ if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
+ if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
+ if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
+
+ break; /* POST_INC */
+
+ } /* switch CODE (addr) */
+
+ return "";
+}
+
+
+/* Worker function for xload_<mode> and xload_8 insns. */
+
+const char*
+avr_out_xload (rtx insn, rtx *op, int *plen)
+{
+ rtx xop[5];
+ rtx reg = op[0];
+ int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
+ unsigned int regno = REGNO (reg);
+
+ if (avr_current_arch->n_segments == 1)
+ return avr_out_lpm (insn, op, plen);
+
+ xop[0] = reg;
+ xop[1] = op[1];
+ xop[2] = lpm_addr_reg_rtx;
+ xop[3] = lpm_reg_rtx;
+ xop[4] = tmp_reg_rtx;
+
+ avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
+
+ if (1 == n_bytes)
+ {
+ if (AVR_HAVE_ELPMX)
+ return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
+ else
+ return avr_asm_len ("elpm" CR_TAB
+ "mov %0,%3", xop, plen, 2);
+ }
+
+ gcc_assert (AVR_HAVE_ELPMX);
+
+ if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
+ {
+ /* Insn clobbers the Z-register so we can use post-increment. */
+
+ avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
+ if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
+ if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
+ if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
+
+ return "";
+ }
+
+ switch (n_bytes)
+ {
+ default:
+ gcc_unreachable();
+
+ case 2:
+ gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
+
+ return avr_asm_len ("elpm %4,%a2+" CR_TAB
+ "elpm %B0,%a2" CR_TAB
+ "mov %A0,%4", xop, plen, 3);
+
+ case 3:
+ case 4:
+ gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
+
+ avr_asm_len ("elpm %A0,%a2+" CR_TAB
+ "elpm %B0,%a2+", xop, plen, 2);
+
+ if (n_bytes == 3)
+ return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
+ else
+ return avr_asm_len ("elpm %4,%a2+" CR_TAB
+ "elpm %D0,%a2" CR_TAB
+ "mov %C0,%4", xop, plen, 3);
+ }
+
+ return "";
+}
+
+
const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
@@ -2185,6 +2828,12 @@ output_movqi (rtx insn, rtx operands[], int *l)
rtx src = operands[1];
int *real_l = l;
+ if (avr_mem_pgm_p (src)
+ || avr_mem_pgm_p (dest))
+ {
+ return avr_out_lpm (insn, operands, real_l);
+ }
+
if (!l)
l = &dummy;
@@ -2211,17 +2860,12 @@ output_movqi (rtx insn, rtx operands[], int *l)
}
else if (GET_CODE (dest) == MEM)
{
- const char *templ;
-
- if (src == const0_rtx)
- operands[1] = zero_reg_rtx;
-
- templ = out_movqi_mr_r (insn, operands, real_l);
+ rtx xop[2];
- if (!real_l)
- output_asm_insn (templ, operands);
+ xop[0] = dest;
+ xop[1] = src == const0_rtx ? zero_reg_rtx : src;
- operands[1] = src;
+ return out_movqi_mr_r (insn, xop, real_l);
}
return "";
}
@@ -2235,6 +2879,12 @@ output_movhi (rtx insn, rtx operands[], int *l)
rtx src = operands[1];
int *real_l = l;
+ if (avr_mem_pgm_p (src)
+ || avr_mem_pgm_p (dest))
+ {
+ return avr_out_lpm (insn, operands, real_l);
+ }
+
if (!l)
l = &dummy;
@@ -2286,93 +2936,79 @@ output_movhi (rtx insn, rtx operands[], int *l)
}
else if (GET_CODE (dest) == MEM)
{
- const char *templ;
-
- if (src == const0_rtx)
- operands[1] = zero_reg_rtx;
-
- templ = out_movhi_mr_r (insn, operands, real_l);
+ rtx xop[2];
- if (!real_l)
- output_asm_insn (templ, operands);
+ xop[0] = dest;
+ xop[1] = src == const0_rtx ? zero_reg_rtx : src;
- operands[1] = src;
- return "";
+ return out_movhi_mr_r (insn, xop, real_l);
}
fatal_insn ("invalid insn:", insn);
return "";
}
-const char *
-out_movqi_r_mr (rtx insn, rtx op[], int *l)
+static const char*
+out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
rtx dest = op[0];
rtx src = op[1];
rtx x = XEXP (src, 0);
- int dummy;
-
- if (!l)
- l = &dummy;
if (CONSTANT_ADDRESS_P (x))
{
- if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
- {
- *l = 1;
- return AS2 (in,%0,__SREG__);
- }
- if (optimize > 0 && io_address_operand (x, QImode))
- {
- *l = 1;
- return AS2 (in,%0,%m1-0x20);
- }
- *l = 2;
- return AS2 (lds,%0,%m1);
+ return optimize > 0 && io_address_operand (x, QImode)
+ ? avr_asm_len ("in %0,%i1", op, plen, -1)
+ : avr_asm_len ("lds %0,%m1", op, plen, -2);
}
- /* memory access by reg+disp */
else if (GET_CODE (x) == PLUS
- && REG_P (XEXP (x,0))
- && GET_CODE (XEXP (x,1)) == CONST_INT)
+ && REG_P (XEXP (x, 0))
+ && CONST_INT_P (XEXP (x, 1)))
{
- if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
- {
- int disp = INTVAL (XEXP (x,1));
- if (REGNO (XEXP (x,0)) != REG_Y)
- fatal_insn ("incorrect insn:",insn);
+ /* memory access by reg+disp */
- if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
- return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
- AS2 (ldd,%0,Y+63) CR_TAB
- AS2 (sbiw,r28,%o1-63));
+ int disp = INTVAL (XEXP (x, 1));
+
+ if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
+ {
+ if (REGNO (XEXP (x, 0)) != REG_Y)
+ fatal_insn ("incorrect insn:",insn);
- return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
- AS2 (sbci,r29,hi8(-%o1)) CR_TAB
- AS2 (ld,%0,Y) CR_TAB
- AS2 (subi,r28,lo8(%o1)) CR_TAB
- AS2 (sbci,r29,hi8(%o1)));
- }
- else if (REGNO (XEXP (x,0)) == REG_X)
- {
- /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
- it but I have this situation with extremal optimizing options. */
- if (reg_overlap_mentioned_p (dest, XEXP (x,0))
- || reg_unused_after (insn, XEXP (x,0)))
- return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
- AS2 (ld,%0,X));
-
- return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
- AS2 (ld,%0,X) CR_TAB
- AS2 (sbiw,r26,%o1));
- }
- *l = 1;
- return AS2 (ldd,%0,%1);
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
+ return avr_asm_len ("adiw r28,%o1-63" CR_TAB
+ "ldd %0,Y+63" CR_TAB
+ "sbiw r28,%o1-63", op, plen, -3);
+
+ return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
+ "sbci r29,hi8(-%o1)" CR_TAB
+ "ld %0,Y" CR_TAB
+ "subi r28,lo8(%o1)" CR_TAB
+ "sbci r29,hi8(%o1)", op, plen, -5);
+ }
+ else if (REGNO (XEXP (x, 0)) == REG_X)
+ {
+ /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
+ it but I have this situation with extremal optimizing options. */
+
+ avr_asm_len ("adiw r26,%o1" CR_TAB
+ "ld %0,X", op, plen, -2);
+
+ if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
+ && !reg_unused_after (insn, XEXP (x,0)))
+ {
+ avr_asm_len ("sbiw r26,%o1", op, plen, 1);
+ }
+
+ return "";
+ }
+
+ return avr_asm_len ("ldd %0,%1", op, plen, -1);
}
- *l = 1;
- return AS2 (ld,%0,%1);
+
+ return avr_asm_len ("ld %0,%1", op, plen, -1);
}
-const char *
-out_movhi_r_mr (rtx insn, rtx op[], int *l)
+static const char*
+out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
rtx dest = op[0];
rtx src = op[1];
@@ -2382,39 +3018,25 @@ out_movhi_r_mr (rtx insn, rtx op[], int *l)
/* "volatile" forces reading low byte first, even if less efficient,
for correct operation with 16-bit I/O registers. */
int mem_volatile_p = MEM_VOLATILE_P (src);
- int tmp;
-
- if (!l)
- l = &tmp;
if (reg_base > 0)
{
if (reg_dest == reg_base) /* R = (R) */
- {
- *l = 3;
- return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
- AS2 (ld,%B0,%1) CR_TAB
- AS2 (mov,%A0,__tmp_reg__));
- }
- else if (reg_base == REG_X) /* (R26) */
- {
- if (reg_unused_after (insn, base))
- {
- *l = 2;
- return (AS2 (ld,%A0,X+) CR_TAB
- AS2 (ld,%B0,X));
- }
- *l = 3;
- return (AS2 (ld,%A0,X+) CR_TAB
- AS2 (ld,%B0,X) CR_TAB
- AS2 (sbiw,r26,1));
- }
- else /* (R) */
- {
- *l = 2;
- return (AS2 (ld,%A0,%1) CR_TAB
- AS2 (ldd,%B0,%1+1));
- }
+ return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
+ "ld %B0,%1" CR_TAB
+ "mov %A0,__tmp_reg__", op, plen, -3);
+
+ if (reg_base != REG_X)
+ return avr_asm_len ("ld %A0,%1" CR_TAB
+ "ldd %B0,%1+1", op, plen, -2);
+
+ avr_asm_len ("ld %A0,X+" CR_TAB
+ "ld %B0,X", op, plen, -2);
+
+ if (!reg_unused_after (insn, base))
+ avr_asm_len ("sbiw r26,1", op, plen, 1);
+
+ return "";
}
else if (GET_CODE (base) == PLUS) /* (R + i) */
{
@@ -2422,109 +3044,90 @@ out_movhi_r_mr (rtx insn, rtx op[], int *l)
int reg_base = true_regnum (XEXP (base, 0));
if (disp > MAX_LD_OFFSET (GET_MODE (src)))
- {
- if (REGNO (XEXP (base, 0)) != REG_Y)
- fatal_insn ("incorrect insn:",insn);
-
- if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
- return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
- AS2 (ldd,%A0,Y+62) CR_TAB
- AS2 (ldd,%B0,Y+63) CR_TAB
- AS2 (sbiw,r28,%o1-62));
+ {
+ if (REGNO (XEXP (base, 0)) != REG_Y)
+ fatal_insn ("incorrect insn:",insn);
+
+ return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
+ ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
+ "ldd %A0,Y+62" CR_TAB
+ "ldd %B0,Y+63" CR_TAB
+ "sbiw r28,%o1-62", op, plen, -4)
+
+ : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
+ "sbci r29,hi8(-%o1)" CR_TAB
+ "ld %A0,Y" CR_TAB
+ "ldd %B0,Y+1" CR_TAB
+ "subi r28,lo8(%o1)" CR_TAB
+ "sbci r29,hi8(%o1)", op, plen, -6);
+ }
+
+ /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
+ it but I have this situation with extremal
+ optimization options. */
- return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
- AS2 (sbci,r29,hi8(-%o1)) CR_TAB
- AS2 (ld,%A0,Y) CR_TAB
- AS2 (ldd,%B0,Y+1) CR_TAB
- AS2 (subi,r28,lo8(%o1)) CR_TAB
- AS2 (sbci,r29,hi8(%o1)));
- }
if (reg_base == REG_X)
- {
- /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
- it but I have this situation with extremal
- optimization options. */
-
- *l = 4;
- if (reg_base == reg_dest)
- return (AS2 (adiw,r26,%o1) CR_TAB
- AS2 (ld,__tmp_reg__,X+) CR_TAB
- AS2 (ld,%B0,X) CR_TAB
- AS2 (mov,%A0,__tmp_reg__));
+ return reg_base == reg_dest
+ ? avr_asm_len ("adiw r26,%o1" CR_TAB
+ "ld __tmp_reg__,X+" CR_TAB
+ "ld %B0,X" CR_TAB
+ "mov %A0,__tmp_reg__", op, plen, -4)
- return (AS2 (adiw,r26,%o1) CR_TAB
- AS2 (ld,%A0,X+) CR_TAB
- AS2 (ld,%B0,X) CR_TAB
- AS2 (sbiw,r26,%o1+1));
- }
+ : avr_asm_len ("adiw r26,%o1" CR_TAB
+ "ld %A0,X+" CR_TAB
+ "ld %B0,X" CR_TAB
+ "sbiw r26,%o1+1", op, plen, -4);
- if (reg_base == reg_dest)
- {
- *l = 3;
- return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
- AS2 (ldd,%B0,%B1) CR_TAB
- AS2 (mov,%A0,__tmp_reg__));
- }
-
- *l = 2;
- return (AS2 (ldd,%A0,%A1) CR_TAB
- AS2 (ldd,%B0,%B1));
+ return reg_base == reg_dest
+ ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
+ "ldd %B0,%B1" CR_TAB
+ "mov %A0,__tmp_reg__", op, plen, -3)
+
+ : avr_asm_len ("ldd %A0,%A1" CR_TAB
+ "ldd %B0,%B1", op, plen, -2);
}
else if (GET_CODE (base) == PRE_DEC) /* (--R) */
{
if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
- fatal_insn ("incorrect insn:", insn);
+ fatal_insn ("incorrect insn:", insn);
- if (mem_volatile_p)
- {
- if (REGNO (XEXP (base, 0)) == REG_X)
- {
- *l = 4;
- return (AS2 (sbiw,r26,2) CR_TAB
- AS2 (ld,%A0,X+) CR_TAB
- AS2 (ld,%B0,X) CR_TAB
- AS2 (sbiw,r26,1));
- }
- else
- {
- *l = 3;
- return (AS2 (sbiw,%r1,2) CR_TAB
- AS2 (ld,%A0,%p1) CR_TAB
- AS2 (ldd,%B0,%p1+1));
- }
- }
-
- *l = 2;
- return (AS2 (ld,%B0,%1) CR_TAB
- AS2 (ld,%A0,%1));
+ if (!mem_volatile_p)
+ return avr_asm_len ("ld %B0,%1" CR_TAB
+ "ld %A0,%1", op, plen, -2);
+
+ return REGNO (XEXP (base, 0)) == REG_X
+ ? avr_asm_len ("sbiw r26,2" CR_TAB
+ "ld %A0,X+" CR_TAB
+ "ld %B0,X" CR_TAB
+ "sbiw r26,1", op, plen, -4)
+
+ : avr_asm_len ("sbiw %r1,2" CR_TAB
+ "ld %A0,%p1" CR_TAB
+ "ldd %B0,%p1+1", op, plen, -3);
}
else if (GET_CODE (base) == POST_INC) /* (R++) */
{
if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
- fatal_insn ("incorrect insn:", insn);
+ fatal_insn ("incorrect insn:", insn);
- *l = 2;
- return (AS2 (ld,%A0,%1) CR_TAB
- AS2 (ld,%B0,%1));
+ return avr_asm_len ("ld %A0,%1" CR_TAB
+ "ld %B0,%1", op, plen, -2);
}
else if (CONSTANT_ADDRESS_P (base))
{
- if (optimize > 0 && io_address_operand (base, HImode))
- {
- *l = 2;
- return (AS2 (in,%A0,%m1-0x20) CR_TAB
- AS2 (in,%B0,%m1+1-0x20));
- }
- *l = 4;
- return (AS2 (lds,%A0,%m1) CR_TAB
- AS2 (lds,%B0,%m1+1));
+ return optimize > 0 && io_address_operand (base, HImode)
+ ? avr_asm_len ("in %A0,%i1" CR_TAB
+ "in %B0,%i1+1", op, plen, -2)
+
+ : avr_asm_len ("lds %A0,%m1" CR_TAB
+ "lds %B0,%m1+1", op, plen, -4);
}
fatal_insn ("unknown move insn:",insn);
return "";
}
-const char *
+static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
rtx dest = op[0];
@@ -2685,7 +3288,7 @@ out_movsi_r_mr (rtx insn, rtx op[], int *l)
return "";
}
-const char *
+static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
rtx dest = op[0];
@@ -2848,6 +3451,12 @@ output_movsisf (rtx insn, rtx operands[], int *l)
rtx src = operands[1];
int *real_l = l;
+ if (avr_mem_pgm_p (src)
+ || avr_mem_pgm_p (dest))
+ {
+ return avr_out_lpm (insn, operands, real_l);
+ }
+
if (!l)
l = &dummy;
@@ -2884,34 +3493,10 @@ output_movsisf (rtx insn, rtx operands[], int *l)
AS2 (mov,%D0,%D1));
}
}
- else if (CONST_INT_P (src)
- || CONST_DOUBLE_P (src))
- {
- return output_reload_insisf (operands, NULL_RTX, real_l);
- }
else if (CONSTANT_P (src))
{
- if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
- {
- *l = 4;
- return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
- AS2 (ldi,%B0,hi8(%1)) CR_TAB
- AS2 (ldi,%C0,hlo8(%1)) CR_TAB
- AS2 (ldi,%D0,hhi8(%1)));
- }
- /* Last resort, better than loading from memory. */
- *l = 10;
- return (AS2 (mov,__tmp_reg__,r31) CR_TAB
- AS2 (ldi,r31,lo8(%1)) CR_TAB
- AS2 (mov,%A0,r31) CR_TAB
- AS2 (ldi,r31,hi8(%1)) CR_TAB
- AS2 (mov,%B0,r31) CR_TAB
- AS2 (ldi,r31,hlo8(%1)) CR_TAB
- AS2 (mov,%C0,r31) CR_TAB
- AS2 (ldi,r31,hhi8(%1)) CR_TAB
- AS2 (mov,%D0,r31) CR_TAB
- AS2 (mov,r31,__tmp_reg__));
- }
+ return output_reload_insisf (operands, NULL_RTX, real_l);
+ }
else if (GET_CODE (src) == MEM)
return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
}
@@ -3167,6 +3752,12 @@ avr_out_movpsi (rtx insn, rtx *op, int *plen)
rtx dest = op[0];
rtx src = op[1];
+ if (avr_mem_pgm_p (src)
+ || avr_mem_pgm_p (dest))
+ {
+ return avr_out_lpm (insn, op, plen);
+ }
+
if (register_operand (dest, VOIDmode))
{
if (register_operand (src, VOIDmode)) /* mov r,r */
@@ -3192,41 +3783,21 @@ avr_out_movpsi (rtx insn, rtx *op, int *plen)
return avr_asm_len ("mov %C0,%C1", op, plen, 1);
}
}
- else if (CONST_INT_P (src))
- {
- return avr_out_reload_inpsi (op, NULL_RTX, plen);
- }
else if (CONSTANT_P (src))
{
- if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
- {
- return avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
- "ldi %B0,hi8(%1)" CR_TAB
- "ldi %C0,hh8(%1)", op, plen, -3);
- }
-
- /* Last resort, better than loading from memory. */
- return avr_asm_len ("mov __tmp_reg__,r31" CR_TAB
- "ldi r31,lo8(%1)" CR_TAB
- "mov %A0,r31" CR_TAB
- "ldi r31,hi8(%1)" CR_TAB
- "mov %B0,r31" CR_TAB
- "ldi r31,hh8(%1)" CR_TAB
- "mov %C0,r31" CR_TAB
- "mov r31,__tmp_reg__", op, plen, -8);
+ return avr_out_reload_inpsi (op, NULL_RTX, plen);
}
else if (MEM_P (src))
return avr_out_load_psi (insn, op, plen); /* mov r,m */
}
else if (MEM_P (dest))
{
- if (src == CONST0_RTX (GET_MODE (dest)))
- op[1] = zero_reg_rtx;
-
- avr_out_store_psi (insn, op, plen);
+ rtx xop[2];
+
+ xop[0] = dest;
+ xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
- op[1] = src;
- return "";
+ return avr_out_store_psi (insn, xop, plen);
}
fatal_insn ("invalid insn:", insn);
@@ -3234,88 +3805,71 @@ avr_out_movpsi (rtx insn, rtx *op, int *plen)
}
-const char *
-out_movqi_mr_r (rtx insn, rtx op[], int *l)
+static const char*
+out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
rtx dest = op[0];
rtx src = op[1];
rtx x = XEXP (dest, 0);
- int dummy;
-
- if (!l)
- l = &dummy;
if (CONSTANT_ADDRESS_P (x))
{
- if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
- {
- *l = 1;
- return AS2 (out,__SREG__,%1);
- }
- if (optimize > 0 && io_address_operand (x, QImode))
- {
- *l = 1;
- return AS2 (out,%m0-0x20,%1);
- }
- *l = 2;
- return AS2 (sts,%m0,%1);
+ return optimize > 0 && io_address_operand (x, QImode)
+ ? avr_asm_len ("out %i0,%1", op, plen, -1)
+ : avr_asm_len ("sts %m0,%1", op, plen, -2);
}
- /* memory access by reg+disp */
- else if (GET_CODE (x) == PLUS
- && REG_P (XEXP (x,0))
- && GET_CODE (XEXP (x,1)) == CONST_INT)
+ else if (GET_CODE (x) == PLUS
+ && REG_P (XEXP (x, 0))
+ && CONST_INT_P (XEXP (x, 1)))
{
- if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
- {
- int disp = INTVAL (XEXP (x,1));
- if (REGNO (XEXP (x,0)) != REG_Y)
- fatal_insn ("incorrect insn:",insn);
+ /* memory access by reg+disp */
- if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
- return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
- AS2 (std,Y+63,%1) CR_TAB
- AS2 (sbiw,r28,%o0-63));
+ int disp = INTVAL (XEXP (x, 1));
- return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
- AS2 (sbci,r29,hi8(-%o0)) CR_TAB
- AS2 (st,Y,%1) CR_TAB
- AS2 (subi,r28,lo8(%o0)) CR_TAB
- AS2 (sbci,r29,hi8(%o0)));
- }
+ if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
+ {
+ if (REGNO (XEXP (x, 0)) != REG_Y)
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
+ return avr_asm_len ("adiw r28,%o0-63" CR_TAB
+ "std Y+63,%1" CR_TAB
+ "sbiw r28,%o0-63", op, plen, -3);
+
+ return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
+ "sbci r29,hi8(-%o0)" CR_TAB
+ "st Y,%1" CR_TAB
+ "subi r28,lo8(%o0)" CR_TAB
+ "sbci r29,hi8(%o0)", op, plen, -5);
+ }
else if (REGNO (XEXP (x,0)) == REG_X)
- {
- if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
- {
- if (reg_unused_after (insn, XEXP (x,0)))
- return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
- AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X,__tmp_reg__));
-
- return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
- AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X,__tmp_reg__) CR_TAB
- AS2 (sbiw,r26,%o0));
- }
- else
- {
- if (reg_unused_after (insn, XEXP (x,0)))
- return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X,%1));
+ {
+ if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
+ {
+ avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
+ "adiw r26,%o0" CR_TAB
+ "st X,__tmp_reg__", op, plen, -3);
+ }
+ else
+ {
+ avr_asm_len ("adiw r26,%o0" CR_TAB
+ "st X,%1", op, plen, -2);
+ }
+
+ if (!reg_unused_after (insn, XEXP (x,0)))
+ avr_asm_len ("sbiw r26,%o0", op, plen, 1);
- return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X,%1) CR_TAB
- AS2 (sbiw,r26,%o0));
- }
- }
- *l = 1;
- return AS2 (std,%0,%1);
+ return "";
+ }
+
+ return avr_asm_len ("std %0,%1", op, plen, 1);
}
- *l = 1;
- return AS2 (st,%0,%1);
+
+ return avr_asm_len ("st %0,%1", op, plen, 1);
}
-const char *
-out_movhi_mr_r (rtx insn, rtx op[], int *l)
+static const char*
+out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
rtx dest = op[0];
rtx src = op[1];
@@ -3325,127 +3879,103 @@ out_movhi_mr_r (rtx insn, rtx op[], int *l)
/* "volatile" forces writing high byte first, even if less efficient,
for correct operation with 16-bit I/O registers. */
int mem_volatile_p = MEM_VOLATILE_P (dest);
- int tmp;
- if (!l)
- l = &tmp;
if (CONSTANT_ADDRESS_P (base))
- {
- if (optimize > 0 && io_address_operand (base, HImode))
- {
- *l = 2;
- return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
- AS2 (out,%m0-0x20,%A1));
- }
- return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
- AS2 (sts,%m0,%A1));
- }
+ return optimize > 0 && io_address_operand (base, HImode)
+ ? avr_asm_len ("out %i0+1,%B1" CR_TAB
+ "out %i0,%A1", op, plen, -2)
+
+ : avr_asm_len ("sts %m0+1,%B1" CR_TAB
+ "sts %m0,%A1", op, plen, -4);
+
if (reg_base > 0)
{
- if (reg_base == REG_X)
- {
- if (reg_src == REG_X)
- {
- /* "st X+,r26" and "st -X,r26" are undefined. */
- if (!mem_volatile_p && reg_unused_after (insn, src))
- return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
- AS2 (st,X,r26) CR_TAB
- AS2 (adiw,r26,1) CR_TAB
- AS2 (st,X,__tmp_reg__));
- else
- return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
- AS2 (adiw,r26,1) CR_TAB
- AS2 (st,X,__tmp_reg__) CR_TAB
- AS2 (sbiw,r26,1) CR_TAB
- AS2 (st,X,r26));
- }
- else
- {
- if (!mem_volatile_p && reg_unused_after (insn, base))
- return *l=2, (AS2 (st,X+,%A1) CR_TAB
- AS2 (st,X,%B1));
- else
- return *l=3, (AS2 (adiw,r26,1) CR_TAB
- AS2 (st,X,%B1) CR_TAB
- AS2 (st,-X,%A1));
- }
- }
- else
- return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
- AS2 (st,%0,%A1));
+ if (reg_base != REG_X)
+ return avr_asm_len ("std %0+1,%B1" CR_TAB
+ "st %0,%A1", op, plen, -2);
+
+ if (reg_src == REG_X)
+ /* "st X+,r26" and "st -X,r26" are undefined. */
+ return !mem_volatile_p && reg_unused_after (insn, src)
+ ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
+ "st X,r26" CR_TAB
+ "adiw r26,1" CR_TAB
+ "st X,__tmp_reg__", op, plen, -4)
+
+ : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
+ "adiw r26,1" CR_TAB
+ "st X,__tmp_reg__" CR_TAB
+ "sbiw r26,1" CR_TAB
+ "st X,r26", op, plen, -5);
+
+ return !mem_volatile_p && reg_unused_after (insn, base)
+ ? avr_asm_len ("st X+,%A1" CR_TAB
+ "st X,%B1", op, plen, -2)
+ : avr_asm_len ("adiw r26,1" CR_TAB
+ "st X,%B1" CR_TAB
+ "st -X,%A1", op, plen, -3);
}
else if (GET_CODE (base) == PLUS)
{
int disp = INTVAL (XEXP (base, 1));
reg_base = REGNO (XEXP (base, 0));
if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
- {
- if (reg_base != REG_Y)
- fatal_insn ("incorrect insn:",insn);
-
- if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
- return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
- AS2 (std,Y+63,%B1) CR_TAB
- AS2 (std,Y+62,%A1) CR_TAB
- AS2 (sbiw,r28,%o0-62));
-
- return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
- AS2 (sbci,r29,hi8(-%o0)) CR_TAB
- AS2 (std,Y+1,%B1) CR_TAB
- AS2 (st,Y,%A1) CR_TAB
- AS2 (subi,r28,lo8(%o0)) CR_TAB
- AS2 (sbci,r29,hi8(%o0)));
- }
- if (reg_base == REG_X)
- {
- /* (X + d) = R */
- if (reg_src == REG_X)
- {
- *l = 7;
- return (AS2 (mov,__tmp_reg__,r26) CR_TAB
- AS2 (mov,__zero_reg__,r27) CR_TAB
- AS2 (adiw,r26,%o0+1) CR_TAB
- AS2 (st,X,__zero_reg__) CR_TAB
- AS2 (st,-X,__tmp_reg__) CR_TAB
- AS1 (clr,__zero_reg__) CR_TAB
- AS2 (sbiw,r26,%o0));
- }
- *l = 4;
- return (AS2 (adiw,r26,%o0+1) CR_TAB
- AS2 (st,X,%B1) CR_TAB
- AS2 (st,-X,%A1) CR_TAB
- AS2 (sbiw,r26,%o0));
- }
- return *l=2, (AS2 (std,%B0,%B1) CR_TAB
- AS2 (std,%A0,%A1));
+ {
+ if (reg_base != REG_Y)
+ fatal_insn ("incorrect insn:",insn);
+
+ return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
+ ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
+ "std Y+63,%B1" CR_TAB
+ "std Y+62,%A1" CR_TAB
+ "sbiw r28,%o0-62", op, plen, -4)
+
+ : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
+ "sbci r29,hi8(-%o0)" CR_TAB
+ "std Y+1,%B1" CR_TAB
+ "st Y,%A1" CR_TAB
+ "subi r28,lo8(%o0)" CR_TAB
+ "sbci r29,hi8(%o0)", op, plen, -6);
+ }
+
+ if (reg_base != REG_X)
+ return avr_asm_len ("std %B0,%B1" CR_TAB
+ "std %A0,%A1", op, plen, -2);
+ /* (X + d) = R */
+ return reg_src == REG_X
+ ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
+ "mov __zero_reg__,r27" CR_TAB
+ "adiw r26,%o0+1" CR_TAB
+ "st X,__zero_reg__" CR_TAB
+ "st -X,__tmp_reg__" CR_TAB
+ "clr __zero_reg__" CR_TAB
+ "sbiw r26,%o0", op, plen, -7)
+
+ : avr_asm_len ("adiw r26,%o0+1" CR_TAB
+ "st X,%B1" CR_TAB
+ "st -X,%A1" CR_TAB
+ "sbiw r26,%o0", op, plen, -4);
}
else if (GET_CODE (base) == PRE_DEC) /* (--R) */
- return *l=2, (AS2 (st,%0,%B1) CR_TAB
- AS2 (st,%0,%A1));
+ {
+ return avr_asm_len ("st %0,%B1" CR_TAB
+ "st %0,%A1", op, plen, -2);
+ }
else if (GET_CODE (base) == POST_INC) /* (R++) */
{
- if (mem_volatile_p)
- {
- if (REGNO (XEXP (base, 0)) == REG_X)
- {
- *l = 4;
- return (AS2 (adiw,r26,1) CR_TAB
- AS2 (st,X,%B1) CR_TAB
- AS2 (st,-X,%A1) CR_TAB
- AS2 (adiw,r26,2));
- }
- else
- {
- *l = 3;
- return (AS2 (std,%p0+1,%B1) CR_TAB
- AS2 (st,%p0,%A1) CR_TAB
- AS2 (adiw,%r0,2));
- }
- }
+ if (!mem_volatile_p)
+ return avr_asm_len ("st %0,%A1" CR_TAB
+ "st %0,%B1", op, plen, -2);
+
+ return REGNO (XEXP (base, 0)) == REG_X
+ ? avr_asm_len ("adiw r26,1" CR_TAB
+ "st X,%B1" CR_TAB
+ "st -X,%A1" CR_TAB
+ "adiw r26,2", op, plen, -4)
- *l = 2;
- return (AS2 (st,%0,%A1) CR_TAB
- AS2 (st,%0,%B1));
+ : avr_asm_len ("std %p0+1,%B1" CR_TAB
+ "st %p0,%A1" CR_TAB
+ "adiw %r0,2", op, plen, -3);
}
fatal_insn ("unknown move insn:",insn);
return "";
@@ -3740,150 +4270,126 @@ avr_out_tstsi (rtx insn, rtx *op, int *plen)
}
-/* Generate asm equivalent for various shifts.
- Shift count is a CONST_INT, MEM or REG.
- This only handles cases that are not already
- carefully hand-optimized in ?sh??i3_out. */
+/* Generate asm equivalent for various shifts. This only handles cases
+ that are not already carefully hand-optimized in ?sh??i3_out.
+
+ OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
+ OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
+ OPERANDS[3] is a QImode scratch register from LD regs if
+ available and SCRATCH, otherwise (no scratch available)
+
+ TEMPL is an assembler template that shifts by one position.
+ T_LEN is the length of this template. */
void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
- int *len, int t_len)
+ int *plen, int t_len)
{
- rtx op[10];
- char str[500];
- int second_label = 1;
- int saved_in_tmp = 0;
- int use_zero_reg = 0;
+ bool second_label = true;
+ bool saved_in_tmp = false;
+ bool use_zero_reg = false;
+ rtx op[5];
op[0] = operands[0];
op[1] = operands[1];
op[2] = operands[2];
op[3] = operands[3];
- str[0] = 0;
- if (len)
- *len = 1;
+ if (plen)
+ *plen = 0;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
- int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
+ && REG_P (operands[3]));
int count = INTVAL (operands[2]);
int max_len = 10; /* If larger than this, always use a loop. */
if (count <= 0)
- {
- if (len)
- *len = 0;
- return;
- }
+ return;
if (count < 8 && !scratch)
- use_zero_reg = 1;
+ use_zero_reg = true;
if (optimize_size)
- max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
+ max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
if (t_len * count <= max_len)
- {
- /* Output shifts inline with no loop - faster. */
- if (len)
- *len = t_len * count;
- else
- {
- while (count-- > 0)
- output_asm_insn (templ, op);
- }
+ {
+ /* Output shifts inline with no loop - faster. */
+
+ while (count-- > 0)
+ avr_asm_len (templ, op, plen, t_len);
- return;
- }
+ return;
+ }
if (scratch)
- {
- if (!len)
- strcat (str, AS2 (ldi,%3,%2));
- }
+ {
+ avr_asm_len ("ldi %3,%2", op, plen, 1);
+ }
else if (use_zero_reg)
- {
- /* Hack to save one word: use __zero_reg__ as loop counter.
- Set one bit, then shift in a loop until it is 0 again. */
+ {
+ /* Hack to save one word: use __zero_reg__ as loop counter.
+ Set one bit, then shift in a loop until it is 0 again. */
- op[3] = zero_reg_rtx;
- if (len)
- *len = 2;
- else
- strcat (str, ("set" CR_TAB
- AS2 (bld,%3,%2-1)));
- }
+ op[3] = zero_reg_rtx;
+
+ avr_asm_len ("set" CR_TAB
+ "bld %3,%2-1", op, plen, 2);
+ }
else
- {
- /* No scratch register available, use one from LD_REGS (saved in
- __tmp_reg__) that doesn't overlap with registers to shift. */
+ {
+ /* No scratch register available, use one from LD_REGS (saved in
+ __tmp_reg__) that doesn't overlap with registers to shift. */
- op[3] = gen_rtx_REG (QImode,
- ((true_regnum (operands[0]) - 1) & 15) + 16);
- op[4] = tmp_reg_rtx;
- saved_in_tmp = 1;
+ op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
+ op[4] = tmp_reg_rtx;
+ saved_in_tmp = true;
- if (len)
- *len = 3; /* Includes "mov %3,%4" after the loop. */
- else
- strcat (str, (AS2 (mov,%4,%3) CR_TAB
- AS2 (ldi,%3,%2)));
- }
+ avr_asm_len ("mov %4,%3" CR_TAB
+ "ldi %3,%2", op, plen, 2);
+ }
- second_label = 0;
+ second_label = false;
}
- else if (GET_CODE (operands[2]) == MEM)
+ else if (MEM_P (op[2]))
{
- rtx op_mov[10];
+ rtx op_mov[2];
- op[3] = op_mov[0] = tmp_reg_rtx;
+ op_mov[0] = op[3] = tmp_reg_rtx;
op_mov[1] = op[2];
- if (len)
- out_movqi_r_mr (insn, op_mov, len);
- else
- output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
+ out_movqi_r_mr (insn, op_mov, plen);
}
- else if (register_operand (operands[2], QImode))
+ else if (register_operand (op[2], QImode))
{
- if (reg_unused_after (insn, operands[2])
- && !reg_overlap_mentioned_p (operands[0], operands[2]))
+ op[3] = op[2];
+
+ if (!reg_unused_after (insn, op[2])
+ || reg_overlap_mentioned_p (op[0], op[2]))
{
- op[3] = op[2];
+ op[3] = tmp_reg_rtx;
+ avr_asm_len ("mov %3,%2", op, plen, 1);
}
- else
- {
- op[3] = tmp_reg_rtx;
- if (!len)
- strcat (str, (AS2 (mov,%3,%2) CR_TAB));
- }
}
else
fatal_insn ("bad shift insn:", insn);
if (second_label)
- {
- if (len)
- ++*len;
- else
- strcat (str, AS1 (rjmp,2f));
- }
+ avr_asm_len ("rjmp 2f", op, plen, 1);
- if (len)
- *len += t_len + 2; /* template + dec + brXX */
- else
- {
- strcat (str, "\n1:\t");
- strcat (str, templ);
- strcat (str, second_label ? "\n2:\t" : "\n\t");
- strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
- strcat (str, CR_TAB);
- strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
- if (saved_in_tmp)
- strcat (str, (CR_TAB AS2 (mov,%3,%4)));
- output_asm_insn (str, op);
- }
+ avr_asm_len ("1:", op, plen, 0);
+ avr_asm_len (templ, op, plen, t_len);
+
+ if (second_label)
+ avr_asm_len ("2:", op, plen, 0);
+
+ avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
+ avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
+
+ if (saved_in_tmp)
+ avr_asm_len ("mov %3,%4", op, plen, 1);
}
@@ -5348,7 +5854,7 @@ avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
op[0] = reg8;
- op[1] = GEN_INT (val8);
+ op[1] = gen_int_mode (val8, QImode);
/* To get usable cc0 no low-bytes must have been skipped. */
@@ -5897,7 +6403,9 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
-
+ case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
+ case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
+
case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
@@ -5921,6 +6429,8 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
+ case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
+
default:
gcc_unreachable();
}
@@ -6042,6 +6552,49 @@ _reg_unused_after (rtx insn, rtx reg)
return 1;
}
+
+/* Return RTX that represents the lower 16 bits of a constant address.
+ Unfortunately, simplify_gen_subreg does not handle this case. */
+
+static rtx
+avr_const_address_lo16 (rtx x)
+{
+ rtx lo16;
+
+ switch (GET_CODE (x))
+ {
+ default:
+ break;
+
+ case CONST:
+ if (PLUS == GET_CODE (XEXP (x, 0))
+ && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
+ {
+ HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
+ const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
+
+ lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
+ lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
+
+ return lo16;
+ }
+
+ break;
+
+ case SYMBOL_REF:
+ {
+ const char *name = XSTR (x, 0);
+
+ return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
+ }
+ }
+
+ avr_edump ("\n%?: %r\n", x);
+ gcc_unreachable();
+}
+
+
/* Target hook for assembling integer objects. The AVR version needs
special handling for references to certain labels. */
@@ -6054,11 +6607,30 @@ avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
fputs ("\t.word\tgs(", asm_out_file);
output_addr_const (asm_out_file, x);
fputs (")\n", asm_out_file);
+
+ return true;
+ }
+ else if (GET_MODE (x) == PSImode)
+ {
+ default_assemble_integer (avr_const_address_lo16 (x),
+ GET_MODE_SIZE (HImode), aligned_p);
+
+ fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
+ asm_out_file);
+ output_addr_const (asm_out_file, x);
+ fputs (")\"\n", asm_out_file);
+
+ fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
+ output_addr_const (asm_out_file, x);
+ fputs (")\n", asm_out_file);
+
return true;
}
+
return default_assemble_integer (x, size, aligned_p);
}
+
/* Worker function for ASM_DECLARE_FUNCTION_NAME. */
void
@@ -6208,8 +6780,16 @@ avr_attribute_table[] =
{ NULL, 0, 0, false, false, false, NULL, false }
};
-/* Look for attribute `progmem' in DECL
- if found return 1, otherwise 0. */
+
+/* Look if DECL shall be placed in program memory space by
+ means of attribute `progmem' or some address-space qualifier.
+ Return non-zero if DECL is data that must end up in Flash and
+ zero if the data lives in RAM (.bss, .data, .rodata, ...).
+
+ Return 2 if DECL is located in 24-bit flash address-space
+ Return 1 if DECL is located in 16-bit flash address-space
+ Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
+ Return 0 otherwise */
int
avr_progmem_p (tree decl, tree attributes)
@@ -6219,11 +6799,18 @@ avr_progmem_p (tree decl, tree attributes)
if (TREE_CODE (decl) != VAR_DECL)
return 0;
+ if (avr_decl_pgmx_p (decl))
+ return 2;
+
+ if (avr_decl_pgm_p (decl))
+ return 1;
+
if (NULL_TREE
!= lookup_attribute ("progmem", attributes))
- return 1;
+ return -1;
- a=decl;
+ a = decl;
+
do
a = TREE_TYPE(a);
while (TREE_CODE (a) == ARRAY_TYPE);
@@ -6232,16 +6819,123 @@ avr_progmem_p (tree decl, tree attributes)
return 0;
if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
- return 1;
+ return -1;
return 0;
}
+
+/* Scan type TYP for pointer references to address space ASn.
+ Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
+ the AS are also declared to be CONST.
+ Otherwise, return the respective address space, i.e. a value != 0. */
+
+static addr_space_t
+avr_nonconst_pointer_addrspace (tree typ)
+{
+ while (ARRAY_TYPE == TREE_CODE (typ))
+ typ = TREE_TYPE (typ);
+
+ if (POINTER_TYPE_P (typ))
+ {
+ tree target = TREE_TYPE (typ);
+
+ /* Pointer to function: Test the function's return type. */
+
+ if (FUNCTION_TYPE == TREE_CODE (target))
+ return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
+
+ /* "Ordinary" pointers... */
+
+ while (TREE_CODE (target) == ARRAY_TYPE)
+ target = TREE_TYPE (target);
+
+ if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
+ && !TYPE_READONLY (target))
+ {
+ /* Pointers to non-generic address space must be const. */
+
+ return TYPE_ADDR_SPACE (target);
+ }
+
+ /* Scan pointer's target type. */
+
+ return avr_nonconst_pointer_addrspace (target);
+ }
+
+ return ADDR_SPACE_GENERIC;
+}
+
+
+/* Sanity check NODE so that all pointers targeting address space AS1
+ go along with CONST qualifier. Writing to this address space should
+ be detected and complained about as early as possible. */
+
+static bool
+avr_pgm_check_var_decl (tree node)
+{
+ const char *reason = NULL;
+
+ addr_space_t as = ADDR_SPACE_GENERIC;
+
+ gcc_assert (as == 0);
+
+ if (avr_log.progmem)
+ avr_edump ("%?: %t\n", node);
+
+ switch (TREE_CODE (node))
+ {
+ default:
+ break;
+
+ case VAR_DECL:
+ if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
+ reason = "variable";
+ break;
+
+ case PARM_DECL:
+ if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
+ reason = "function parameter";
+ break;
+
+ case FIELD_DECL:
+ if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
+ reason = "structure field";
+ break;
+
+ case FUNCTION_DECL:
+ if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
+ as)
+ reason = "return type of function";
+ break;
+
+ case POINTER_TYPE:
+ if (as = avr_nonconst_pointer_addrspace (node), as)
+ reason = "pointer";
+ break;
+ }
+
+ if (reason)
+ {
+ if (TYPE_P (node))
+ error ("pointer targeting address space %qs must be const in %qT",
+ c_addr_space_name (as), node);
+ else
+ error ("pointer targeting address space %qs must be const in %s %q+D",
+ c_addr_space_name (as), reason, node);
+ }
+
+ return reason == NULL;
+}
+
+
/* Add the section attribute if the variable is in progmem. */
static void
avr_insert_attributes (tree node, tree *attributes)
{
+ avr_pgm_check_var_decl (node);
+
if (TREE_CODE (node) == VAR_DECL
&& (TREE_STATIC (node) || DECL_EXTERNAL (node))
&& avr_progmem_p (node, *attributes))
@@ -6258,11 +6952,20 @@ avr_insert_attributes (tree node, tree *attributes)
if (error_mark_node == node0)
return;
- if (!TYPE_READONLY (node0))
+ if (!TYPE_READONLY (node0)
+ && !TREE_READONLY (node))
{
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
+ const char *reason = "__attribute__((progmem))";
+
+ if (!ADDR_SPACE_GENERIC_P (as))
+ reason = c_addr_space_name (as);
+
+ if (avr_log.progmem)
+ avr_edump ("\n%?: %t\n%t\n", node, node0);
+
error ("variable %q+D must be const in order to be put into"
- " read-only section by means of %<__attribute__((progmem))%>",
- node);
+ " read-only section by means of %qs", node, reason);
}
}
}
@@ -6314,11 +7017,23 @@ avr_output_bss_section_asm_op (const void *data)
}
+/* Unnamed section callback for progmem*.data sections. */
+
+static void
+avr_output_progmem_section_asm_op (const void *data)
+{
+ fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
+ (const char*) data);
+}
+
+
/* Implement `TARGET_ASM_INIT_SECTIONS'. */
static void
avr_asm_init_sections (void)
{
+ unsigned int n;
+
/* Set up a section for jump tables. Alignment is handled by
ASM_OUTPUT_BEFORE_CASE_LABEL. */
@@ -6337,9 +7052,12 @@ avr_asm_init_sections (void)
",\"ax\",@progbits");
}
- progmem_section
- = get_unnamed_section (0, output_section_asm_op,
- "\t.section\t.progmem.data,\"a\",@progbits");
+ for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
+ {
+ progmem_section[n]
+ = get_unnamed_section (0, avr_output_progmem_section_asm_op,
+ progmem_section_prefix[n]);
+ }
/* Override section callbacks to keep track of `avr_need_clear_bss_p'
resp. `avr_need_copy_data_p'. */
@@ -6418,8 +7136,9 @@ avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
if (flags & AVR_SECTION_PROGMEM)
{
+ int segment = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP - 1;
const char *old_prefix = ".rodata";
- const char *new_prefix = ".progmem.data";
+ const char *new_prefix = progmem_section_prefix[segment];
const char *sname = new_prefix;
if (STR_PREFIX_P (name, old_prefix))
@@ -6446,6 +7165,7 @@ avr_asm_named_section (const char *name, unsigned int flags, tree decl)
static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
+ int prog;
unsigned int flags = default_section_type_flags (decl, name, reloc);
if (STR_PREFIX_P (name, ".noinit"))
@@ -6459,10 +7179,16 @@ avr_section_type_flags (tree decl, const char *name, int reloc)
}
if (decl && DECL_P (decl)
- && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
+ && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
{
+ int segment = 0;
+
+ if (prog == 1)
+ segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
+
flags &= ~SECTION_WRITE;
- flags |= AVR_SECTION_PROGMEM;
+ flags &= ~SECTION_BSS;
+ flags |= (1 + segment % avr_current_arch->n_segments) * SECTION_MACH_DEP;
}
return flags;
@@ -6498,16 +7224,25 @@ avr_encode_section_info (tree decl, rtx rtl,
static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
+ int prog;
+
section * sect = default_elf_select_section (decl, reloc, align);
if (decl && DECL_P (decl)
- && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
+ && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
{
+ int segment = 0;
+
+ if (prog == 1)
+ segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
+
+ segment %= avr_current_arch->n_segments;
+
if (sect->common.flags & SECTION_NAMED)
{
const char * name = sect->named.name;
const char * old_prefix = ".rodata";
- const char * new_prefix = ".progmem.data";
+ const char * new_prefix = progmem_section_prefix[segment];
if (STR_PREFIX_P (name, old_prefix))
{
@@ -6518,31 +7253,38 @@ avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
}
}
- return progmem_section;
+ return progmem_section[segment];
}
return sect;
}
/* Implement `TARGET_ASM_FILE_START'. */
-/* Outputs some appropriate text to go at the start of an assembler
- file. */
+/* Outputs some text at the start of each assembler file. */
static void
avr_file_start (void)
{
+ int sfr_offset = avr_current_arch->sfr_offset;
+
if (avr_current_arch->asm_only)
error ("MCU %qs supported for assembler only", avr_current_device->name);
default_file_start ();
-/* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
- fputs ("__SREG__ = 0x3f\n"
- "__SP_H__ = 0x3e\n"
- "__SP_L__ = 0x3d\n", asm_out_file);
-
- fputs ("__tmp_reg__ = 0\n"
- "__zero_reg__ = 1\n", asm_out_file);
+ fprintf (asm_out_file,
+ "__SREG__ = 0x%02x\n"
+ "__SP_H__ = 0x%02x\n"
+ "__SP_L__ = 0x%02x\n"
+ "__RAMPZ__ = 0x%02x\n"
+ "__tmp_reg__ = %d\n"
+ "__zero_reg__ = %d\n",
+ -sfr_offset + SREG_ADDR,
+ -sfr_offset + SP_ADDR + 1,
+ -sfr_offset + SP_ADDR,
+ -sfr_offset + RAMPZ_ADDR,
+ TMP_REGNO,
+ ZERO_REGNO);
}
@@ -8049,10 +8791,14 @@ avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
reg_class_t
avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
- addr_space_t as ATTRIBUTE_UNUSED,
- RTX_CODE outer_code,
+ addr_space_t as, RTX_CODE outer_code,
RTX_CODE index_code ATTRIBUTE_UNUSED)
{
+ if (!ADDR_SPACE_GENERIC_P (as))
+ {
+ return POINTER_Z_REGS;
+ }
+
if (!avr_strict_X)
return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
@@ -8071,6 +8817,27 @@ avr_regno_mode_code_ok_for_base_p (int regno,
{
bool ok = false;
+ if (!ADDR_SPACE_GENERIC_P (as))
+ {
+ if (regno < FIRST_PSEUDO_REGISTER
+ && regno == REG_Z)
+ {
+ return true;
+ }
+
+ if (reg_renumber)
+ {
+ regno = reg_renumber[regno];
+
+ if (regno == REG_Z)
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
if (regno < FIRST_PSEUDO_REGISTER
&& (regno == REG_X
|| regno == REG_Y
@@ -8110,7 +8877,13 @@ avr_regno_mode_code_ok_for_base_p (int regno,
LEN != NULL: set *LEN to the length of the instruction sequence
(in words) printed with LEN = NULL.
If CLEAR_P is true, OP[0] had been cleard to Zero already.
- If CLEAR_P is false, nothing is known about OP[0]. */
+ If CLEAR_P is false, nothing is known about OP[0].
+
+ The effect on cc0 is as follows:
+
+ Load 0 to any register : NONE
+ Load ld register with any value : NONE
+ Anything else: : CLOBBER */
static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
@@ -8122,9 +8895,8 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
int clobber_val = 1234;
bool cooked_clobber_p = false;
bool set_p = false;
- unsigned int n;
enum machine_mode mode = GET_MODE (dest);
- int n_bytes = GET_MODE_SIZE (mode);
+ int n, n_bytes = GET_MODE_SIZE (mode);
gcc_assert (REG_P (dest)
&& CONSTANT_P (src));
@@ -8138,7 +8910,7 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
if (REGNO (dest) < 16
&& REGNO (dest) + GET_MODE_SIZE (mode) > 16)
{
- clobber_reg = gen_rtx_REG (QImode, REGNO (dest) + n_bytes - 1);
+ clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
}
/* We might need a clobber reg but don't have one. Look at the value to
@@ -8155,7 +8927,7 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
That's cheaper than loading from constant pool. */
cooked_clobber_p = true;
- clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
+ clobber_reg = all_regs_rtx[REG_Z + 1];
avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
}
@@ -8165,7 +8937,7 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
int ldreg_p;
bool done_byte = false;
- unsigned int j;
+ int j;
rtx xop[3];
/* Crop the n-th destination byte. */
@@ -8188,8 +8960,8 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
xop[1] = src;
xop[2] = clobber_reg;
- if (n >= 2)
- avr_asm_len ("clr %0", xop, len, 1);
+ if (n >= 2 + (avr_current_arch->n_segments > 1))
+ avr_asm_len ("mov %0,__zero_reg__", xop, len, 1);
else
avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
continue;
@@ -8221,14 +8993,13 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
}
}
- /* Use CLR to zero a value so that cc0 is set as expected
- for zero. */
+ /* Don't use CLR so that cc0 is set as expected. */
if (ival[n] == 0)
{
if (!clear_p)
- avr_asm_len ("clr %0", &xdest[n], len, 1);
-
+ avr_asm_len (ldreg_p ? "ldi %0,0" : "mov %0,__zero_reg__",
+ &xdest[n], len, 1);
continue;
}
@@ -8364,7 +9135,9 @@ const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
if (AVR_HAVE_MOVW
- && !test_hard_reg_class (LD_REGS, op[0]))
+ && !test_hard_reg_class (LD_REGS, op[0])
+ && (CONST_INT_P (op[1])
+ || CONST_DOUBLE_P (op[1])))
{
int len_clr, len_noclr;
@@ -8498,8 +9271,8 @@ const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
enum rtx_code comp = GET_CODE (operands[0]);
- int long_jump = (get_attr_length (insn) >= 4);
- int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
+ bool long_jump = get_attr_length (insn) >= 4;
+ bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
if (comp == GE)
comp = EQ;
@@ -8509,49 +9282,61 @@ avr_out_sbxx_branch (rtx insn, rtx operands[])
if (reverse)
comp = reverse_condition (comp);
- if (GET_CODE (operands[1]) == CONST_INT)
+ switch (GET_CODE (operands[1]))
{
- if (INTVAL (operands[1]) < 0x40)
- {
- if (comp == EQ)
- output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
- else
- output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
- }
+ default:
+ gcc_unreachable();
+
+ case CONST_INT:
+
+ if (low_io_address_operand (operands[1], QImode))
+ {
+ if (comp == EQ)
+ output_asm_insn ("sbis %i1,%2", operands);
+ else
+ output_asm_insn ("sbic %i1,%2", operands);
+ }
else
- {
- output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
- if (comp == EQ)
- output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
- else
- output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
- }
- }
- else /* GET_CODE (operands[1]) == REG */
- {
+ {
+ output_asm_insn ("in __tmp_reg__,%i1", operands);
+ if (comp == EQ)
+ output_asm_insn ("sbrs __tmp_reg__,%2", operands);
+ else
+ output_asm_insn ("sbrc __tmp_reg__,%2", operands);
+ }
+
+ break; /* CONST_INT */
+
+ case REG:
+
if (GET_MODE (operands[1]) == QImode)
- {
- if (comp == EQ)
- output_asm_insn (AS2 (sbrs,%1,%2), operands);
- else
- output_asm_insn (AS2 (sbrc,%1,%2), operands);
- }
- else /* HImode or SImode */
- {
- static char buf[] = "sbrc %A1,0";
- int bit_nr = INTVAL (operands[2]);
- buf[3] = (comp == EQ) ? 's' : 'c';
- buf[6] = 'A' + (bit_nr >> 3);
- buf[9] = '0' + (bit_nr & 7);
- output_asm_insn (buf, operands);
- }
- }
+ {
+ if (comp == EQ)
+ output_asm_insn ("sbrs %1,%2", operands);
+ else
+ output_asm_insn ("sbrc %1,%2", operands);
+ }
+ else /* HImode, PSImode or SImode */
+ {
+ static char buf[] = "sbrc %A1,0";
+ unsigned int bit_nr = UINTVAL (operands[2]);
+
+ buf[3] = (comp == EQ) ? 's' : 'c';
+ buf[6] = 'A' + (bit_nr / 8);
+ buf[9] = '0' + (bit_nr % 8);
+ output_asm_insn (buf, operands);
+ }
+
+ break; /* REG */
+ } /* switch */
if (long_jump)
- return (AS1 (rjmp,.+4) CR_TAB
- AS1 (jmp,%x3));
+ return ("rjmp .+4" CR_TAB
+ "jmp %x3");
+
if (!reverse)
- return AS1 (rjmp,%x3);
+ return "rjmp %x3";
+
return "";
}
@@ -8595,6 +9380,451 @@ avr_case_values_threshold (void)
return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
}
+
+/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
+
+static enum machine_mode
+avr_addr_space_address_mode (addr_space_t as)
+{
+ return as == ADDR_SPACE_PGMX ? PSImode : HImode;
+}
+
+
+/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
+
+static enum machine_mode
+avr_addr_space_pointer_mode (addr_space_t as)
+{
+ return as == ADDR_SPACE_PGMX ? PSImode : HImode;
+}
+
+
+/* Helper for following function. */
+
+static bool
+avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
+{
+ gcc_assert (REG_P (reg));
+
+ if (strict)
+ {
+ return REGNO (reg) == REG_Z;
+ }
+
+ /* Avoid combine to propagate hard regs. */
+
+ if (can_create_pseudo_p()
+ && REGNO (reg) < REG_Z)
+ {
+ return false;
+ }
+
+ return true;
+}
+
+
+/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
+
+static bool
+avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
+ bool strict, addr_space_t as)
+{
+ bool ok = false;
+
+ switch (as)
+ {
+ default:
+ gcc_unreachable();
+
+ case ADDR_SPACE_GENERIC:
+ return avr_legitimate_address_p (mode, x, strict);
+
+ case ADDR_SPACE_PGM:
+ case ADDR_SPACE_PGM1:
+ case ADDR_SPACE_PGM2:
+ case ADDR_SPACE_PGM3:
+ case ADDR_SPACE_PGM4:
+ case ADDR_SPACE_PGM5:
+
+ switch (GET_CODE (x))
+ {
+ case REG:
+ ok = avr_reg_ok_for_pgm_addr (x, strict);
+ break;
+
+ case POST_INC:
+ ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
+ break;
+
+ default:
+ break;
+ }
+
+ break; /* PGM */
+
+ case ADDR_SPACE_PGMX:
+ if (REG_P (x))
+ ok = (!strict
+ && can_create_pseudo_p());
+
+ if (LO_SUM == GET_CODE (x))
+ {
+ rtx hi = XEXP (x, 0);
+ rtx lo = XEXP (x, 1);
+
+ ok = (REG_P (hi)
+ && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
+ && REG_P (lo)
+ && REGNO (lo) == REG_Z);
+ }
+
+ break; /* PGMX */
+ }
+
+ if (avr_log.legitimate_address_p)
+ {
+ avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
+ "reload_completed=%d reload_in_progress=%d %s:",
+ ok, mode, strict, reload_completed, reload_in_progress,
+ reg_renumber ? "(reg_renumber)" : "");
+
+ if (GET_CODE (x) == PLUS
+ && REG_P (XEXP (x, 0))
+ && CONST_INT_P (XEXP (x, 1))
+ && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
+ && reg_renumber)
+ {
+ avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
+ true_regnum (XEXP (x, 0)));
+ }
+
+ avr_edump ("\n%r\n", x);
+ }
+
+ return ok;
+}
+
+
+/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
+
+static rtx
+avr_addr_space_legitimize_address (rtx x, rtx old_x,
+ enum machine_mode mode, addr_space_t as)
+{
+ if (ADDR_SPACE_GENERIC_P (as))
+ return avr_legitimize_address (x, old_x, mode);
+
+ if (avr_log.legitimize_address)
+ {
+ avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
+ }
+
+ return old_x;
+}
+
+
+/* Implement `TARGET_ADDR_SPACE_CONVERT'. */
+
+static rtx
+avr_addr_space_convert (rtx src, tree type_from, tree type_to)
+{
+ addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
+ addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
+
+ if (avr_log.progmem)
+ avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
+ src, type_from, type_to);
+
+ if (as_from != ADDR_SPACE_PGMX
+ && as_to == ADDR_SPACE_PGMX)
+ {
+ rtx new_src;
+ int n_segments = avr_current_arch->n_segments;
+ RTX_CODE code = GET_CODE (src);
+
+ if (CONST == code
+ && PLUS == GET_CODE (XEXP (src, 0))
+ && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
+ && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
+ {
+ HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
+ const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
+
+ new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
+ new_src = gen_rtx_CONST (PSImode,
+ plus_constant (new_src, offset));
+ return new_src;
+ }
+
+ if (SYMBOL_REF == code)
+ {
+ const char *name = XSTR (src, 0);
+
+ return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
+ }
+
+ src = force_reg (Pmode, src);
+
+ if (ADDR_SPACE_GENERIC_P (as_from)
+ || as_from == ADDR_SPACE_PGM
+ || n_segments == 1)
+ {
+ return gen_rtx_ZERO_EXTEND (PSImode, src);
+ }
+ else
+ {
+ int segment = avr_pgm_segment (as_from) % n_segments;
+
+ new_src = gen_reg_rtx (PSImode);
+ emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
+
+ return new_src;
+ }
+ }
+
+ return src;
+}
+
+
+/* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
+
+static bool
+avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
+{
+ if (subset == ADDR_SPACE_PGMX
+ && superset != ADDR_SPACE_PGMX)
+ {
+ return false;
+ }
+
+ return true;
+}
+
+
+/* Worker function for movmemhi insn.
+ XOP[0] Destination as MEM:BLK
+ XOP[1] Source " "
+ XOP[2] # Bytes to copy
+
+ Return TRUE if the expansion is accomplished.
+ Return FALSE if the operand combination is not supported. */
+
+bool
+avr_emit_movmemhi (rtx *xop)
+{
+ HOST_WIDE_INT count;
+ enum machine_mode loop_mode;
+ addr_space_t as = MEM_ADDR_SPACE (xop[1]);
+ rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
+ rtx a_hi8 = NULL_RTX;
+
+ if (avr_mem_pgm_p (xop[0]))
+ return false;
+
+ if (!CONST_INT_P (xop[2]))
+ return false;
+
+ count = INTVAL (xop[2]);
+ if (count <= 0)
+ return false;
+
+ a_src = XEXP (xop[1], 0);
+ a_dest = XEXP (xop[0], 0);
+
+ /* See if constant fits in 8 bits. */
+
+ loop_mode = (count <= 0x100) ? QImode : HImode;
+
+ if (PSImode == GET_MODE (a_src))
+ {
+ addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
+ a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
+ }
+ else
+ {
+ int seg = avr_pgm_segment (as);
+
+ addr1 = a_src;
+
+ if (seg > 0
+ && seg % avr_current_arch->n_segments > 0)
+ {
+ a_hi8 = GEN_INT (seg % avr_current_arch->n_segments);
+ }
+ }
+
+ if (a_hi8
+ && avr_current_arch->n_segments > 1)
+ {
+ emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
+ }
+ else if (!ADDR_SPACE_GENERIC_P (as))
+ {
+ as = ADDR_SPACE_PGM;
+ }
+
+ xas = GEN_INT (as);
+
+ /* Create loop counter register */
+
+ loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
+
+ /* Copy pointers into new pseudos - they will be changed */
+
+ addr0 = copy_to_mode_reg (HImode, a_dest);
+ addr1 = copy_to_mode_reg (HImode, addr1);
+
+ /* FIXME: Register allocator might come up with spill fails if it is left
+ on its own. Thus, we allocate the pointer registers by hand. */
+
+ emit_move_insn (lpm_addr_reg_rtx, addr1);
+ addr1 = lpm_addr_reg_rtx;
+
+ reg_x = gen_rtx_REG (HImode, REG_X);
+ emit_move_insn (reg_x, addr0);
+ addr0 = reg_x;
+
+ /* FIXME: Register allocator does a bad job and might spill address
+ register(s) inside the loop leading to additional move instruction
+ to/from stack which could clobber tmp_reg. Thus, do *not* emit
+ load and store as separate insns. Instead, we perform the copy
+ by means of one monolithic insn. */
+
+ if (ADDR_SPACE_GENERIC_P (as))
+ {
+ rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
+ = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
+
+ insn = fun (addr0, addr1, xas, loop_reg,
+ addr0, addr1, tmp_reg_rtx, loop_reg);
+ }
+ else if (as == ADDR_SPACE_PGM)
+ {
+ rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
+ = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
+
+ insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
+ AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
+ }
+ else
+ {
+ rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
+ = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
+
+ insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
+ AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
+ a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
+ }
+
+ set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
+ emit_insn (insn);
+
+ return true;
+}
+
+
+/* Print assembler for movmem_qi, movmem_hi insns...
+ $0, $4 : & dest
+ $1, $5 : & src
+ $2 : Address Space
+ $3, $7 : Loop register
+ $6 : Scratch register
+
+ ...and movmem_qi_elpm, movmem_hi_elpm insns.
+
+ $8, $9 : hh8 (& src)
+ $10 : RAMPZ_ADDR
+*/
+
+const char*
+avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
+{
+ addr_space_t as = (addr_space_t) INTVAL (xop[2]);
+ enum machine_mode loop_mode = GET_MODE (xop[3]);
+
+ bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
+
+ gcc_assert (REG_X == REGNO (xop[0])
+ && REG_Z == REGNO (xop[1]));
+
+ if (plen)
+ *plen = 0;
+
+ /* Loop label */
+
+ avr_asm_len ("0:", xop, plen, 0);
+
+ /* Load with post-increment */
+
+ switch (as)
+ {
+ default:
+ gcc_unreachable();
+
+ case ADDR_SPACE_GENERIC:
+
+ avr_asm_len ("ld %6,%a1+", xop, plen, 1);
+ break;
+
+ case ADDR_SPACE_PGM:
+
+ if (AVR_HAVE_LPMX)
+ avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
+ else
+ avr_asm_len ("lpm" CR_TAB
+ "adiw %1,1", xop, plen, 2);
+ break;
+
+ case ADDR_SPACE_PGM1:
+ case ADDR_SPACE_PGM2:
+ case ADDR_SPACE_PGM3:
+ case ADDR_SPACE_PGM4:
+ case ADDR_SPACE_PGM5:
+ case ADDR_SPACE_PGMX:
+
+ if (AVR_HAVE_ELPMX)
+ avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
+ else
+ avr_asm_len ("elpm" CR_TAB
+ "adiw %1,1", xop, plen, 2);
+
+ if (as == ADDR_SPACE_PGMX
+ && !AVR_HAVE_ELPMX)
+ {
+ avr_asm_len ("adc %8,__zero_reg__" CR_TAB
+ "out __RAMPZ__,%8", xop, plen, 2);
+ }
+
+ break;
+ }
+
+ /* Store with post-increment */
+
+ avr_asm_len ("st %a0+,%6", xop, plen, 1);
+
+ /* Decrement loop-counter and set Z-flag */
+
+ if (QImode == loop_mode)
+ {
+ avr_asm_len ("dec %3", xop, plen, 1);
+ }
+ else if (sbiw_p)
+ {
+ avr_asm_len ("sbiw %3,1", xop, plen, 1);
+ }
+ else
+ {
+ avr_asm_len ("subi %A3,1" CR_TAB
+ "sbci %B3,0", xop, plen, 2);
+ }
+
+ /* Loop until zero */
+
+ return avr_asm_len ("brne 0b", xop, plen, 1);
+}
+
+
+
/* Helper for __builtin_avr_delay_cycles */
static void
@@ -8655,6 +9885,345 @@ avr_expand_delay_cycles (rtx operands0)
}
}
+
+/* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
+
+static double_int
+avr_double_int_push_digit (double_int val, int base,
+ unsigned HOST_WIDE_INT digit)
+{
+ val = 0 == base
+ ? double_int_lshift (val, 32, 64, false)
+ : double_int_mul (val, uhwi_to_double_int (base));
+
+ return double_int_add (val, uhwi_to_double_int (digit));
+}
+
+
+/* Compute the image of x under f, i.e. perform x --> f(x) */
+
+static int
+avr_map (double_int f, int x)
+{
+ return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
+}
+
+
+/* Return the map R that reverses the bits of byte B.
+
+ R(0) = (0 7) o (1 6) o (2 5) o (3 4)
+ R(1) = (8 15) o (9 14) o (10 13) o (11 12)
+
+ Notice that R o R = id. */
+
+static double_int
+avr_revert_map (int b)
+{
+ int i;
+ double_int r = double_int_zero;
+
+ for (i = 16-1; i >= 0; i--)
+ r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
+
+ return r;
+}
+
+
+/* Return the map R that swaps bit-chunks of size SIZE in byte B.
+
+ R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
+ R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
+
+ R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
+ R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
+
+ Notice that R o R = id. */
+
+static double_int
+avr_swap_map (int size, int b)
+{
+ int i;
+ double_int r = double_int_zero;
+
+ for (i = 16-1; i >= 0; i--)
+ r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
+
+ return r;
+}
+
+
+/* Return Identity. */
+
+static double_int
+avr_id_map (void)
+{
+ int i;
+ double_int r = double_int_zero;
+
+ for (i = 16-1; i >= 0; i--)
+ r = avr_double_int_push_digit (r, 16, i);
+
+ return r;
+}
+
+
+enum
+ {
+ SIG_ID = 0,
+ /* for QI and HI */
+ SIG_ROL = 0xf,
+ SIG_REVERT_0 = 1 << 4,
+ SIG_SWAP1_0 = 1 << 5,
+ /* HI only */
+ SIG_REVERT_1 = 1 << 6,
+ SIG_SWAP1_1 = 1 << 7,
+ SIG_SWAP4_0 = 1 << 8,
+ SIG_SWAP4_1 = 1 << 9
+ };
+
+
+/* Return basic map with signature SIG. */
+
+static double_int
+avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
+{
+ if (sig == SIG_ID) return avr_id_map ();
+ else if (sig == SIG_REVERT_0) return avr_revert_map (0);
+ else if (sig == SIG_REVERT_1) return avr_revert_map (1);
+ else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
+ else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
+ else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
+ else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
+ else
+ gcc_unreachable();
+}
+
+
+/* Return the Hamming distance (a count in 0..8, hence int) between the B-th byte of A and C. */
+
+static int
+avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
+{
+ int i, hamming = 0;
+
+ for (i = 8*b; i < n && i < 8*b + 8; i++)
+ {
+ int ai = avr_map (a, i);
+ int ci = avr_map (c, i);
+
+ hamming += ai != ci && (strict || (ai < n && ci < n));
+ }
+
+ return hamming;
+}
+
+
+/* Return the non-strict Hamming distance between A and B. */
+
+#define avr_map_hamming_nonstrict(N,A,B) \
+ (+ avr_map_hamming_byte (N, 0, A, B, false) \
+ + avr_map_hamming_byte (N, 1, A, B, false))
+
+
+/* Return TRUE iff A and B represent the same mapping. */
+
+#define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
+
+
+/* Return TRUE iff A is a map of signature S. Notice that there is no
+ 1:1 correspondance between maps and signatures and thus this is
+ only supported for basic signatures recognized by avr_sig_map(). */
+
+#define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
+
+
+/* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
+
+static const char*
+avr_out_swap_bits (rtx *xop, int *plen)
+{
+ xop[1] = tmp_reg_rtx;
+
+ return avr_asm_len ("mov %1,%0" CR_TAB
+ "andi %0,0xaa" CR_TAB
+ "eor %1,%0" CR_TAB
+ "lsr %0" CR_TAB
+ "lsl %1" CR_TAB
+ "or %0,%1", xop, plen, 6);
+}
+
+/* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
+
+static const char*
+avr_out_revert_bits (rtx *xop, int *plen)
+{
+ return avr_asm_len ("inc __zero_reg__" "\n"
+ "0:\tror %1" CR_TAB
+ "rol %0" CR_TAB
+ "lsl __zero_reg__" CR_TAB
+ "brne 0b", xop, plen, 5);
+}
+
+
+/* If OUT_P = true: Output BST/BLD instruction according to MAP.
+ If OUT_P = false: Just dry-run and fix XOP[1] to resolve
+ early-clobber conflicts if XOP[0] = XOP[1]. */
+
+static void
+avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
+{
+ int bit_dest, b, clobber = 0;
+
+ /* T-flag contains this bit of the source, i.e. of XOP[1] */
+ int t_bit_src = -1;
+
+ if (!optimize && !out_p)
+ {
+ avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
+ xop[1] = tmp_reg_rtx;
+ return;
+ }
+
+ /* We order the operations according to the requested source bit b. */
+
+ for (b = 0; b < n_bits; b++)
+ for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
+ {
+ int bit_src = avr_map (map, bit_dest);
+
+ if (b != bit_src
+ /* Same position: No need to copy as the caller did MOV. */
+ || bit_dest == bit_src
+ /* Accessing bits 8..f for 8-bit version is void. */
+ || bit_src >= n_bits)
+ continue;
+
+ if (t_bit_src != bit_src)
+ {
+ /* Source bit is not yet in T: Store it to T. */
+
+ t_bit_src = bit_src;
+
+ if (out_p)
+ {
+ xop[2] = GEN_INT (bit_src);
+ avr_asm_len ("bst %T1%T2", xop, plen, 1);
+ }
+ else if (clobber & (1 << bit_src))
+ {
+ /* Bit to be read was written already: Backup input
+ to resolve early-clobber conflict. */
+
+ avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
+ xop[1] = tmp_reg_rtx;
+ return;
+ }
+ }
+
+ /* Load destination bit with T. */
+
+ if (out_p)
+ {
+ xop[2] = GEN_INT (bit_dest);
+ avr_asm_len ("bld %T0%T2", xop, plen, 1);
+ }
+
+ clobber |= 1 << bit_dest;
+ }
+}
+
+
+/* Print assembler code for `map_bitsqi' and `map_bitshi'. */
+
+const char*
+avr_out_map_bits (rtx insn, rtx *operands, int *plen)
+{
+ bool copy_0, copy_1;
+ int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
+ double_int map = rtx_to_double_int (operands[1]);
+ rtx xop[3];
+
+ xop[0] = operands[0];
+ xop[1] = operands[2];
+
+ if (plen)
+ *plen = 0;
+ else if (flag_print_asm_name)
+ avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
+
+ switch (n_bits)
+ {
+ default:
+ gcc_unreachable();
+
+ case 8:
+ if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
+ {
+ return avr_out_swap_bits (xop, plen);
+ }
+ else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
+ {
+ if (REGNO (xop[0]) == REGNO (xop[1])
+ || !reg_unused_after (insn, xop[1]))
+ {
+ avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
+ xop[1] = tmp_reg_rtx;
+ }
+
+ return avr_out_revert_bits (xop, plen);
+ }
+
+ break; /* 8 */
+
+ case 16:
+
+ break; /* 16 */
+ }
+
+ /* Copy whole byte is cheaper than moving bits that stay at the same
+ position. Some bits in a byte stay at the same position iff the
+ strict Hamming distance to Identity is not 8. */
+
+ copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
+ copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
+
+ /* Perform the move(s) just worked out. */
+
+ if (n_bits == 8)
+ {
+ if (REGNO (xop[0]) == REGNO (xop[1]))
+ {
+ /* Fix early-clobber clashes.
+ Notice XOP[0] has no early-clobber in its constraint. */
+
+ avr_move_bits (xop, map, n_bits, false, plen);
+ }
+ else if (copy_0)
+ {
+ avr_asm_len ("mov %0,%1", xop, plen, 1);
+ }
+ }
+ else if (AVR_HAVE_MOVW && copy_0 && copy_1)
+ {
+ avr_asm_len ("movw %A0,%A1", xop, plen, 1);
+ }
+ else
+ {
+ if (copy_0)
+ avr_asm_len ("mov %A0,%A1", xop, plen, 1);
+
+ if (copy_1)
+ avr_asm_len ("mov %B0,%B1", xop, plen, 1);
+ }
+
+ /* Move individual bits. */
+
+ avr_move_bits (xop, map, n_bits, true, plen);
+
+ return "";
+}
+
+
/* IDs for all the AVR builtins. */
enum avr_builtin_id
@@ -8665,6 +10234,8 @@ enum avr_builtin_id
AVR_BUILTIN_WDR,
AVR_BUILTIN_SLEEP,
AVR_BUILTIN_SWAP,
+ AVR_BUILTIN_MAP8,
+ AVR_BUILTIN_MAP16,
AVR_BUILTIN_FMUL,
AVR_BUILTIN_FMULS,
AVR_BUILTIN_FMULSU,
@@ -8721,6 +10292,18 @@ avr_init_builtins (void)
long_unsigned_type_node,
NULL_TREE);
+ tree uchar_ftype_ulong_uchar
+ = build_function_type_list (unsigned_char_type_node,
+ long_unsigned_type_node,
+ unsigned_char_type_node,
+ NULL_TREE);
+
+ tree uint_ftype_ullong_uint
+ = build_function_type_list (unsigned_type_node,
+ long_long_unsigned_type_node,
+ unsigned_type_node,
+ NULL_TREE);
+
DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
@@ -8737,6 +10320,11 @@ avr_init_builtins (void)
DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
AVR_BUILTIN_FMULSU);
+ DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
+ AVR_BUILTIN_MAP8);
+ DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
+ AVR_BUILTIN_MAP16);
+
avr_init_builtin_int24 ();
}
@@ -8760,7 +10348,9 @@ bdesc_2arg[] =
{
{ CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
{ CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
- { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
+ { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
+ { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
+ { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
};
/* Subroutine of avr_expand_builtin to take care of unop insns. */
@@ -8876,6 +10466,7 @@ avr_expand_builtin (tree exp, rtx target,
size_t i;
const struct avr_builtin_description *d;
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
+ const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
unsigned int id = DECL_FUNCTION_CODE (fndecl);
tree arg0;
rtx op0;
@@ -8908,12 +10499,37 @@ avr_expand_builtin (tree exp, rtx target,
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
if (! CONST_INT_P (op0))
- error ("__builtin_avr_delay_cycles expects a"
- " compile time integer constant.");
+ error ("%s expects a compile time integer constant", bname);
avr_expand_delay_cycles (op0);
return 0;
}
+
+ case AVR_BUILTIN_MAP8:
+ {
+ arg0 = CALL_EXPR_ARG (exp, 0);
+ op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ if (!CONST_INT_P (op0))
+ {
+ error ("%s expects a compile time long integer constant"
+ " as first argument", bname);
+ return target;
+ }
+ }
+
+ case AVR_BUILTIN_MAP16:
+ {
+ arg0 = CALL_EXPR_ARG (exp, 0);
+ op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ if (!const_double_operand (op0, VOIDmode))
+ {
+ error ("%s expects a compile time long long integer constant"
+ " as first argument", bname);
+ return target;
+ }
+ }
}
for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
diff --git a/gcc/config/avr/avr.h b/gcc/config/avr/avr.h
index 7a3bc738e92..96133b8da28 100644
--- a/gcc/config/avr/avr.h
+++ b/gcc/config/avr/avr.h
@@ -23,7 +23,8 @@ along with GCC; see the file COPYING3. If not see
/* Names to predefine in the preprocessor for this target machine. */
-struct base_arch_s {
+struct base_arch_s
+{
/* Assembler only. */
int asm_only;
@@ -54,6 +55,13 @@ struct base_arch_s {
/* Default start of data section address for architecture. */
int default_data_section_start;
+ /* Offset between SFR address and RAM address:
+ SFR-address = RAM-address - sfr_offset */
+ int sfr_offset;
+
+ /* Number of 64k segments in the flash. */
+ int n_segments;
+
const char *const macro;
/* Architecture name. */
@@ -131,6 +139,8 @@ extern const struct base_arch_s avr_arch_types[];
#define AVR_HAVE_MUL (avr_current_arch->have_mul)
#define AVR_HAVE_MOVW (avr_current_arch->have_movw_lpmx)
#define AVR_HAVE_LPMX (avr_current_arch->have_movw_lpmx)
+#define AVR_HAVE_ELPM (avr_current_arch->have_elpm)
+#define AVR_HAVE_ELPMX (avr_current_arch->have_elpmx)
#define AVR_HAVE_RAMPZ (avr_current_arch->have_elpm)
#define AVR_HAVE_EIJMP_EICALL (avr_current_arch->have_eijmp_eicall)
#define AVR_HAVE_8BIT_SP (avr_current_device->short_sp || TARGET_TINY_STACK)
@@ -391,6 +401,20 @@ typedef struct avr_args {
#define NO_FUNCTION_CSE
+
+#define ADDR_SPACE_PGM 1
+#define ADDR_SPACE_PGM1 2
+#define ADDR_SPACE_PGM2 3
+#define ADDR_SPACE_PGM3 4
+#define ADDR_SPACE_PGM4 5
+#define ADDR_SPACE_PGM5 6
+#define ADDR_SPACE_PGMX 7
+
+#define REGISTER_TARGET_PRAGMAS() \
+ do { \
+ avr_register_target_pragmas(); \
+ } while (0)
+
#define TEXT_SECTION_ASM_OP "\t.text"
#define DATA_SECTION_ASM_OP "\t.data"
@@ -637,3 +661,5 @@ struct GTY(()) machine_function
#define PUSH_ROUNDING(X) (X)
#define ACCUMULATE_OUTGOING_ARGS avr_accumulate_outgoing_args()
+
+#define INIT_EXPANDERS avr_init_expanders()
diff --git a/gcc/config/avr/avr.md b/gcc/config/avr/avr.md
index 59330104fa0..bddfe933ee6 100644
--- a/gcc/config/avr/avr.md
+++ b/gcc/config/avr/avr.md
@@ -28,9 +28,21 @@
;; j Branch condition.
;; k Reverse branch condition.
;;..m..Constant Direct Data memory address.
+;; i Print the SFR address equivalent of a CONST_INT or a CONST_INT
+;; RAM address. The resulting address is suitable to be used in IN/OUT.
;; o Displacement for (mem (plus (reg) (const_int))) operands.
;; p POST_INC or PRE_DEC address as a pointer (X, Y, Z)
;; r POST_INC or PRE_DEC address as a register (r26, r28, r30)
+;; T/T Print operand suitable for BLD/BST instruction, i.e. register and
+;; bit number. This gets 2 operands: The first %T gets a REG_P and
+;; just caches the operand for the next %T. The second %T gets
+;; a CONST_INT that represents a bit position.
+;; Example: With %0 = (reg:HI 18) and %1 = (const_int 13)
+;; "%T0%T1" it will print "r19,5".
+;; Notice that you must not write a comma between %T0 and %T1.
+;; T/t Similar to above, but don't print the comma and the bit number.
+;; Example: With %0 = (reg:HI 18) and %1 = (const_int 13)
+;; "%T0%t1" it will print "r19".
;;..x..Constant Direct Program memory address.
;; ~ Output 'r' if not AVR_HAVE_JMP_CALL.
;; ! Output 'e' if AVR_HAVE_EIJMP_EICALL.
@@ -42,21 +54,27 @@
(REG_Z 30)
(REG_W 24)
(REG_SP 32)
+ (LPM_REGNO 0) ; implicit target register of LPM
(TMP_REGNO 0) ; temporary register r0
(ZERO_REGNO 1) ; zero register r1
-
- (SREG_ADDR 0x5F)
- (RAMPZ_ADDR 0x5B)
+
+ ;; RAM addresses of some SFRs common to all Devices.
+
+ (SREG_ADDR 0x5F) ; Status Register
+ (SP_ADDR 0x5D) ; Stack Pointer
+ (RAMPZ_ADDR 0x5B) ; Address' high part when loading via ELPM
])
(define_c_enum "unspec"
[UNSPEC_STRLEN
+ UNSPEC_MOVMEM
UNSPEC_INDEX_JMP
UNSPEC_FMUL
UNSPEC_FMULS
UNSPEC_FMULSU
UNSPEC_COPYSIGN
UNSPEC_IDENTITY
+ UNSPEC_MAP_BITS
])
(define_c_enum "unspecv"
@@ -127,24 +145,25 @@
"out_bitop, out_plus, out_plus_noclobber, addto_sp,
tsthi, tstpsi, tstsi, compare, call,
mov8, mov16, mov24, mov32, reload_in16, reload_in24, reload_in32,
+ xload, movmem,
ashlqi, ashrqi, lshrqi,
ashlhi, ashrhi, lshrhi,
ashlsi, ashrsi, lshrsi,
ashlpsi, ashrpsi, lshrpsi,
+ map_bits,
no"
(const_string "no"))
;; Flavours of instruction set architecture (ISA), used in enabled attribute
-;; mov: ISA has no MOVW
-;; movw: ISA has MOVW
-;; rjmp: ISA has no CALL/JMP
-;; jmp: ISA has CALL/JMP
-;; ijmp: ISA has no EICALL/EIJMP
-;; eijmp: ISA has EICALL/EIJMP
+;; mov : ISA has no MOVW movw : ISA has MOVW
+;; rjmp : ISA has no CALL/JMP jmp : ISA has CALL/JMP
+;; ijmp : ISA has no EICALL/EIJMP eijmp : ISA has EICALL/EIJMP
+;; lpm : ISA has no LPMX lpmx : ISA has LPMX
+;; elpm : ISA has ELPM but no ELPMX elpmx : ISA has ELPMX
(define_attr "isa"
- "mov,movw, rjmp,jmp, ijmp,eijmp,
+ "mov,movw, rjmp,jmp, ijmp,eijmp, lpm,lpmx, elpm,elpmx,
standard"
(const_string "standard"))
@@ -175,6 +194,22 @@
(and (eq_attr "isa" "eijmp")
(match_test "AVR_HAVE_EIJMP_EICALL"))
(const_int 1)
+
+ (and (eq_attr "isa" "lpm")
+ (match_test "!AVR_HAVE_LPMX"))
+ (const_int 1)
+
+ (and (eq_attr "isa" "lpmx")
+ (match_test "AVR_HAVE_LPMX"))
+ (const_int 1)
+
+ (and (eq_attr "isa" "elpm")
+ (match_test "AVR_HAVE_ELPM && !AVR_HAVE_ELPMX"))
+ (const_int 1)
+
+ (and (eq_attr "isa" "elpmx")
+ (match_test "AVR_HAVE_ELPMX"))
+ (const_int 1)
] (const_int 0)))
@@ -242,12 +277,10 @@
;; even though its function is identical to that in builtins.c
(define_expand "nonlocal_goto"
- [
- (use (match_operand 0 "general_operand"))
- (use (match_operand 1 "general_operand"))
- (use (match_operand 2 "general_operand"))
- (use (match_operand 3 "general_operand"))
- ]
+ [(use (match_operand 0 "general_operand"))
+ (use (match_operand 1 "general_operand"))
+ (use (match_operand 2 "general_operand"))
+ (use (match_operand 3 "general_operand"))]
""
{
rtx r_label = copy_to_reg (operands[1]);
@@ -315,6 +348,168 @@
"")
;;========================================================================
+;; Move stuff around
+
+(define_expand "load<mode>_libgcc"
+ [(set (match_dup 3)
+ (match_dup 2))
+ (set (reg:MOVMODE 22)
+ (match_operand:MOVMODE 1 "memory_operand" ""))
+ (set (match_operand:MOVMODE 0 "register_operand" "")
+ (reg:MOVMODE 22))]
+ "avr_load_libgcc_p (operands[1])"
+ {
+ operands[3] = gen_rtx_REG (HImode, REG_Z);
+ operands[2] = force_operand (XEXP (operands[1], 0), NULL_RTX);
+ operands[1] = replace_equiv_address (operands[1], operands[3]);
+ set_mem_addr_space (operands[1], ADDR_SPACE_PGM);
+ })
+
+(define_insn "load_<mode>_libgcc"
+ [(set (reg:MOVMODE 22)
+ (match_operand:MOVMODE 0 "memory_operand" "m,m"))]
+ "avr_load_libgcc_p (operands[0])
+ && REG_P (XEXP (operands[0], 0))
+ && REG_Z == REGNO (XEXP (operands[0], 0))"
+ {
+ operands[0] = GEN_INT (GET_MODE_SIZE (<MODE>mode));
+ return "%~call __load_%0";
+ }
+ [(set_attr "length" "1,2")
+ (set_attr "isa" "rjmp,jmp")
+ (set_attr "cc" "clobber")])
+
+
+(define_insn_and_split "xload8_A"
+ [(set (match_operand:QI 0 "register_operand" "=r")
+ (match_operand:QI 1 "memory_operand" "m"))
+ (clobber (reg:HI REG_Z))]
+ "can_create_pseudo_p()
+ && avr_mem_pgmx_p (operands[1])
+ && REG_P (XEXP (operands[1], 0))"
+ { gcc_unreachable(); }
+ "&& 1"
+ [(clobber (const_int 0))]
+ {
+ rtx insn, addr = XEXP (operands[1], 0);
+ rtx hi8 = gen_reg_rtx (QImode);
+ rtx reg_z = gen_rtx_REG (HImode, REG_Z);
+
+ emit_move_insn (reg_z, simplify_gen_subreg (HImode, addr, PSImode, 0));
+ emit_move_insn (hi8, simplify_gen_subreg (QImode, addr, PSImode, 2));
+
+ insn = emit_insn (gen_xload_8 (operands[0], hi8));
+ set_mem_addr_space (SET_SRC (single_set (insn)),
+ MEM_ADDR_SPACE (operands[1]));
+ DONE;
+ })
+
+(define_insn_and_split "xload<mode>_A"
+ [(set (match_operand:MOVMODE 0 "register_operand" "=r")
+ (match_operand:MOVMODE 1 "memory_operand" "m"))
+ (clobber (reg:QI 21))
+ (clobber (reg:HI REG_Z))]
+ "QImode != <MODE>mode
+ && can_create_pseudo_p()
+ && avr_mem_pgmx_p (operands[1])
+ && REG_P (XEXP (operands[1], 0))"
+ { gcc_unreachable(); }
+ "&& 1"
+ [(clobber (const_int 0))]
+ {
+ rtx addr = XEXP (operands[1], 0);
+ rtx reg_z = gen_rtx_REG (HImode, REG_Z);
+ rtx addr_hi8 = simplify_gen_subreg (QImode, addr, PSImode, 2);
+ addr_space_t as = MEM_ADDR_SPACE (operands[1]);
+ rtx hi8, insn;
+
+ emit_move_insn (reg_z, simplify_gen_subreg (HImode, addr, PSImode, 0));
+
+ if (avr_xload_libgcc_p (<MODE>mode))
+ {
+ emit_move_insn (gen_rtx_REG (QImode, 21), addr_hi8);
+ insn = emit_insn (gen_xload_<mode>_libgcc ());
+ emit_move_insn (operands[0], gen_rtx_REG (<MODE>mode, 22));
+ }
+ else if (avr_current_arch->n_segments == 1
+ && GET_MODE_SIZE (<MODE>mode) > 2
+ && !AVR_HAVE_LPMX)
+ {
+ rtx src = gen_rtx_MEM (<MODE>mode, reg_z);
+
+ as = ADDR_SPACE_PGM;
+ insn = emit_insn (gen_load_<mode>_libgcc (src));
+ emit_move_insn (operands[0], gen_rtx_REG (<MODE>mode, 22));
+ }
+ else
+ {
+ hi8 = gen_reg_rtx (QImode);
+ emit_move_insn (hi8, addr_hi8);
+ insn = emit_insn (gen_xload_<mode> (operands[0], hi8));
+ }
+
+ set_mem_addr_space (SET_SRC (single_set (insn)), as);
+
+ DONE;
+ })
+
+;; Move value from address space pgmx to a register
+;; These insns must be prior to respective generic move insn.
+
+(define_insn "xload_8"
+ [(set (match_operand:QI 0 "register_operand" "=r")
+ (mem:QI (lo_sum:PSI (match_operand:QI 1 "register_operand" "r")
+ (reg:HI REG_Z))))]
+ ""
+ {
+ return avr_out_xload (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "xload")
+ (set_attr "cc" "clobber")])
+
+;; "xload_hi_libgcc"
+;; "xload_psi_libgcc"
+;; "xload_si_libgcc"
+;; "xload_sf_libgcc"
+(define_insn "xload_<mode>_libgcc"
+ [(set (reg:MOVMODE 22)
+ (mem:MOVMODE (lo_sum:PSI (reg:QI 21)
+ (reg:HI REG_Z))))
+ (clobber (reg:QI 21))
+ (clobber (reg:HI REG_Z))]
+ "<MODE>mode != QImode
+ && avr_xload_libgcc_p (<MODE>mode)"
+ {
+ rtx x_bytes = GEN_INT (GET_MODE_SIZE (<MODE>mode));
+
+ /* Devices with ELPM* also have CALL. */
+
+ output_asm_insn ("call __xload_%0", &x_bytes);
+ return "";
+ }
+ [(set_attr "length" "2")
+ (set_attr "cc" "clobber")])
+
+;; "xload_hi"
+;; "xload_psi"
+;; "xload_si"
+;; "xload_sf"
+(define_insn "xload_<mode>"
+ [(set (match_operand:MOVMODE 0 "register_operand" "=r")
+ (mem:MOVMODE (lo_sum:PSI (match_operand:QI 1 "register_operand" "r")
+ (reg:HI REG_Z))))
+ (clobber (scratch:HI))
+ (clobber (reg:HI REG_Z))]
+ "<MODE>mode != QImode
+ && !avr_xload_libgcc_p (<MODE>mode)"
+ {
+ return avr_out_xload (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "xload")
+ (set_attr "cc" "clobber")])
+
+
+;; General move expanders
;; "movqi"
;; "movhi"
@@ -327,12 +522,40 @@
(match_operand:MOVMODE 1 "general_operand" ""))]
""
{
- /* One of the ops has to be in a register. */
- if (!register_operand (operands[0], <MODE>mode)
- && !(register_operand (operands[1], <MODE>mode)
- || CONST0_RTX (<MODE>mode) == operands[1]))
+ rtx dest = operands[0];
+ rtx src = operands[1];
+
+ if (avr_mem_pgm_p (dest))
+ DONE;
+
+ /* One of the operands has to be in a register. */
+ if (!register_operand (dest, <MODE>mode)
+ && !(register_operand (src, <MODE>mode)
+ || src == CONST0_RTX (<MODE>mode)))
+ {
+ operands[1] = src = copy_to_mode_reg (<MODE>mode, src);
+ }
+
+ if (avr_mem_pgmx_p (src))
+ {
+ rtx addr = XEXP (src, 0);
+
+ if (!REG_P (addr))
+ src = replace_equiv_address (src, copy_to_mode_reg (PSImode, addr));
+
+ if (QImode == <MODE>mode)
+ emit_insn (gen_xload8_A (dest, src));
+ else
+ emit_insn (gen_xload<mode>_A (dest, src));
+
+ DONE;
+ }
+
+ if (avr_load_libgcc_p (src))
{
- operands[1] = copy_to_mode_reg (<MODE>mode, operands[1]);
+ /* For the small devices, do loads per libgcc call. */
+ emit_insn (gen_load<mode>_libgcc (dest, src));
+ DONE;
}
})
@@ -346,11 +569,14 @@
;; so this may still be a win for registers live across function calls.
(define_insn "movqi_insn"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=r,d,Qm,r,q,r,*r")
- (match_operand:QI 1 "general_operand" "rL,i,rL,Qm,r,q,i"))]
- "(register_operand (operands[0],QImode)
- || register_operand (operands[1], QImode) || const0_rtx == operands[1])"
- "* return output_movqi (insn, operands, NULL);"
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=r ,d,Qm,r ,q,r,*r")
+ (match_operand:QI 1 "nox_general_operand" "rL,i,rL,Qm,r,q,i"))]
+ "register_operand (operands[0], QImode)
+ || register_operand (operands[1], QImode)
+ || const0_rtx == operands[1]"
+ {
+ return output_movqi (insn, operands, NULL);
+ }
[(set_attr "length" "1,1,5,5,1,1,4")
(set_attr "adjust_len" "mov8")
(set_attr "cc" "none,none,clobber,clobber,none,none,clobber")])
@@ -425,14 +651,17 @@
(set_attr "cc" "none")])
(define_insn "*movhi"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,d,*r,q,r")
- (match_operand:HI 1 "general_operand" "r,L,m,rL,i,i,r,q"))]
- "(register_operand (operands[0],HImode)
- || register_operand (operands[1],HImode) || const0_rtx == operands[1])"
- "* return output_movhi (insn, operands, NULL);"
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m ,d,*r,q,r")
+ (match_operand:HI 1 "nox_general_operand" "r,L,m,rL,i,i ,r,q"))]
+ "register_operand (operands[0], HImode)
+ || register_operand (operands[1], HImode)
+ || const0_rtx == operands[1]"
+ {
+ return output_movhi (insn, operands, NULL);
+ }
[(set_attr "length" "2,2,6,7,2,6,5,2")
(set_attr "adjust_len" "mov16")
- (set_attr "cc" "none,clobber,clobber,clobber,none,clobber,none,none")])
+ (set_attr "cc" "none,none,clobber,clobber,none,clobber,none,none")])
(define_peephole2 ; movw
[(set (match_operand:QI 0 "even_register_operand" "")
@@ -462,6 +691,40 @@
operands[5] = gen_rtx_REG (HImode, REGNO (operands[3]));
})
+;; For LPM loads from AS1 we split
+;; R = *Z
+;; to
+;; R = *Z++
+;; Z = Z - sizeof (R)
+;;
+;; so that the second instruction can be optimized out.
+
+(define_split ; "split-lpmx"
+ [(set (match_operand:HISI 0 "register_operand" "")
+ (match_operand:HISI 1 "memory_operand" ""))]
+ "reload_completed
+ && AVR_HAVE_LPMX"
+ [(set (match_dup 0)
+ (match_dup 2))
+ (set (match_dup 3)
+ (plus:HI (match_dup 3)
+ (match_dup 4)))]
+ {
+ rtx addr = XEXP (operands[1], 0);
+
+ if (!avr_mem_pgm_p (operands[1])
+ || !REG_P (addr)
+ || reg_overlap_mentioned_p (addr, operands[0]))
+ {
+ FAIL;
+ }
+
+ operands[2] = replace_equiv_address (operands[1],
+ gen_rtx_POST_INC (Pmode, addr));
+ operands[3] = addr;
+ operands[4] = gen_int_mode (-GET_MODE_SIZE (<MODE>mode), HImode);
+ })
+
;;==========================================================================
;; xpointer move (24 bit)
@@ -492,7 +755,7 @@
(define_insn "*movpsi"
[(set (match_operand:PSI 0 "nonimmediate_operand" "=r,r,r ,Qm,!d,r")
- (match_operand:PSI 1 "general_operand" "r,L,Qm,rL,i ,i"))]
+ (match_operand:PSI 1 "nox_general_operand" "r,L,Qm,rL,i ,i"))]
"register_operand (operands[0], PSImode)
|| register_operand (operands[1], PSImode)
|| const0_rtx == operands[1]"
@@ -501,7 +764,7 @@
}
[(set_attr "length" "3,3,8,9,4,10")
(set_attr "adjust_len" "mov24")
- (set_attr "cc" "none,set_zn,clobber,clobber,clobber,clobber")])
+ (set_attr "cc" "none,none,clobber,clobber,none,clobber")])
;;==========================================================================
;; move double word (32 bit)
@@ -532,23 +795,24 @@
(define_insn "*movsi"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
- (match_operand:SI 1 "general_operand" "r,L,Qm,rL,i,i"))]
- "(register_operand (operands[0],SImode)
- || register_operand (operands[1],SImode) || const0_rtx == operands[1])"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r ,Qm,!d,r")
+ (match_operand:SI 1 "nox_general_operand" "r,L,Qm,rL,i ,i"))]
+ "register_operand (operands[0], SImode)
+ || register_operand (operands[1], SImode)
+ || const0_rtx == operands[1]"
{
return output_movsisf (insn, operands, NULL);
}
[(set_attr "length" "4,4,8,9,4,10")
(set_attr "adjust_len" "mov32")
- (set_attr "cc" "none,set_zn,clobber,clobber,clobber,clobber")])
+ (set_attr "cc" "none,none,clobber,clobber,none,clobber")])
;; fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
;; move floating point numbers (32 bit)
(define_insn "*movsf"
- [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
- (match_operand:SF 1 "general_operand" "r,G,Qm,rG,F,F"))]
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r ,Qm,!d,r")
+ (match_operand:SF 1 "nox_general_operand" "r,G,Qm,rG,F ,F"))]
"register_operand (operands[0], SFmode)
|| register_operand (operands[1], SFmode)
|| operands[1] == CONST0_RTX (SFmode)"
@@ -557,7 +821,7 @@
}
[(set_attr "length" "4,4,8,9,4,10")
(set_attr "adjust_len" "mov32")
- (set_attr "cc" "none,set_zn,clobber,clobber,clobber,clobber")])
+ (set_attr "cc" "none,none,clobber,clobber,none,clobber")])
(define_peephole2 ; *reload_insf
[(match_scratch:QI 2 "d")
@@ -585,166 +849,164 @@
;;=========================================================================
;; move string (like memcpy)
-;; implement as RTL loop
(define_expand "movmemhi"
[(parallel [(set (match_operand:BLK 0 "memory_operand" "")
- (match_operand:BLK 1 "memory_operand" ""))
- (use (match_operand:HI 2 "const_int_operand" ""))
- (use (match_operand:HI 3 "const_int_operand" ""))])]
+ (match_operand:BLK 1 "memory_operand" ""))
+ (use (match_operand:HI 2 "const_int_operand" ""))
+ (use (match_operand:HI 3 "const_int_operand" ""))])]
""
- "{
- int prob;
- HOST_WIDE_INT count;
- enum machine_mode mode;
- rtx label = gen_label_rtx ();
- rtx loop_reg;
- rtx jump;
-
- /* Copy pointers into new psuedos - they will be changed. */
- rtx addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
- rtx addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
-
- /* Create rtx for tmp register - we use this as scratch. */
- rtx tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
-
- if (GET_CODE (operands[2]) != CONST_INT)
- FAIL;
-
- count = INTVAL (operands[2]);
- if (count <= 0)
+ {
+ if (avr_emit_movmemhi (operands))
+ DONE;
+
FAIL;
+ })
- /* Work out branch probability for latter use. */
- prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / count;
-
- /* See if constant fit 8 bits. */
- mode = (count < 0x100) ? QImode : HImode;
- /* Create loop counter register. */
- loop_reg = copy_to_mode_reg (mode, gen_int_mode (count, mode));
-
- /* Now create RTL code for move loop. */
- /* Label at top of loop. */
- emit_label (label);
-
- /* Move one byte into scratch and inc pointer. */
- emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, addr1));
- emit_move_insn (addr1, gen_rtx_PLUS (Pmode, addr1, const1_rtx));
-
- /* Move to mem and inc pointer. */
- emit_move_insn (gen_rtx_MEM (QImode, addr0), tmp_reg_rtx);
- emit_move_insn (addr0, gen_rtx_PLUS (Pmode, addr0, const1_rtx));
+(define_mode_attr MOVMEM_r_d [(QI "r")
+ (HI "d")])
+
+;; $0, $4 : & dest
+;; $1, $5 : & src
+;; $2 : Address Space
+;; $3, $7 : Loop register
+;; $6 : Scratch register
+
+;; "movmem_qi"
+;; "movmem_hi"
+(define_insn "movmem_<mode>"
+ [(set (mem:BLK (match_operand:HI 0 "register_operand" "x"))
+ (mem:BLK (match_operand:HI 1 "register_operand" "z")))
+ (unspec [(match_operand:QI 2 "const_int_operand" "LP")]
+ UNSPEC_MOVMEM)
+ (use (match_operand:QIHI 3 "register_operand" "<MOVMEM_r_d>"))
+ (clobber (match_operand:HI 4 "register_operand" "=0"))
+ (clobber (match_operand:HI 5 "register_operand" "=1"))
+ (clobber (match_operand:QI 6 "register_operand" "=&r"))
+ (clobber (match_operand:QIHI 7 "register_operand" "=3"))]
+ ""
+ {
+ return avr_out_movmem (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "movmem")
+ (set_attr "cc" "clobber")])
- /* Decrement count. */
- emit_move_insn (loop_reg, gen_rtx_PLUS (mode, loop_reg, constm1_rtx));
+;; Ditto and
+;; $8, $9 : hh8 (& src)
+;; $10 : RAMPZ_ADDR
+
+;; "movmem_qi_elpm"
+;; "movmem_hi_elpm"
+(define_insn "movmem_<mode>_elpm"
+ [(set (mem:BLK (match_operand:HI 0 "register_operand" "x"))
+ (mem:BLK (lo_sum:PSI (match_operand:QI 8 "register_operand" "r")
+ (match_operand:HI 1 "register_operand" "z"))))
+ (unspec [(match_operand:QI 2 "const_int_operand" "n")]
+ UNSPEC_MOVMEM)
+ (use (match_operand:QIHI 3 "register_operand" "<MOVMEM_r_d>"))
+ (clobber (match_operand:HI 4 "register_operand" "=0"))
+ (clobber (match_operand:HI 5 "register_operand" "=1"))
+ (clobber (match_operand:QI 6 "register_operand" "=&r"))
+ (clobber (match_operand:QIHI 7 "register_operand" "=3"))
+ (clobber (match_operand:QI 9 "register_operand" "=8"))
+ (clobber (mem:QI (match_operand:QI 10 "io_address_operand" "n")))]
+ ""
+ {
+ return avr_out_movmem (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "movmem")
+ (set_attr "cc" "clobber")])
- /* Compare with zero and jump if not equal. */
- emit_cmp_and_jump_insns (loop_reg, const0_rtx, NE, NULL_RTX, mode, 1,
- label);
- /* Set jump probability based on loop count. */
- jump = get_last_insn ();
- add_reg_note (jump, REG_BR_PROB, GEN_INT (prob));
- DONE;
-}")
-;; =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2
+;; =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2
;; memset (%0, %2, %1)
(define_expand "setmemhi"
[(parallel [(set (match_operand:BLK 0 "memory_operand" "")
- (match_operand 2 "const_int_operand" ""))
- (use (match_operand:HI 1 "const_int_operand" ""))
- (use (match_operand:HI 3 "const_int_operand" "n"))
- (clobber (match_scratch:HI 4 ""))
- (clobber (match_dup 5))])]
+ (match_operand 2 "const_int_operand" ""))
+ (use (match_operand:HI 1 "const_int_operand" ""))
+ (use (match_operand:HI 3 "const_int_operand" ""))
+ (clobber (match_scratch:HI 4 ""))
+ (clobber (match_dup 5))])]
""
- "{
- rtx addr0;
- enum machine_mode mode;
+ {
+ rtx addr0;
+ enum machine_mode mode;
- /* If value to set is not zero, use the library routine. */
- if (operands[2] != const0_rtx)
- FAIL;
+ /* If value to set is not zero, use the library routine. */
+ if (operands[2] != const0_rtx)
+ FAIL;
- if (!CONST_INT_P (operands[1]))
- FAIL;
+ if (!CONST_INT_P (operands[1]))
+ FAIL;
+
+ mode = u8_operand (operands[1], VOIDmode) ? QImode : HImode;
+ operands[5] = gen_rtx_SCRATCH (mode);
+ operands[1] = copy_to_mode_reg (mode,
+ gen_int_mode (INTVAL (operands[1]), mode));
+ addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
+ operands[0] = gen_rtx_MEM (BLKmode, addr0);
+ })
- mode = u8_operand (operands[1], VOIDmode) ? QImode : HImode;
- operands[5] = gen_rtx_SCRATCH (mode);
- operands[1] = copy_to_mode_reg (mode,
- gen_int_mode (INTVAL (operands[1]), mode));
- addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
- operands[0] = gen_rtx_MEM (BLKmode, addr0);
-}")
(define_insn "*clrmemqi"
[(set (mem:BLK (match_operand:HI 0 "register_operand" "e"))
- (const_int 0))
+ (const_int 0))
(use (match_operand:QI 1 "register_operand" "r"))
(use (match_operand:QI 2 "const_int_operand" "n"))
(clobber (match_scratch:HI 3 "=0"))
(clobber (match_scratch:QI 4 "=&1"))]
""
- "st %a0+,__zero_reg__
- dec %1
- brne .-6"
+ "0:\;st %a0+,__zero_reg__\;dec %1\;brne 0b"
[(set_attr "length" "3")
(set_attr "cc" "clobber")])
+
(define_insn "*clrmemhi"
[(set (mem:BLK (match_operand:HI 0 "register_operand" "e,e"))
- (const_int 0))
+ (const_int 0))
(use (match_operand:HI 1 "register_operand" "!w,d"))
(use (match_operand:HI 2 "const_int_operand" "n,n"))
(clobber (match_scratch:HI 3 "=0,0"))
(clobber (match_scratch:HI 4 "=&1,&1"))]
""
- "*{
- if (which_alternative==0)
- return (AS2 (st,%a0+,__zero_reg__) CR_TAB
- AS2 (sbiw,%A1,1) CR_TAB
- AS1 (brne,.-6));
- else
- return (AS2 (st,%a0+,__zero_reg__) CR_TAB
- AS2 (subi,%A1,1) CR_TAB
- AS2 (sbci,%B1,0) CR_TAB
- AS1 (brne,.-8));
-}"
+ "@
+ 0:\;st %a0+,__zero_reg__\;sbiw %A1,1\;brne 0b
+ 0:\;st %a0+,__zero_reg__\;subi %A1,1\;sbci %B1,0\;brne 0b"
[(set_attr "length" "3,4")
(set_attr "cc" "clobber,clobber")])
(define_expand "strlenhi"
- [(set (match_dup 4)
- (unspec:HI [(match_operand:BLK 1 "memory_operand" "")
- (match_operand:QI 2 "const_int_operand" "")
- (match_operand:HI 3 "immediate_operand" "")]
- UNSPEC_STRLEN))
- (set (match_dup 4) (plus:HI (match_dup 4)
- (const_int -1)))
- (set (match_operand:HI 0 "register_operand" "")
- (minus:HI (match_dup 4)
- (match_dup 5)))]
- ""
- "{
- rtx addr;
- if (operands[2] != const0_rtx)
- FAIL;
- addr = copy_to_mode_reg (Pmode, XEXP (operands[1],0));
- operands[1] = gen_rtx_MEM (BLKmode, addr);
- operands[5] = addr;
- operands[4] = gen_reg_rtx (HImode);
-}")
+ [(set (match_dup 4)
+ (unspec:HI [(match_operand:BLK 1 "memory_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")
+ (match_operand:HI 3 "immediate_operand" "")]
+ UNSPEC_STRLEN))
+ (set (match_dup 4)
+ (plus:HI (match_dup 4)
+ (const_int -1)))
+ (set (match_operand:HI 0 "register_operand" "")
+ (minus:HI (match_dup 4)
+ (match_dup 5)))]
+ ""
+ {
+ rtx addr;
+ if (operands[2] != const0_rtx)
+ FAIL;
+ addr = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
+ operands[1] = gen_rtx_MEM (BLKmode, addr);
+ operands[5] = addr;
+ operands[4] = gen_reg_rtx (HImode);
+ })
(define_insn "*strlenhi"
- [(set (match_operand:HI 0 "register_operand" "=e")
- (unspec:HI [(mem:BLK (match_operand:HI 1 "register_operand" "%0"))
- (const_int 0)
- (match_operand:HI 2 "immediate_operand" "i")]
- UNSPEC_STRLEN))]
- ""
- "ld __tmp_reg__,%a0+
- tst __tmp_reg__
- brne .-6"
+ [(set (match_operand:HI 0 "register_operand" "=e")
+ (unspec:HI [(mem:BLK (match_operand:HI 1 "register_operand" "0"))
+ (const_int 0)
+ (match_operand:HI 2 "immediate_operand" "i")]
+ UNSPEC_STRLEN))]
+ ""
+ "0:\;ld __tmp_reg__,%a0+\;tst __tmp_reg__\;brne 0b"
[(set_attr "length" "3")
(set_attr "cc" "clobber")])
@@ -2770,7 +3032,7 @@
(const_int 15)))]
""
"bst %A0,0\;ror %B0\;ror %A0\;bld %B0,7"
- [(set_attr "length" "3")
+ [(set_attr "length" "4")
(set_attr "cc" "clobber")])
(define_insn "*rotlpsi2.1"
@@ -2868,10 +3130,8 @@
(define_expand "ashlqi3"
[(set (match_operand:QI 0 "register_operand" "")
- (ashift:QI (match_operand:QI 1 "register_operand" "")
- (match_operand:QI 2 "general_operand" "")))]
- ""
- "")
+ (ashift:QI (match_operand:QI 1 "register_operand" "")
+ (match_operand:QI 2 "nop_general_operand" "")))])
(define_split ; ashlqi3_const4
[(set (match_operand:QI 0 "d_register_operand" "")
@@ -2903,21 +3163,25 @@
"")
(define_insn "*ashlqi3"
- [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
- (ashift:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,K,n,n,Qm")))]
+ [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
+ (ashift:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0 ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,K,n ,n,Qm")))]
""
- "* return ashlqi3_out (insn, operands, NULL);"
+ {
+ return ashlqi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "5,0,1,2,4,6,9")
(set_attr "adjust_len" "ashlqi")
(set_attr "cc" "clobber,none,set_czn,set_czn,set_czn,set_czn,clobber")])
(define_insn "ashlhi3"
- [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
- (ashift:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (ashift:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return ashlhi3_out (insn, operands, NULL);"
+ {
+ return ashlhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "6,0,2,2,4,10,10")
(set_attr "adjust_len" "ashlhi")
(set_attr "cc" "clobber,none,set_n,clobber,set_n,clobber,clobber")])
@@ -3005,11 +3269,13 @@
(define_insn "ashlsi3"
- [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
- (ashift:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (ashift:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return ashlsi3_out (insn, operands, NULL);"
+ {
+ return ashlsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "8,0,4,4,8,10,12")
(set_attr "adjust_len" "ashlsi")
(set_attr "cc" "clobber,none,set_n,clobber,set_n,clobber,clobber")])
@@ -3063,11 +3329,13 @@
(define_insn "*ashlhi3_const"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
- (ashift:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+ (ashift:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
"reload_completed"
- "* return ashlhi3_out (insn, operands, NULL);"
+ {
+ return ashlhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,2,2,4,10")
(set_attr "adjust_len" "ashlhi")
(set_attr "cc" "none,set_n,clobber,set_n,clobber")])
@@ -3084,11 +3352,13 @@
(define_insn "*ashlsi3_const"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
- (ashift:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ (ashift:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
"reload_completed"
- "* return ashlsi3_out (insn, operands, NULL);"
+ {
+ return ashlsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,4,4,10")
(set_attr "adjust_len" "ashlsi")
(set_attr "cc" "none,set_n,clobber,clobber")])
@@ -3109,21 +3379,25 @@
;; arithmetic shift right
(define_insn "ashrqi3"
- [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,r ,r ,r")
- (ashiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0 ,0 ,0")
- (match_operand:QI 2 "general_operand" "r,L,P,K,C03 C04 C05,C06 C07,Qm")))]
+ [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,r ,r ,r")
+ (ashiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0 ,0 ,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,K,C03 C04 C05,C06 C07,Qm")))]
""
- "* return ashrqi3_out (insn, operands, NULL);"
+ {
+ return ashrqi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "5,0,1,2,5,4,9")
(set_attr "adjust_len" "ashrqi")
(set_attr "cc" "clobber,none,set_czn,set_czn,set_czn,clobber,clobber")])
(define_insn "ashrhi3"
- [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
- (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return ashrhi3_out (insn, operands, NULL);"
+ {
+ return ashrhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "6,0,2,4,4,10,10")
(set_attr "adjust_len" "ashrhi")
(set_attr "cc" "clobber,none,clobber,set_n,clobber,clobber,clobber")])
@@ -3141,11 +3415,13 @@
(set_attr "cc" "clobber")])
(define_insn "ashrsi3"
- [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
- (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return ashrsi3_out (insn, operands, NULL);"
+ {
+ return ashrsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "8,0,4,6,8,10,12")
(set_attr "adjust_len" "ashrsi")
(set_attr "cc" "clobber,none,clobber,set_n,clobber,clobber,clobber")])
@@ -3164,11 +3440,13 @@
(define_insn "*ashrhi3_const"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
- (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
"reload_completed"
- "* return ashrhi3_out (insn, operands, NULL);"
+ {
+ return ashrhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,2,4,4,10")
(set_attr "adjust_len" "ashrhi")
(set_attr "cc" "none,clobber,set_n,clobber,clobber")])
@@ -3185,11 +3463,13 @@
(define_insn "*ashrsi3_const"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
- (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
"reload_completed"
- "* return ashrsi3_out (insn, operands, NULL);"
+ {
+ return ashrsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,4,4,10")
(set_attr "adjust_len" "ashrsi")
(set_attr "cc" "none,clobber,set_n,clobber")])
@@ -3198,11 +3478,9 @@
;; logical shift right
(define_expand "lshrqi3"
- [(set (match_operand:QI 0 "register_operand" "")
- (lshiftrt:QI (match_operand:QI 1 "register_operand" "")
- (match_operand:QI 2 "general_operand" "")))]
- ""
- "")
+ [(set (match_operand:QI 0 "register_operand" "")
+ (lshiftrt:QI (match_operand:QI 1 "register_operand" "")
+ (match_operand:QI 2 "nop_general_operand" "")))])
(define_split ; lshrqi3_const4
[(set (match_operand:QI 0 "d_register_operand" "")
@@ -3234,21 +3512,25 @@
"")
(define_insn "*lshrqi3"
- [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
- (lshiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,K,n,n,Qm")))]
+ [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
+ (lshiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0 ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,K,n ,n,Qm")))]
""
- "* return lshrqi3_out (insn, operands, NULL);"
+ {
+ return lshrqi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "5,0,1,2,4,6,9")
(set_attr "adjust_len" "lshrqi")
(set_attr "cc" "clobber,none,set_czn,set_czn,set_czn,set_czn,clobber")])
(define_insn "lshrhi3"
- [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
- (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return lshrhi3_out (insn, operands, NULL);"
+ {
+ return lshrhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "6,0,2,2,4,10,10")
(set_attr "adjust_len" "lshrhi")
(set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber,clobber")])
@@ -3266,11 +3548,13 @@
(set_attr "cc" "clobber")])
(define_insn "lshrsi3"
- [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
- (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+ (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,L,P,O,K,n,Qm")))]
""
- "* return lshrsi3_out (insn, operands, NULL);"
+ {
+ return lshrsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "8,0,4,4,8,10,12")
(set_attr "adjust_len" "lshrsi")
(set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber,clobber")])
@@ -3324,11 +3608,13 @@
(define_insn "*lshrhi3_const"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
- (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
"reload_completed"
- "* return lshrhi3_out (insn, operands, NULL);"
+ {
+ return lshrhi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,2,2,4,10")
(set_attr "adjust_len" "lshrhi")
(set_attr "cc" "none,clobber,clobber,clobber,clobber")])
@@ -3345,11 +3631,13 @@
(define_insn "*lshrsi3_const"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
- (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
- (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+ (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
"reload_completed"
- "* return lshrsi3_out (insn, operands, NULL);"
+ {
+ return lshrsi3_out (insn, operands, NULL);
+ }
[(set_attr "length" "0,4,4,10")
(set_attr "adjust_len" "lshrsi")
(set_attr "cc" "none,clobber,clobber,clobber")])
@@ -3615,6 +3903,27 @@
operands[3] = simplify_gen_subreg (QImode, operands[0], PSImode, 2);
})
+(define_insn_and_split "n_extendhipsi2"
+ [(set (match_operand:PSI 0 "register_operand" "=r,r,d,r")
+ (lo_sum:PSI (match_operand:QI 1 "const_int_operand" "L,P,n,n")
+ (match_operand:HI 2 "register_operand" "r,r,r,r")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup 4) (match_dup 2))
+ (set (match_dup 3) (match_dup 6))
+ ; no-op move in the case where no scratch is needed
+ (set (match_dup 5) (match_dup 3))]
+ {
+ operands[4] = simplify_gen_subreg (HImode, operands[0], PSImode, 0);
+ operands[5] = simplify_gen_subreg (QImode, operands[0], PSImode, 2);
+ operands[6] = operands[1];
+
+ if (GET_CODE (operands[3]) == SCRATCH)
+ operands[3] = operands[5];
+ })
+
(define_insn_and_split "zero_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(zero_extend:SI (match_operand:HI 1 "combine_pseudo_register_operand" "r")))]
@@ -4363,25 +4672,25 @@
(define_insn "*cbi"
[(set (mem:QI (match_operand 0 "low_io_address_operand" "n"))
- (and:QI (mem:QI (match_dup 0))
- (match_operand:QI 1 "single_zero_operand" "n")))]
- "(optimize > 0)"
-{
- operands[2] = GEN_INT (exact_log2 (~INTVAL (operands[1]) & 0xff));
- return AS2 (cbi,%m0-0x20,%2);
-}
+ (and:QI (mem:QI (match_dup 0))
+ (match_operand:QI 1 "single_zero_operand" "n")))]
+ "optimize > 0"
+ {
+ operands[2] = GEN_INT (exact_log2 (~INTVAL (operands[1]) & 0xff));
+ return "cbi %i0,%2";
+ }
[(set_attr "length" "1")
(set_attr "cc" "none")])
(define_insn "*sbi"
[(set (mem:QI (match_operand 0 "low_io_address_operand" "n"))
- (ior:QI (mem:QI (match_dup 0))
- (match_operand:QI 1 "single_one_operand" "n")))]
- "(optimize > 0)"
-{
- operands[2] = GEN_INT (exact_log2 (INTVAL (operands[1]) & 0xff));
- return AS2 (sbi,%m0-0x20,%2);
-}
+ (ior:QI (mem:QI (match_dup 0))
+ (match_operand:QI 1 "single_one_operand" "n")))]
+ "optimize > 0"
+ {
+ operands[2] = GEN_INT (exact_log2 (INTVAL (operands[1]) & 0xff));
+ return "sbi %i0,%2";
+ }
[(set_attr "length" "1")
(set_attr "cc" "none")])
@@ -4796,6 +5105,30 @@
[(set_attr "length" "9")
(set_attr "cc" "clobber")])
+(define_insn "map_bitsqi"
+ [(set (match_operand:QI 0 "register_operand" "=d")
+ (unspec:QI [(match_operand:SI 1 "const_int_operand" "n")
+ (match_operand:QI 2 "register_operand" "r")]
+ UNSPEC_MAP_BITS))]
+ ""
+ {
+ return avr_out_map_bits (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "map_bits")
+ (set_attr "cc" "clobber")])
+
+(define_insn "map_bitshi"
+ [(set (match_operand:HI 0 "register_operand" "=&r")
+ (unspec:HI [(match_operand:DI 1 "const_double_operand" "n")
+ (match_operand:HI 2 "register_operand" "r")]
+ UNSPEC_MAP_BITS))]
+ ""
+ {
+ return avr_out_map_bits (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "map_bits")
+ (set_attr "cc" "clobber")])
+
;; Parity
@@ -5340,9 +5673,9 @@
(match_operand:QI 2 "nonmemory_operand" "L,P,r"))]
""
"@
- cbi %m0-0x20,%1
- sbi %m0-0x20,%1
- sbrc %2,0\;sbi %m0-0x20,%1\;sbrs %2,0\;cbi %m0-0x20,%1"
+ cbi %i0,%1
+ sbi %i0,%1
+ sbrc %2,0\;sbi %i0,%1\;sbrs %2,0\;cbi %i0,%1"
[(set_attr "length" "1,1,4")
(set_attr "cc" "none")])
@@ -5352,7 +5685,7 @@
(match_operand:QI 1 "const_0_to_7_operand" "n"))
(not:QI (match_operand:QI 2 "register_operand" "r")))]
""
- "sbrs %2,0\;sbi %m0-0x20,%1\;sbrc %2,0\;cbi %m0-0x20,%1"
+ "sbrs %2,0\;sbi %i0,%1\;sbrc %2,0\;cbi %i0,%1"
[(set_attr "length" "4")
(set_attr "cc" "none")])
diff --git a/gcc/config/avr/predicates.md b/gcc/config/avr/predicates.md
index 55a25b8b05a..fff34b58464 100644
--- a/gcc/config/avr/predicates.md
+++ b/gcc/config/avr/predicates.md
@@ -57,6 +57,18 @@
(and (match_code "const_int")
(match_test "IN_RANGE((INTVAL (op)), 0x20, (0x60 - GET_MODE_SIZE(mode)))")))
+;; Return 1 if OP is a general operand not in program memory
+(define_predicate "nop_general_operand"
+ (and (match_operand 0 "general_operand")
+ (match_test "!avr_mem_pgm_p (op)")))
+
+;; Return 1 if OP is an "ordinary" general operand, i.e. a general
+;; operand whose load is not handled by a libgcc call or ELPM.
+(define_predicate "nox_general_operand"
+ (and (match_operand 0 "general_operand")
+ (not (match_test "avr_load_libgcc_p (op)"))
+ (not (match_test "avr_mem_pgmx_p (op)"))))
+
;; Return 1 if OP is the zero constant for MODE.
(define_predicate "const0_operand"
(and (match_code "const_int,const_double")