summaryrefslogtreecommitdiff
path: root/gcc/tree-complex.c
diff options
context:
space:
mode:
authorbstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>2008-07-29 15:47:54 +0000
committerbstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>2008-07-29 15:47:54 +0000
commit81bdf64f2035b8979549fc533915ea17d429dc9f (patch)
tree69e2b47b22500339535d6f5f37e3fca2350d4f3a /gcc/tree-complex.c
parent7f1c23c2e7981438c251121d85cf5d807b9a4f5d (diff)
downloadgcc-81bdf64f2035b8979549fc533915ea17d429dc9f.tar.gz
2008-07-29 Basile Starynkevitch <basile@starynkevitch.net>
MELT branch merged with trunk r138226 [after tuple merge into trunk] some compiler probe stuff are missing * gcc/compiler-probe.h: more gimple, less tree * gcc/compiler-probe.c: incomplete merge. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@138247 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/tree-complex.c')
-rw-r--r--gcc/tree-complex.c811
1 files changed, 440 insertions, 371 deletions
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index 0fc1cc5f305..bbf4c494218 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -26,7 +26,7 @@ along with GCC; see the file COPYING3. If not see
#include "real.h"
#include "flags.h"
#include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
@@ -104,16 +104,37 @@ some_nonzerop (tree t)
return !zerop;
}
-/* Compute a lattice value from T. It may be a gimple_val, or, as a
- special exception, a COMPLEX_EXPR. */
+
+/* Compute a lattice value from the components of a complex type REAL
+ and IMAG. */
static complex_lattice_t
-find_lattice_value (tree t)
+find_lattice_value_parts (tree real, tree imag)
{
- tree real, imag;
int r, i;
complex_lattice_t ret;
+ r = some_nonzerop (real);
+ i = some_nonzerop (imag);
+ ret = r * ONLY_REAL + i * ONLY_IMAG;
+
+ /* ??? On occasion we could do better than mapping 0+0i to real, but we
+ certainly don't want to leave it UNINITIALIZED, which eventually gets
+ mapped to VARYING. */
+ if (ret == UNINITIALIZED)
+ ret = ONLY_REAL;
+
+ return ret;
+}
+
+
+/* Compute a lattice value from gimple_val T. */
+
+static complex_lattice_t
+find_lattice_value (tree t)
+{
+ tree real, imag;
+
switch (TREE_CODE (t))
{
case SSA_NAME:
@@ -125,26 +146,11 @@ find_lattice_value (tree t)
imag = TREE_IMAGPART (t);
break;
- case COMPLEX_EXPR:
- real = TREE_OPERAND (t, 0);
- imag = TREE_OPERAND (t, 1);
- break;
-
default:
gcc_unreachable ();
}
- r = some_nonzerop (real);
- i = some_nonzerop (imag);
- ret = r*ONLY_REAL + i*ONLY_IMAG;
-
- /* ??? On occasion we could do better than mapping 0+0i to real, but we
- certainly don't want to leave it UNINITIALIZED, which eventually gets
- mapped to VARYING. */
- if (ret == UNINITIALIZED)
- ret = ONLY_REAL;
-
- return ret;
+ return find_lattice_value_parts (real, imag);
}
/* Determine if LHS is something for which we're interested in seeing
@@ -171,66 +177,72 @@ init_parameter_lattice_values (void)
SSA_NAME_VERSION (ssa_name), VARYING);
}
-/* Initialize DONT_SIMULATE_AGAIN for each stmt and phi. Return false if
- we found no statements we want to simulate, and thus there's nothing for
- the entire pass to do. */
+/* Initialize simulation state for each statement. Return false if we
+ found no statements we want to simulate, and thus there's nothing
+ for the entire pass to do. */
static bool
init_dont_simulate_again (void)
{
basic_block bb;
- block_stmt_iterator bsi;
- tree phi;
+ gimple_stmt_iterator gsi;
+ gimple phi;
bool saw_a_complex_op = false;
FOR_EACH_BB (bb)
{
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- DONT_SIMULATE_AGAIN (phi) = !is_complex_reg (PHI_RESULT (phi));
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ phi = gsi_stmt (gsi);
+ prop_set_simulate_again (phi,
+ is_complex_reg (gimple_phi_result (phi)));
+ }
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree orig_stmt, stmt, rhs = NULL;
- bool dsa;
+ gimple stmt;
+ tree op0, op1;
+ bool sim_again_p;
- orig_stmt = stmt = bsi_stmt (bsi);
+ stmt = gsi_stmt (gsi);
+ op0 = op1 = NULL_TREE;
/* Most control-altering statements must be initially
simulated, else we won't cover the entire cfg. */
- dsa = !stmt_ends_bb_p (stmt);
+ sim_again_p = stmt_ends_bb_p (stmt);
- switch (TREE_CODE (stmt))
+ switch (gimple_code (stmt))
{
- case RETURN_EXPR:
- /* We don't care what the lattice value of <retval> is,
- since it's never used as an input to another computation. */
- dsa = true;
- stmt = TREE_OPERAND (stmt, 0);
- if (!stmt || TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
- break;
- /* FALLTHRU */
+ case GIMPLE_CALL:
+ if (gimple_call_lhs (stmt))
+ sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
+ break;
- case GIMPLE_MODIFY_STMT:
- dsa = !is_complex_reg (GIMPLE_STMT_OPERAND (stmt, 0));
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+ case GIMPLE_ASSIGN:
+ sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
+ if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
+ || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
+ op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+ else
+ op0 = gimple_assign_rhs1 (stmt);
+ if (gimple_num_ops (stmt) > 2)
+ op1 = gimple_assign_rhs2 (stmt);
break;
- case COND_EXPR:
- rhs = TREE_OPERAND (stmt, 0);
+ case GIMPLE_COND:
+ op0 = gimple_cond_lhs (stmt);
+ op1 = gimple_cond_rhs (stmt);
break;
default:
break;
}
- if (rhs)
- switch (TREE_CODE (rhs))
+ if (op0 || op1)
+ switch (gimple_expr_code (stmt))
{
case EQ_EXPR:
case NE_EXPR:
- rhs = TREE_OPERAND (rhs, 0);
- /* FALLTHRU */
-
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
@@ -239,20 +251,25 @@ init_dont_simulate_again (void)
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
case RDIV_EXPR:
+ if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
+ saw_a_complex_op = true;
+ break;
+
case NEGATE_EXPR:
case CONJ_EXPR:
- if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE)
+ if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
saw_a_complex_op = true;
break;
case REALPART_EXPR:
case IMAGPART_EXPR:
/* The total store transformation performed during
- gimplification creates such uninitialized loads
- and we need to lower the statement to be able
- to fix things up. */
- if (TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
- && ssa_undefined_value_p (TREE_OPERAND (rhs, 0)))
+ gimplification creates such uninitialized loads
+ and we need to lower the statement to be able
+ to fix things up. */
+ if (TREE_CODE (op0) == SSA_NAME
+ && ssa_undefined_value_p (op0))
saw_a_complex_op = true;
break;
@@ -260,7 +277,7 @@ init_dont_simulate_again (void)
break;
}
- DONT_SIMULATE_AGAIN (orig_stmt) = dsa;
+ prop_set_simulate_again (stmt, sim_again_p);
}
}
@@ -271,19 +288,18 @@ init_dont_simulate_again (void)
/* Evaluate statement STMT against the complex lattice defined above. */
static enum ssa_prop_result
-complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
+complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
tree *result_p)
{
complex_lattice_t new_l, old_l, op1_l, op2_l;
unsigned int ver;
- tree lhs, rhs;
+ tree lhs;
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+ lhs = gimple_get_lhs (stmt);
+ /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs. */
+ if (!lhs)
return SSA_PROP_VARYING;
- lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
/* These conditions should be satisfied due to the initial filter
set up in init_dont_simulate_again. */
gcc_assert (TREE_CODE (lhs) == SSA_NAME);
@@ -293,18 +309,22 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
ver = SSA_NAME_VERSION (lhs);
old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
- switch (TREE_CODE (rhs))
+ switch (gimple_expr_code (stmt))
{
case SSA_NAME:
- case COMPLEX_EXPR:
case COMPLEX_CST:
- new_l = find_lattice_value (rhs);
+ new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+ break;
+
+ case COMPLEX_EXPR:
+ new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
break;
case PLUS_EXPR:
case MINUS_EXPR:
- op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
- op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
+ op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+ op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
/* We've set up the lattice values such that IOR neatly
models addition. */
@@ -317,8 +337,8 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
- op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
- op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
+ op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+ op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
/* Obviously, if either varies, so does the result. */
if (op1_l == VARYING || op2_l == VARYING)
@@ -344,7 +364,7 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
case NEGATE_EXPR:
case CONJ_EXPR:
- new_l = find_lattice_value (TREE_OPERAND (rhs, 0));
+ new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
break;
default:
@@ -363,14 +383,14 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
/* Evaluate a PHI node against the complex lattice defined above. */
static enum ssa_prop_result
-complex_visit_phi (tree phi)
+complex_visit_phi (gimple phi)
{
complex_lattice_t new_l, old_l;
unsigned int ver;
tree lhs;
int i;
- lhs = PHI_RESULT (phi);
+ lhs = gimple_phi_result (phi);
/* This condition should be satisfied due to the initial filter
set up in init_dont_simulate_again. */
@@ -378,8 +398,8 @@ complex_visit_phi (tree phi)
/* We've set up the lattice values such that IOR neatly models PHI meet. */
new_l = UNINITIALIZED;
- for (i = PHI_NUM_ARGS (phi) - 1; i >= 0; --i)
- new_l |= find_lattice_value (PHI_ARG_DEF (phi, i));
+ for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
+ new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
ver = SSA_NAME_VERSION (lhs);
old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
@@ -475,7 +495,7 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL
- && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
+ && gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
{
SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
set_default_def (SSA_NAME_VAR (ret), ret);
@@ -487,15 +507,17 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
return ret;
}
-/* Set a value for a complex component of SSA_NAME, return a STMT_LIST of
- stuff that needs doing. */
+/* Set a value for a complex component of SSA_NAME, return a
+ gimple_seq of stuff that needs doing. */
-static tree
+static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
complex_lattice_t lattice = find_lattice_value (ssa_name);
size_t ssa_name_index;
- tree comp, list, last;
+ tree comp;
+ gimple last;
+ gimple_seq list;
/* We know the value must be zero, else there's a bug in our lattice
analysis. But the value may well be a variable known to contain
@@ -542,22 +564,21 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
comp = get_component_ssa_name (ssa_name, imag_p);
/* Do all the work to assign VALUE to COMP. */
+ list = NULL;
value = force_gimple_operand (value, &list, false, NULL);
- last = build_gimple_modify_stmt (comp, value);
- append_to_statement_list (last, &list);
-
- gcc_assert (SSA_NAME_DEF_STMT (comp) == NULL);
- SSA_NAME_DEF_STMT (comp) = last;
+ last = gimple_build_assign (comp, value);
+ gimple_seq_add_stmt (&list, last);
+ gcc_assert (SSA_NAME_DEF_STMT (comp) == last);
return list;
}
/* Extract the real or imaginary part of a complex variable or constant.
Make sure that it's a proper gimple_val and gimplify it if not.
- Emit any new code before BSI. */
+ Emit any new code before gsi. */
static tree
-extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
+extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
bool gimple_p)
{
switch (TREE_CODE (t))
@@ -566,7 +587,7 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
case COMPLEX_EXPR:
- return TREE_OPERAND (t, imagpart_p);
+ gcc_unreachable ();
case VAR_DECL:
case RESULT_DECL:
@@ -581,7 +602,8 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
inner_type, unshare_expr (t));
if (gimple_p)
- t = gimplify_val (bsi, inner_type, t);
+ t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
+ GSI_SAME_STMT);
return t;
}
@@ -597,53 +619,53 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
/* Update the complex components of the ssa name on the lhs of STMT. */
static void
-update_complex_components (block_stmt_iterator *bsi, tree stmt, tree r, tree i)
+update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
+ tree i)
{
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- tree list;
+ tree lhs;
+ gimple_seq list;
+
+ lhs = gimple_get_lhs (stmt);
list = set_component_ssa_name (lhs, false, r);
if (list)
- bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
+ gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
list = set_component_ssa_name (lhs, true, i);
if (list)
- bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
+ gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}
static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
- tree list;
+ gimple_seq list;
list = set_component_ssa_name (lhs, false, r);
if (list)
- bsi_insert_on_edge (e, list);
+ gsi_insert_seq_on_edge (e, list);
list = set_component_ssa_name (lhs, true, i);
if (list)
- bsi_insert_on_edge (e, list);
+ gsi_insert_seq_on_edge (e, list);
}
+
/* Update an assignment to a complex variable in place. */
static void
-update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
+update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
- tree stmt, mod;
- tree type;
-
- mod = stmt = bsi_stmt (*bsi);
- if (TREE_CODE (stmt) == RETURN_EXPR)
- mod = TREE_OPERAND (mod, 0);
- else if (gimple_in_ssa_p (cfun))
- update_complex_components (bsi, stmt, r, i);
-
- type = TREE_TYPE (GIMPLE_STMT_OPERAND (mod, 1));
- GIMPLE_STMT_OPERAND (mod, 1) = build2 (COMPLEX_EXPR, type, r, i);
- update_stmt (stmt);
+ gimple_stmt_iterator orig_si = *gsi;
+
+ if (gimple_in_ssa_p (cfun))
+ update_complex_components (gsi, gsi_stmt (*gsi), r, i);
+
+ gimple_assign_set_rhs_with_ops (&orig_si, COMPLEX_EXPR, r, i);
+ update_stmt (gsi_stmt (orig_si));
}
+
/* Generate code at the entry point of the function to initialize the
component variables for a complex parameter. */
@@ -678,49 +700,54 @@ update_parameter_components (void)
static void
update_phi_components (basic_block bb)
{
- tree phi;
+ gimple_stmt_iterator gsi;
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- if (is_complex_reg (PHI_RESULT (phi)))
- {
- tree lr, li, pr = NULL, pi = NULL;
- unsigned int i, n;
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple phi = gsi_stmt (gsi);
- lr = get_component_ssa_name (PHI_RESULT (phi), false);
- if (TREE_CODE (lr) == SSA_NAME)
- {
- pr = create_phi_node (lr, bb);
- SSA_NAME_DEF_STMT (lr) = pr;
- }
+ if (is_complex_reg (gimple_phi_result (phi)))
+ {
+ tree lr, li;
+ gimple pr = NULL, pi = NULL;
+ unsigned int i, n;
- li = get_component_ssa_name (PHI_RESULT (phi), true);
- if (TREE_CODE (li) == SSA_NAME)
- {
- pi = create_phi_node (li, bb);
- SSA_NAME_DEF_STMT (li) = pi;
- }
-
- for (i = 0, n = PHI_NUM_ARGS (phi); i < n; ++i)
- {
- tree comp, arg = PHI_ARG_DEF (phi, i);
- if (pr)
- {
- comp = extract_component (NULL, arg, false, false);
- SET_PHI_ARG_DEF (pr, i, comp);
- }
- if (pi)
- {
- comp = extract_component (NULL, arg, true, false);
- SET_PHI_ARG_DEF (pi, i, comp);
- }
- }
- }
+ lr = get_component_ssa_name (gimple_phi_result (phi), false);
+ if (TREE_CODE (lr) == SSA_NAME)
+ {
+ pr = create_phi_node (lr, bb);
+ SSA_NAME_DEF_STMT (lr) = pr;
+ }
+
+ li = get_component_ssa_name (gimple_phi_result (phi), true);
+ if (TREE_CODE (li) == SSA_NAME)
+ {
+ pi = create_phi_node (li, bb);
+ SSA_NAME_DEF_STMT (li) = pi;
+ }
+
+ for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
+ {
+ tree comp, arg = gimple_phi_arg_def (phi, i);
+ if (pr)
+ {
+ comp = extract_component (NULL, arg, false, false);
+ SET_PHI_ARG_DEF (pr, i, comp);
+ }
+ if (pi)
+ {
+ comp = extract_component (NULL, arg, true, false);
+ SET_PHI_ARG_DEF (pi, i, comp);
+ }
+ }
+ }
+ }
}
/* Mark each virtual op in STMT for ssa update. */
static void
-update_all_vops (tree stmt)
+update_all_vops (gimple stmt)
{
ssa_op_iter iter;
tree sym;
@@ -733,18 +760,35 @@ update_all_vops (tree stmt)
}
}
+
/* Expand a complex move to scalars. */
static void
-expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
- tree lhs, tree rhs)
+expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
tree inner_type = TREE_TYPE (type);
- tree r, i;
+ tree r, i, lhs, rhs;
+ gimple stmt = gsi_stmt (*gsi);
+
+ if (is_gimple_assign (stmt))
+ {
+ lhs = gimple_assign_lhs (stmt);
+ if (gimple_num_ops (stmt) == 2)
+ rhs = gimple_assign_rhs1 (stmt);
+ else
+ rhs = NULL_TREE;
+ }
+ else if (is_gimple_call (stmt))
+ {
+ lhs = gimple_call_lhs (stmt);
+ rhs = NULL_TREE;
+ }
+ else
+ gcc_unreachable ();
if (TREE_CODE (lhs) == SSA_NAME)
{
- if (is_ctrl_altering_stmt (bsi_stmt (*bsi)))
+ if (is_ctrl_altering_stmt (stmt))
{
edge_iterator ei;
edge e;
@@ -752,7 +796,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
/* The value is not assigned on the exception edges, so we need not
concern ourselves there. We do need to update on the fallthru
edge. Find it. */
- FOR_EACH_EDGE (e, ei, bsi->bb->succs)
+ FOR_EACH_EDGE (e, ei, gsi_bb (*gsi)->succs)
if (e->flags & EDGE_FALLTHRU)
goto found_fallthru;
gcc_unreachable ();
@@ -762,47 +806,57 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
i = build1 (IMAGPART_EXPR, inner_type, lhs);
update_complex_components_on_edge (e, lhs, r, i);
}
- else if (TREE_CODE (rhs) == CALL_EXPR || TREE_SIDE_EFFECTS (rhs)
- || TREE_CODE (rhs) == PAREN_EXPR)
+ else if (is_gimple_call (stmt)
+ || gimple_has_side_effects (stmt)
+ || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
{
r = build1 (REALPART_EXPR, inner_type, lhs);
i = build1 (IMAGPART_EXPR, inner_type, lhs);
- update_complex_components (bsi, stmt, r, i);
+ update_complex_components (gsi, stmt, r, i);
}
else
{
- update_all_vops (bsi_stmt (*bsi));
- r = extract_component (bsi, rhs, 0, true);
- i = extract_component (bsi, rhs, 1, true);
- update_complex_assignment (bsi, r, i);
+ update_all_vops (stmt);
+ if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
+ {
+ r = extract_component (gsi, rhs, 0, true);
+ i = extract_component (gsi, rhs, 1, true);
+ }
+ else
+ {
+ r = gimple_assign_rhs1 (stmt);
+ i = gimple_assign_rhs2 (stmt);
+ }
+ update_complex_assignment (gsi, r, i);
}
}
- else if (TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
+ else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
{
tree x;
+ gimple t;
- r = extract_component (bsi, rhs, 0, false);
- i = extract_component (bsi, rhs, 1, false);
+ r = extract_component (gsi, rhs, 0, false);
+ i = extract_component (gsi, rhs, 1, false);
x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
- x = build_gimple_modify_stmt (x, r);
- bsi_insert_before (bsi, x, BSI_SAME_STMT);
+ t = gimple_build_assign (x, r);
+ gsi_insert_before (gsi, t, GSI_SAME_STMT);
- if (stmt == bsi_stmt (*bsi))
+ if (stmt == gsi_stmt (*gsi))
{
x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
- GIMPLE_STMT_OPERAND (stmt, 0) = x;
- GIMPLE_STMT_OPERAND (stmt, 1) = i;
+ gimple_assign_set_lhs (stmt, x);
+ gimple_assign_set_rhs1 (stmt, i);
}
else
{
x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
- x = build_gimple_modify_stmt (x, i);
- bsi_insert_before (bsi, x, BSI_SAME_STMT);
+ t = gimple_build_assign (x, i);
+ gsi_insert_before (gsi, t, GSI_SAME_STMT);
- stmt = bsi_stmt (*bsi);
- gcc_assert (TREE_CODE (stmt) == RETURN_EXPR);
- GIMPLE_STMT_OPERAND (stmt, 0) = lhs;
+ stmt = gsi_stmt (*gsi);
+ gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
+ gimple_return_set_retval (stmt, lhs);
}
update_all_vops (stmt);
@@ -816,7 +870,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
*/
static void
-expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code,
complex_lattice_t al, complex_lattice_t bl)
@@ -826,21 +880,21 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
switch (PAIR (al, bl))
{
case PAIR (ONLY_REAL, ONLY_REAL):
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
ri = ai;
break;
case PAIR (ONLY_REAL, ONLY_IMAG):
rr = ar;
if (code == MINUS_EXPR)
- ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
+ ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
else
ri = bi;
break;
case PAIR (ONLY_IMAG, ONLY_REAL):
if (code == MINUS_EXPR)
- rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
else
rr = br;
ri = ai;
@@ -848,23 +902,23 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
case PAIR (ONLY_IMAG, ONLY_IMAG):
rr = ar;
- ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
break;
case PAIR (VARYING, ONLY_REAL):
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
ri = ai;
break;
case PAIR (VARYING, ONLY_IMAG):
rr = ar;
- ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
break;
case PAIR (ONLY_REAL, VARYING):
if (code == MINUS_EXPR)
goto general;
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
ri = bi;
break;
@@ -872,38 +926,41 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
if (code == MINUS_EXPR)
goto general;
rr = br;
- ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
break;
case PAIR (VARYING, VARYING):
general:
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
- ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
break;
default:
gcc_unreachable ();
}
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand a complex multiplication or division to a libcall to the c99
compliant routines. */
static void
-expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
+expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
tree br, tree bi, enum tree_code code)
{
enum machine_mode mode;
enum built_in_function bcode;
- tree fn, stmt, type;
+ tree fn, type, lhs;
+ gimple stmt;
- stmt = bsi_stmt (*bsi);
- type = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
+ stmt = gsi_stmt (*gsi);
+ lhs = gimple_assign_lhs (stmt);
+ type = TREE_TYPE (lhs);
mode = TYPE_MODE (type);
gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);
+
if (code == MULT_EXPR)
bcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
else if (code == RDIV_EXPR)
@@ -912,16 +969,18 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
gcc_unreachable ();
fn = built_in_decls[bcode];
- GIMPLE_STMT_OPERAND (stmt, 1) = build_call_expr (fn, 4, ar, ai, br, bi);
+ stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
+ gimple_call_set_lhs (stmt, lhs);
update_stmt (stmt);
+ gsi_replace (gsi, stmt, true);
if (gimple_in_ssa_p (cfun))
{
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
type = TREE_TYPE (type);
- update_complex_components (bsi, stmt,
+ update_complex_components (gsi, stmt,
build1 (REALPART_EXPR, type, lhs),
build1 (IMAGPART_EXPR, type, lhs));
+ SSA_NAME_DEF_STMT (lhs) = stmt;
}
}
@@ -930,7 +989,7 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
*/
static void
-expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
complex_lattice_t al, complex_lattice_t bl)
{
@@ -947,7 +1006,7 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
switch (PAIR (al, bl))
{
case PAIR (ONLY_REAL, ONLY_REAL):
- rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
ri = ai;
break;
@@ -957,49 +1016,49 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
ri = br;
else
- ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+ ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
break;
case PAIR (ONLY_IMAG, ONLY_IMAG):
- rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
- rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
+ rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+ rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
ri = ar;
break;
case PAIR (VARYING, ONLY_REAL):
- rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
- ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+ rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+ ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
break;
case PAIR (VARYING, ONLY_IMAG):
- rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
- rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
- ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
+ rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+ rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
+ ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
break;
case PAIR (VARYING, VARYING):
if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
{
- expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
+ expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
return;
}
else
{
tree t1, t2, t3, t4;
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
- t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
- t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+ t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+ t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
/* Avoid expanding redundant multiplication for the common
case of squaring a complex number. */
if (ar == br && ai == bi)
t4 = t3;
else
- t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+ t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
- rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
- ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
+ rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
+ ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
}
break;
@@ -1007,7 +1066,7 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
gcc_unreachable ();
}
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand complex division to scalars, straightforward algorithm.
@@ -1016,43 +1075,44 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
*/
static void
-expand_complex_div_straight (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code)
{
tree rr, ri, div, t1, t2, t3;
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, br);
- t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, bi);
- div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
+ t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
+ div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
- t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
- t3 = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
- rr = gimplify_build2 (bsi, code, inner_type, t3, div);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+ t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+ t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
+ rr = gimplify_build2 (gsi, code, inner_type, t3, div);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
- t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
- t3 = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
- ri = gimplify_build2 (bsi, code, inner_type, t3, div);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
+ t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
+ t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
+ ri = gimplify_build2 (gsi, code, inner_type, t3, div);
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand complex division to scalars, modified algorithm to minimize
overflow with wide input ranges. */
static void
-expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code)
{
tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
basic_block bb_cond, bb_true, bb_false, bb_join;
+ gimple stmt;
/* Examine |br| < |bi|, and branch. */
- t1 = gimplify_build1 (bsi, ABS_EXPR, inner_type, br);
- t2 = gimplify_build1 (bsi, ABS_EXPR, inner_type, bi);
+ t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
+ t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
compare = fold_build2 (LT_EXPR, boolean_type_node, t1, t2);
STRIP_NOPS (compare);
@@ -1061,20 +1121,25 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
if (!TREE_CONSTANT (compare))
{
edge e;
+ gimple stmt;
tree cond, tmp;
tmp = create_tmp_var (boolean_type_node, NULL);
- cond = build_gimple_modify_stmt (tmp, compare);
+ stmt = gimple_build_assign (tmp, compare);
if (gimple_in_ssa_p (cfun))
- tmp = make_ssa_name (tmp, cond);
- GIMPLE_STMT_OPERAND (cond, 0) = tmp;
- bsi_insert_before (bsi, cond, BSI_SAME_STMT);
+ {
+ tmp = make_ssa_name (tmp, stmt);
+ gimple_assign_set_lhs (stmt, tmp);
+ }
+
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
- cond = build3 (COND_EXPR, void_type_node, tmp, NULL_TREE, NULL_TREE);
- bsi_insert_before (bsi, cond, BSI_SAME_STMT);
+ cond = fold_build2 (EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
+ stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
/* Split the original block, and create the TRUE and FALSE blocks. */
- e = split_block (bsi->bb, cond);
+ e = split_block (gsi_bb (*gsi), stmt);
bb_cond = e->src;
bb_join = e->dest;
bb_true = create_empty_bb (bb_cond);
@@ -1110,31 +1175,31 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
{
if (bb_true)
{
- *bsi = bsi_last (bb_true);
- bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
+ *gsi = gsi_last_bb (bb_true);
+ gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
}
- ratio = gimplify_build2 (bsi, code, inner_type, br, bi);
+ ratio = gimplify_build2 (gsi, code, inner_type, br, bi);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, ratio);
- div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, bi);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
+ div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
- tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ai);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
+ tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
- ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, ar);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
+ ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);
- tr = gimplify_build2 (bsi, code, inner_type, tr, div);
- ti = gimplify_build2 (bsi, code, inner_type, ti, div);
+ tr = gimplify_build2 (gsi, code, inner_type, tr, div);
+ ti = gimplify_build2 (gsi, code, inner_type, ti, div);
if (bb_true)
{
- t1 = build_gimple_modify_stmt (rr, tr);
- bsi_insert_before (bsi, t1, BSI_SAME_STMT);
- t1 = build_gimple_modify_stmt (ri, ti);
- bsi_insert_before (bsi, t1, BSI_SAME_STMT);
- bsi_remove (bsi, true);
+ stmt = gimple_build_assign (rr, tr);
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ stmt = gimple_build_assign (ri, ti);
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ gsi_remove (gsi, true);
}
}
@@ -1149,46 +1214,46 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
{
if (bb_false)
{
- *bsi = bsi_last (bb_false);
- bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
+ *gsi = gsi_last_bb (bb_false);
+ gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
}
- ratio = gimplify_build2 (bsi, code, inner_type, bi, br);
+ ratio = gimplify_build2 (gsi, code, inner_type, bi, br);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, ratio);
- div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, br);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
+ div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
- tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ar);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
+ tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);
- t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
- ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, t1);
+ t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
+ ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);
- tr = gimplify_build2 (bsi, code, inner_type, tr, div);
- ti = gimplify_build2 (bsi, code, inner_type, ti, div);
+ tr = gimplify_build2 (gsi, code, inner_type, tr, div);
+ ti = gimplify_build2 (gsi, code, inner_type, ti, div);
if (bb_false)
{
- t1 = build_gimple_modify_stmt (rr, tr);
- bsi_insert_before (bsi, t1, BSI_SAME_STMT);
- t1 = build_gimple_modify_stmt (ri, ti);
- bsi_insert_before (bsi, t1, BSI_SAME_STMT);
- bsi_remove (bsi, true);
+ stmt = gimple_build_assign (rr, tr);
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ stmt = gimple_build_assign (ri, ti);
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ gsi_remove (gsi, true);
}
}
if (bb_join)
- *bsi = bsi_start (bb_join);
+ *gsi = gsi_start_bb (bb_join);
else
rr = tr, ri = ti;
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand complex division to scalars. */
static void
-expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai, tree br, tree bi,
enum tree_code code,
complex_lattice_t al, complex_lattice_t bl)
@@ -1198,35 +1263,35 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
switch (PAIR (al, bl))
{
case PAIR (ONLY_REAL, ONLY_REAL):
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
ri = ai;
break;
case PAIR (ONLY_REAL, ONLY_IMAG):
rr = ai;
- ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
- ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
+ ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
+ ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
break;
case PAIR (ONLY_IMAG, ONLY_REAL):
rr = ar;
- ri = gimplify_build2 (bsi, code, inner_type, ai, br);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, br);
break;
case PAIR (ONLY_IMAG, ONLY_IMAG):
- rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
+ rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
ri = ar;
break;
case PAIR (VARYING, ONLY_REAL):
- rr = gimplify_build2 (bsi, code, inner_type, ar, br);
- ri = gimplify_build2 (bsi, code, inner_type, ai, br);
+ rr = gimplify_build2 (gsi, code, inner_type, ar, br);
+ ri = gimplify_build2 (gsi, code, inner_type, ai, br);
break;
case PAIR (VARYING, ONLY_IMAG):
- rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
- ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
- ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
+ rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
+ ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
+ ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
case PAIR (ONLY_REAL, VARYING):
case PAIR (ONLY_IMAG, VARYING):
@@ -1235,20 +1300,20 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
{
case 0:
/* straightforward implementation of complex divide acceptable. */
- expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
+ expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
break;
case 2:
if (SCALAR_FLOAT_TYPE_P (inner_type))
{
- expand_complex_libcall (bsi, ar, ai, br, bi, code);
+ expand_complex_libcall (gsi, ar, ai, br, bi, code);
break;
}
/* FALLTHRU */
case 1:
/* wide ranges of inputs must work for complex divide. */
- expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
+ expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
break;
default:
@@ -1260,7 +1325,7 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
gcc_unreachable ();
}
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand complex negation to scalars:
@@ -1268,15 +1333,15 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
*/
static void
-expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai)
{
tree rr, ri;
- rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ar);
- ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
+ rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
+ ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
- update_complex_assignment (bsi, rr, ri);
+ update_complex_assignment (gsi, rr, ri);
}
/* Expand complex conjugate to scalars:
@@ -1284,44 +1349,52 @@ expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
*/
static void
-expand_complex_conjugate (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
tree ar, tree ai)
{
tree ri;
- ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
+ ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
- update_complex_assignment (bsi, ar, ri);
+ update_complex_assignment (gsi, ar, ri);
}
/* Expand complex comparison (EQ or NE only). */
static void
-expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
+expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
tree br, tree bi, enum tree_code code)
{
- tree cr, ci, cc, stmt, expr, type;
+ tree cr, ci, cc, type;
+ gimple stmt;
- cr = gimplify_build2 (bsi, code, boolean_type_node, ar, br);
- ci = gimplify_build2 (bsi, code, boolean_type_node, ai, bi);
- cc = gimplify_build2 (bsi,
+ cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
+ ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
+ cc = gimplify_build2 (gsi,
(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
boolean_type_node, cr, ci);
- stmt = expr = bsi_stmt (*bsi);
+ stmt = gsi_stmt (*gsi);
- switch (TREE_CODE (stmt))
+ switch (gimple_code (stmt))
{
- case RETURN_EXPR:
- expr = TREE_OPERAND (stmt, 0);
- /* FALLTHRU */
- case GIMPLE_MODIFY_STMT:
- type = TREE_TYPE (GIMPLE_STMT_OPERAND (expr, 1));
- GIMPLE_STMT_OPERAND (expr, 1) = fold_convert (type, cc);
+ case GIMPLE_RETURN:
+ type = TREE_TYPE (gimple_return_retval (stmt));
+ gimple_return_set_retval (stmt, fold_convert (type, cc));
break;
- case COND_EXPR:
- TREE_OPERAND (stmt, 0) = cc;
+
+ case GIMPLE_ASSIGN:
+ type = TREE_TYPE (gimple_assign_lhs (stmt));
+ gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
+ stmt = gsi_stmt (*gsi);
break;
+
+ case GIMPLE_COND:
+ gimple_cond_set_code (stmt, EQ_EXPR);
+ gimple_cond_set_lhs (stmt, cc);
+ gimple_cond_set_rhs (stmt, boolean_true_node);
+ break;
+
default:
gcc_unreachable ();
}
@@ -1329,41 +1402,24 @@ expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
update_stmt (stmt);
}
+
/* Process one statement. If we identify a complex operation, expand it. */
static void
-expand_complex_operations_1 (block_stmt_iterator *bsi)
+expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
- tree stmt = bsi_stmt (*bsi);
- tree rhs, type, inner_type;
+ gimple stmt = gsi_stmt (*gsi);
+ tree type, inner_type, lhs;
tree ac, ar, ai, bc, br, bi;
complex_lattice_t al, bl;
enum tree_code code;
- switch (TREE_CODE (stmt))
- {
- case RETURN_EXPR:
- stmt = TREE_OPERAND (stmt, 0);
- if (!stmt)
- return;
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
- return;
- /* FALLTHRU */
-
- case GIMPLE_MODIFY_STMT:
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- break;
+ lhs = gimple_get_lhs (stmt);
+ if (!lhs && gimple_code (stmt) != GIMPLE_COND)
+ return;
- case COND_EXPR:
- rhs = TREE_OPERAND (stmt, 0);
- break;
-
- default:
- return;
- }
-
- type = TREE_TYPE (rhs);
- code = TREE_CODE (rhs);
+ type = TREE_TYPE (gimple_op (stmt, 0));
+ code = gimple_expr_code (stmt);
/* Initial filter for operations we handle. */
switch (code)
@@ -1385,32 +1441,36 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
case EQ_EXPR:
case NE_EXPR:
- inner_type = TREE_TYPE (TREE_OPERAND (rhs, 1));
+ /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
+ subcode, so we need to access the operands using gimple_op. */
+ inner_type = TREE_TYPE (gimple_op (stmt, 1));
if (TREE_CODE (inner_type) != COMPLEX_TYPE)
return;
break;
default:
{
- tree lhs, rhs;
+ tree rhs;
- /* COND_EXPR may also fallthru here, but we do not need to do anything
- with it. */
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+ /* GIMPLE_COND may also fallthru here, but we do not need to
+ do anything with it. */
+ if (gimple_code (stmt) == GIMPLE_COND)
return;
- lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
if (TREE_CODE (type) == COMPLEX_TYPE)
- expand_complex_move (bsi, stmt, type, lhs, rhs);
- else if ((TREE_CODE (rhs) == REALPART_EXPR
- || TREE_CODE (rhs) == IMAGPART_EXPR)
- && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ expand_complex_move (gsi, type);
+ else if (is_gimple_assign (stmt)
+ && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
+ || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
+ && TREE_CODE (lhs) == SSA_NAME)
{
- GENERIC_TREE_OPERAND (stmt, 1)
- = extract_component (bsi, TREE_OPERAND (rhs, 0),
- TREE_CODE (rhs) == IMAGPART_EXPR, false);
+ rhs = gimple_assign_rhs1 (stmt);
+ rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
+ gimple_assign_rhs_code (stmt)
+ == IMAGPART_EXPR,
+ false);
+ gimple_assign_set_rhs_from_tree (gsi, rhs);
+ stmt = gsi_stmt (*gsi);
update_stmt (stmt);
}
}
@@ -1419,23 +1479,30 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
/* Extract the components of the two complex values. Make sure and
handle the common case of the same value used twice specially. */
- ac = TREE_OPERAND (rhs, 0);
- ar = extract_component (bsi, ac, 0, true);
- ai = extract_component (bsi, ac, 1, true);
-
- if (TREE_CODE_CLASS (code) == tcc_unary)
- bc = br = bi = NULL;
+ if (is_gimple_assign (stmt))
+ {
+ ac = gimple_assign_rhs1 (stmt);
+ bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
+ }
+ /* GIMPLE_CALL cannot get here. */
else
{
- bc = TREE_OPERAND (rhs, 1);
- if (ac == bc)
- br = ar, bi = ai;
- else
- {
- br = extract_component (bsi, bc, 0, true);
- bi = extract_component (bsi, bc, 1, true);
- }
+ ac = gimple_cond_lhs (stmt);
+ bc = gimple_cond_rhs (stmt);
+ }
+
+ ar = extract_component (gsi, ac, false, true);
+ ai = extract_component (gsi, ac, true, true);
+
+ if (ac == bc)
+ br = ar, bi = ai;
+ else if (bc)
+ {
+ br = extract_component (gsi, bc, 0, true);
+ bi = extract_component (gsi, bc, 1, true);
}
+ else
+ br = bi = NULL_TREE;
if (gimple_in_ssa_p (cfun))
{
@@ -1461,11 +1528,11 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
{
case PLUS_EXPR:
case MINUS_EXPR:
- expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code, al, bl);
+ expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
break;
case MULT_EXPR:
- expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi, al, bl);
+ expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
break;
case TRUNC_DIV_EXPR:
@@ -1473,20 +1540,20 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
case RDIV_EXPR:
- expand_complex_division (bsi, inner_type, ar, ai, br, bi, code, al, bl);
+ expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
break;
case NEGATE_EXPR:
- expand_complex_negation (bsi, inner_type, ar, ai);
+ expand_complex_negation (gsi, inner_type, ar, ai);
break;
case CONJ_EXPR:
- expand_complex_conjugate (bsi, inner_type, ar, ai);
+ expand_complex_conjugate (gsi, inner_type, ar, ai);
break;
case EQ_EXPR:
case NE_EXPR:
- expand_complex_comparison (bsi, ar, ai, br, bi, code);
+ expand_complex_comparison (gsi, ar, ai, br, bi, code);
break;
default:
@@ -1501,7 +1568,7 @@ static unsigned int
tree_lower_complex (void)
{
int old_last_basic_block;
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
basic_block bb;
if (!init_dont_simulate_again ())
@@ -1529,12 +1596,13 @@ tree_lower_complex (void)
{
if (bb->index >= old_last_basic_block)
continue;
+
update_phi_components (bb);
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- expand_complex_operations_1 (&bsi);
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ expand_complex_operations_1 (&gsi);
}
- bsi_commit_edge_inserts ();
+ gsi_commit_edge_inserts ();
htab_delete (complex_variable_components);
VEC_free (tree, heap, complex_ssa_name_components);
@@ -1571,15 +1639,16 @@ static unsigned int
tree_lower_complex_O0 (void)
{
int old_last_basic_block = last_basic_block;
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
basic_block bb;
FOR_EACH_BB (bb)
{
if (bb->index >= old_last_basic_block)
continue;
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- expand_complex_operations_1 (&bsi);
+
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ expand_complex_operations_1 (&gsi);
}
return 0;
}