Diffstat (limited to 'gcc')
-rw-r--r--   gcc/ChangeLog       10
-rw-r--r--   gcc/cpplex.c         2
-rw-r--r--   gcc/function.c       2
-rw-r--r--   gcc/integrate.c      2
-rw-r--r--   gcc/regrename.c      2
-rw-r--r--   gcc/sibcall.c        2
-rw-r--r--   gcc/simplify-rtx.c   2
-rw-r--r--   gcc/tree-inline.c   10
8 files changed, 21 insertions, 11 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index b7daa5c1151..b8a6637ed4e 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,13 @@
+2002-04-30 Kazu Hirata <kazu@hxi.com>
+
+ * cpplex.c: Fix comment formatting.
+ * function.c: Likewise.
+ * integrate.c: Likewise.
+ * regrename.c: Likewise.
+ * sibcall.c: Likewise.
+ * simplify-rtx.c: Likewise.
+ * tree-inline.c: Likewise.
+
2002-04-30 John David Anglin <dave@hiauly1.hia.nrc.ca>
* config.gcc (hppa1.1-*-pro*, hppa1.1-*-osf*, hppa1.1-*-rtems*,
diff --git a/gcc/cpplex.c b/gcc/cpplex.c
index 6a44b86d15f..bc129784561 100644
--- a/gcc/cpplex.c
+++ b/gcc/cpplex.c
@@ -763,7 +763,7 @@ save_comment (pfile, token, from, type)
buffer[0] = '/';
memcpy (buffer + 1, from, len - 1);
- /* Finish conversion to a C comment, if necessary. */
+ /* Finish conversion to a C comment, if necessary.  */
if (pfile->state.in_directive && type == '/')
{
buffer[1] = '*';
diff --git a/gcc/function.c b/gcc/function.c
index 56e90147b32..e90fddd3b87 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -2567,7 +2567,7 @@ fixup_memory_subreg (x, insn, promoted_mode, uncritical)
if (BYTES_BIG_ENDIAN)
/* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
the offset so that it points to the right location within the
- MEM. */
+ MEM.  */
offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
if (!flag_force_addr
diff --git a/gcc/integrate.c b/gcc/integrate.c
index 66be25bd683..2ea46c9363f 100644
--- a/gcc/integrate.c
+++ b/gcc/integrate.c
@@ -1396,7 +1396,7 @@ copy_insn_list (insns, map, static_chain_value)
memory references via that register can then be
identified as static chain references. We assume that
the register is only assigned once, and that the static
- chain address is only live in one register at a time. */
+ chain address is only live in one register at a time.  */
else if (static_chain_value != 0
&& set != 0
diff --git a/gcc/regrename.c b/gcc/regrename.c
index efdd9f9f24b..82f3ae97a04 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -1108,7 +1108,7 @@ kill_value (x, vd)
/* SUBREGS are supposed to have been eliminated by now. But some
ports, e.g. i386 sse, use them to smuggle vector type information
through to instruction selection. Each such SUBREG should simplify,
- so if we get a NULL we've done something wrong elsewhere. */
+ so if we get a NULL we've done something wrong elsewhere.  */
if (GET_CODE (x) == SUBREG)
x = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
diff --git a/gcc/sibcall.c b/gcc/sibcall.c
index 6e753fa2fa9..ec13d89280b 100644
--- a/gcc/sibcall.c
+++ b/gcc/sibcall.c
@@ -758,7 +758,7 @@ optimize_sibling_and_tail_recursive_calls ()
}
/* Similarly, invalidate RTX_UNCHANGING_P for any incoming
- arguments passed in registers. */
+ arguments passed in registers.  */
for (arg = DECL_ARGUMENTS (current_function_decl);
arg;
arg = TREE_CHAIN (arg))
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index 3bc2824b58a..8441ea9e985 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -1904,7 +1904,7 @@ simplify_relational_operation (code, mode, op0, op1)
REAL_VALUE_FROM_CONST_DOUBLE (d0, trueop0);
REAL_VALUE_FROM_CONST_DOUBLE (d1, trueop1);
- /* Comparisons are unordered iff at least one of the values is NaN. */
+ /* Comparisons are unordered iff at least one of the values is NaN.  */
if (REAL_VALUE_ISNAN (d0) || REAL_VALUE_ISNAN (d1))
switch (code)
{
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index 1385e8389be..b102b51b5d6 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -678,7 +678,7 @@ inlinable_function_p (fn, id)
/* Assume it is not inlinable. */
inlinable = 0;
- /* The number of instructions (estimated) of current function. */
+ /* The number of instructions (estimated) of current function.  */
currfn_insns = DECL_NUM_STMTS (fn) * INSNS_PER_STMT;
/* If we're not inlining things, then nothing is inlinable. */
@@ -709,7 +709,7 @@ inlinable_function_p (fn, id)
DECL_UNINLINABLE (fn) = ! inlinable;
/* In case we don't disregard the inlining limits and we basically
- can inline this function, investigate further. */
+ can inline this function, investigate further.  */
if (! (*lang_hooks.tree_inlining.disregard_inline_limits) (fn)
&& inlinable)
{
@@ -717,13 +717,13 @@ inlinable_function_p (fn, id)
+ currfn_insns;
/* In the extreme case that we have exceeded the recursive inlining
limit by a huge factor (128), we just say no. Should not happen
- in real life. */
+ in real life.  */
if (sum_insns > MAX_INLINE_INSNS * 128)
inlinable = 0;
/* If we did not hit the extreme limit, we use a linear function
with slope -1/MAX_INLINE_SLOPE to exceedingly decrease the
allowable size. We always allow a size of MIN_INLINE_INSNS
- though. */
+ though.  */
else if ((sum_insns > MAX_INLINE_INSNS)
&& (currfn_insns > MIN_INLINE_INSNS))
{
@@ -988,7 +988,7 @@ expand_call_inline (tp, walk_subtrees, data)
/* Our function now has more statements than it did before. */
DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
- /* For accounting, subtract one for the saved call/ret. */
+ /* For accounting, subtract one for the saved call/ret.  */
id->inlined_stmts += DECL_NUM_STMTS (fn) - 1;
/* Recurse into the body of the just inlined function. */
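
The tree-inline.c hunk above describes its sliding inline budget only in comments: refuse outright far beyond the limit, otherwise shrink the allowable callee size linearly with slope -1/MAX_INLINE_SLOPE, never below MIN_INLINE_INSNS. As a rough standalone illustration of that heuristic (not GCC's actual code; the constant values and the helper name may_inline_p are assumptions made for the sketch):

/* Hedged sketch of the sliding inline budget described in the
   tree-inline.c comments above.  The constants and the helper name
   are illustrative, not GCC's real values.  */
#include <stdio.h>

#define MAX_INLINE_INSNS   300   /* assumed overall inlining budget */
#define MIN_INLINE_INSNS    10   /* callees this small are always allowed */
#define MAX_INLINE_SLOPE    32   /* assumed slope divisor */

/* Return nonzero if a callee of CURRFN_INSNS estimated instructions may
   still be inlined once SUM_INSNS instructions have accumulated.  */
static int
may_inline_p (int sum_insns, int currfn_insns)
{
  /* Exceeded the limit by a huge factor: just say no.  */
  if (sum_insns > MAX_INLINE_INSNS * 128)
    return 0;

  /* Over the limit but not extremely so: decrease the allowable size
     linearly with slope -1/MAX_INLINE_SLOPE, but always allow a size
     of MIN_INLINE_INSNS.  */
  if (sum_insns > MAX_INLINE_INSNS && currfn_insns > MIN_INLINE_INSNS)
    {
      int allowed = MAX_INLINE_INSNS
                    - (sum_insns - MAX_INLINE_INSNS) / MAX_INLINE_SLOPE;
      if (allowed < MIN_INLINE_INSNS)
        allowed = MIN_INLINE_INSNS;
      return currfn_insns <= allowed;
    }

  return 1;
}

int
main (void)
{
  printf ("%d\n", may_inline_p (250, 40));        /* under budget: 1 */
  printf ("%d\n", may_inline_p (900, 40));        /* shrunken budget applies */
  printf ("%d\n", may_inline_p (300 * 200, 40));  /* far over the limit: 0 */
  return 0;
}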