author     kazu <kazu@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-04-29 15:39:12 +0000
committer  kazu <kazu@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-04-29 15:39:12 +0000
commit     9c9bad970601f22c1e83f9333b912bbb2d9f7763 (patch)
tree       6c0fb333200e6a04c7e72f1b30ce293bef0d3d4c /gcc
parent     767efd18dd81f73b1b69029182b1bc5b2b0b3c1d (diff)
download   gcc-9c9bad970601f22c1e83f9333b912bbb2d9f7763.tar.gz
* builtins.c, cgraph.c, cgraphunit.c, final.c, fold-const.c:
Fix comment typos.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@81291 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog     5
-rw-r--r--  gcc/builtins.c    2
-rw-r--r--  gcc/cgraph.c      10
-rw-r--r--  gcc/cgraphunit.c  8
-rw-r--r--  gcc/final.c       2
-rw-r--r--  gcc/fold-const.c  2
6 files changed, 17 insertions, 12 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 68ee9a5ff11..b660fdbf794 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,8 @@
+2004-04-29 Kazu Hirata <kazu@cs.umass.edu>
+
+ * builtins.c, cgraph.c, cgraphunit.c, final.c, fold-const.c:
+ Fix comment typos.
+
2004-04-29 Douglas B Rupp <rupp@gnat.com>
* gcc.c (DELETE_IF_ORDINARY): New macro default definition.
diff --git a/gcc/builtins.c b/gcc/builtins.c
index e2d393444d1..3130f3b7089 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -5134,7 +5134,7 @@ expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
/* Otherwise call the wrapper. This should be equivalent for the rest of
compiler, so the code does not diverge, and the wrapper may run the
- code neccesary for keeping the profiling sane. */
+ code necessary for keeping the profiling sane. */
switch (DECL_FUNCTION_CODE (fn))
{
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 04049df6bf5..0766a5dcb6b 100644
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -68,8 +68,8 @@ The callgraph:
caller.
Each edge has "inline_failed" field. When the field is set to NULL,
- the call will be inlined. When it is non-NULL it contains an reason
- why inlining wasn't performaned.
+ the call will be inlined. When it is non-NULL it contains a reason
+ why inlining wasn't performed.
The varpool data structure:
@@ -99,8 +99,8 @@ The varpool data structure:
/* Hash table used to convert declarations into nodes. */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
-/* We destructivly update callgraph during inlining and thus we need to
- keep information on whether inlining happent separately. */
+/* We destructively update callgraph during inlining and thus we need to
+ keep information on whether inlining happend separately. */
htab_t cgraph_inline_hash;
/* The linked list of cgraph nodes. */
@@ -209,7 +209,7 @@ cgraph_edge (struct cgraph_node *node, tree call_expr)
/* This loop may turn out to be performance problem. In such case adding
hashtables into call nodes with very many edges is probably best
- sollution. It is not good idea to add pointer into CALL_EXPR itself
+ solution. It is not good idea to add pointer into CALL_EXPR itself
because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */
for (e = node->callees; e; e= e->next_callee)
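The corrected comments in this hunk describe the convention the call graph uses to record inlining decisions: an edge whose inline_failed field is NULL will be inlined, while a non-NULL value carries the reason inlining was not performed. Below is a minimal, self-contained sketch of that convention using stand-in types; the real structures live in cgraph.h and carry many more fields, and dump_edge_decisions is an invented name used only for illustration.

#include <stdio.h>

/* Stand-ins for the call-graph types discussed above.  Assumption:
   inline_failed is a human-readable string that is NULL when the call
   will be inlined, matching the comment fixed in this hunk.  */
struct cgraph_edge
{
  struct cgraph_edge *next_callee;   /* Next outgoing edge of the caller.  */
  const char *inline_failed;         /* NULL => call will be inlined.  */
};

struct cgraph_node
{
  struct cgraph_edge *callees;       /* List of outgoing call edges.  */
};

/* Report the inlining decision recorded on each outgoing edge of NODE.  */
static void
dump_edge_decisions (struct cgraph_node *node, FILE *f)
{
  struct cgraph_edge *e;

  for (e = node->callees; e; e = e->next_callee)
    {
      if (!e->inline_failed)
        fprintf (f, "call will be inlined\n");
      else
        fprintf (f, "not inlined: %s\n", e->inline_failed);
    }
}

The same walk over callees/next_callee appears in the cgraph_edge hunk above; the point of the sketch is only the NULL/non-NULL convention on inline_failed.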
diff --git a/gcc/cgraphunit.c b/gcc/cgraphunit.c
index 4bdd41a5aa9..cbe468e2a9e 100644
--- a/gcc/cgraphunit.c
+++ b/gcc/cgraphunit.c
@@ -923,7 +923,7 @@ cgraph_remove_unreachable_nodes (void)
eliminated
Reachable extern inline functions we sometimes inlined will be turned into
unanalyzed nodes so they look like for true extern functions to the rest
- of code. Body of such functions is relased via remove_node once the
+ of code. Body of such functions is released via remove_node once the
inline clones are eliminated. */
for (node = cgraph_nodes; node; node = node->next)
{
@@ -1047,7 +1047,7 @@ cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate)
else
e->callee->global.inlined_to = e->caller;
- /* Recursivly clone all bodies. */
+ /* Recursively clone all bodies. */
for (e = e->callee->callees; e; e = e->next_callee)
if (!e->inline_failed)
cgraph_clone_inlined_nodes (e, duplicate);
@@ -1110,7 +1110,7 @@ cgraph_mark_inline (struct cgraph_edge *edge)
struct cgraph_edge *e, *next;
int times = 0;
- /* Look for all calls, mark them inline and clone recursivly
+ /* Look for all calls, mark them inline and clone recursively
all inlined functions. */
for (e = what->callers; e; e = next)
{
@@ -1182,7 +1182,7 @@ cgraph_default_inline_p (struct cgraph_node *n)
/* Return true when inlining WHAT would create recursive inlining.
We call recursive inlining all cases where same function appears more than
- once in the single recusion nest path in the inline graph. */
+ once in the single recursion nest path in the inline graph. */
static bool
cgraph_recursive_inlining_p (struct cgraph_node *to,
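The comment fixed in the last hunk defines recursive inlining as the same function appearing more than once along a single recursion nest path in the inline graph. The sketch below illustrates that idea with stand-in types by walking the chain of functions a node has been inlined into and comparing declarations. It is a conceptual illustration only, not the patch's cgraph_recursive_inlining_p, and the chain-style inlined_to field is an assumption of the sketch rather than the real cgraph_node layout.

#include <stdbool.h>

/* Stand-in node type.  Assumption: 'inlined_to' links each inline clone
   to the function it was inlined into, forming a chain up the inline
   nest; the real cgraph_node differs in detail.  */
struct inline_nest_node
{
  const void *decl;                     /* Identity of the function.  */
  struct inline_nest_node *inlined_to;  /* Enclosing function in the nest,
                                           or NULL at the top.  */
};

/* Return true if inlining WHAT into TO would make the same function
   appear twice on the current inline nest path -- the situation the
   corrected comment calls recursive inlining.  */
static bool
would_create_recursive_inlining (struct inline_nest_node *to,
                                 struct inline_nest_node *what)
{
  struct inline_nest_node *n;

  for (n = to; n; n = n->inlined_to)
    if (n->decl == what->decl)
      return true;
  return false;
}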
diff --git a/gcc/final.c b/gcc/final.c
index 9aa4657b068..500f4062e2d 100644
--- a/gcc/final.c
+++ b/gcc/final.c
@@ -1713,7 +1713,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
case NOTE_INSN_BASIC_BLOCK:
- /* If we are performing the optimization that paritions
+ /* If we are performing the optimization that partitions
basic blocks into hot & cold sections of the .o file,
then at the start of each new basic block, before
beginning to write code for the basic block, we need to
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 57ae918392c..e2a1ffb096f 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -4806,7 +4806,7 @@ count_cond (tree expr, int lim)
expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
COND is the first argument to CODE; otherwise (as in the example
given here), it is the second argument. TYPE is the type of the
- original expression. Return NULL_TREE if no simplication is
+ original expression. Return NULL_TREE if no simplification is
possible. */
static tree
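The fold-const.c comment being corrected documents a helper that tries to distribute a binary operation over a conditional operand and returns NULL_TREE when no simplification is possible. At the source level, the transformation it describes has roughly the shape below; this is an illustration of the idea only, not GCC code, and the function names are invented.

/* Illustration only: the conditional is the second operand of the
   binary operation, as in the case the comment describes.  */
int shape_before_folding (int a, int b, int x, int y)
{
  return a + (b ? x : y);
}

/* After the fold, the operation has been pushed into both arms of the
   conditional, so each arm can be simplified further.  */
int shape_after_folding (int a, int b, int x, int y)
{
  return b ? (a + x) : (a + y);
}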