diff options
-rw-r--r--  hv.c     | 2 +-
-rw-r--r--  locale.c | 2 +-
-rw-r--r--  malloc.c | 2 +-
-rw-r--r--  pp_ctl.c | 4 ++--
-rw-r--r--  util.c   | 4 ++--
5 files changed, 7 insertions(+), 7 deletions(-)
@@ -1290,7 +1290,7 @@ S_hsplit(pTHX_ HV *hv, STRLEN const oldsize, STRLEN newsize) dest->xhv_fill_lazy = 0; } else { /* no existing aux structure, but we allocated space for one - * so intialize it properly. This unrolls hv_auxinit() a bit, + * so initialize it properly. This unrolls hv_auxinit() a bit, * since we have to do the realloc anyway. */ /* first we set the iterator's xhv_rand so it can be copied into lastrand below */ #ifdef PERL_HASH_RANDOMIZE_KEYS @@ -921,7 +921,7 @@ Perl_init_i18nl10n(pTHX_ int printwarn) } /* Calculate what fallback locales to try. We have avoided this - * until we have to, becuase failure is quite unlikely. This will + * until we have to, because failure is quite unlikely. This will * usually change the upper bound of the loop we are in. * * Since the system's default way of setting the locale has not @@ -1524,7 +1524,7 @@ getpages(MEM_SIZE needed, int *nblksp, int bucket) if (add) { DEBUG_m(PerlIO_printf(Perl_debug_log, - "sbrk(%ld) to fix non-continuous/off-page sbrk:\n\t%ld for alignement,\t%ld were assumed to come from the tail of the previous sbrk\n", + "sbrk(%ld) to fix non-continuous/off-page sbrk:\n\t%ld for alignment,\t%ld were assumed to come from the tail of the previous sbrk\n", (long)add, (long) slack, (long) sbrked_remains)); newcp = (char *)sbrk(add); @@ -3670,7 +3670,7 @@ S_doopen_pm(pTHX_ SV *name) #endif /* !PERL_DISABLE_PMC */ /* require doesn't search for absolute names, or when the name is - explicity relative the current directory */ + explicitly relative the current directory */ PERL_STATIC_INLINE bool S_path_is_searchable(const char *name) { @@ -5433,7 +5433,7 @@ S_run_user_filter(pTHX_ int idx, SV *buf_sv, int maxlen) umaxlen = maxlen; /* I was having segfault trouble under Linux 2.2.5 after a - parse error occured. (Had to hack around it with a test + parse error occurred. (Had to hack around it with a test for PL_parser->error_count == 0.) Solaris doesn't segfault -- not sure where the trouble is yet. 
XXX */ @@ -2966,7 +2966,7 @@ Perl_wait4pid(pTHX_ Pid_t pid, int *statusp, int flags) *statusp = SvIVX(sv); /* The hash iterator is currently on this entry, so simply calling hv_delete would trigger the lazy delete, which on - aggregate does more work, beacuse next call to hv_iterinit() + aggregate does more work, because next call to hv_iterinit() would spot the flag, and have to call the delete routine, while in the meantime any new entries can't re-use that memory. */ @@ -3936,7 +3936,7 @@ Fill the sv with current working directory /* Originally written in Perl by John Bazik; rewritten in C by Ben Sugars. * rewritten again by dougm, optimized for use with xs TARG, and to prefer * getcwd(3) if available - * Comments from the orignal: + * Comments from the original: * This is a faster version of getcwd. It's also more dangerous * because you might chdir out of a directory that you can't chdir * back into. */ |