author     Andi Kleen <ak@linux.intel.com>  2011-10-29 01:02:35 +0000
committer  Andi Kleen <ak@gcc.gnu.org>      2011-10-29 01:02:35 +0000
commit     3bc50163311275e8da17365a68e10a765a134867 (patch)
tree       8f9e002586202119430d4bd8c63fee062dce5c6a /gcc/ggc-page.c
parent     25f0ea8135b221a7d560e9062d30d14be33c5582 (diff)
download   gcc-3bc50163311275e8da17365a68e10a765a134867.tar.gz
Use more efficient alignment in ggc
Jakub had some concerns about the performance of the page alignments in
ggc-page, which currently use a hardware division instruction.  This patch
changes them all to use a new PAGE_ALIGN macro, which exploits the fact
that pages are a power of two in size.

gcc/:
2011-10-21  Andi Kleen  <ak@linux.intel.com>

	* ggc-page (PAGE_ALIGN): Add.
	(alloc_page, ggc_pch_total_size, ggc_pch_this_base,
	ggc_pch_read): Replace ROUND_UP with PAGE_ALIGN.

From-SVN: r180650
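For reference, here is a minimal standalone C sketch contrasting the two
rounding strategies the message describes.  It is not taken from ggc-page.c:
the demo_round_up/demo_page_align names and the fixed 4096-byte DEMO_PAGESIZE
are illustrative assumptions; the patch itself rounds against the runtime
G.pagesize.

/* Sketch of the two rounding strategies, under the assumption of a
   4096-byte (power-of-two) page size.  demo_round_up mirrors the
   CEIL-based ROUND_UP, which needs a division; demo_page_align uses
   the mask trick PAGE_ALIGN relies on, valid only for power-of-two
   alignments.  */

#include <assert.h>
#include <stddef.h>

#define DEMO_PAGESIZE 4096UL   /* assumed power-of-two page size */

/* Round up with a division, as CEIL (x, f) * (f) does.  */
static size_t
demo_round_up (size_t x)
{
  return ((x + DEMO_PAGESIZE - 1) / DEMO_PAGESIZE) * DEMO_PAGESIZE;
}

/* Round up by masking off the low bits; no division needed.  */
static size_t
demo_page_align (size_t x)
{
  return (x + DEMO_PAGESIZE - 1) & ~(DEMO_PAGESIZE - 1);
}

int
main (void)
{
  /* Both forms agree for power-of-two page sizes.  */
  assert (demo_round_up (1) == 4096);
  assert (demo_page_align (1) == 4096);
  assert (demo_round_up (4096) == 4096);
  assert (demo_page_align (4097) == 8192);
  return 0;
}

The division form works for any rounding factor; the masked form is faster
but correct only when the factor is a power of two, which is why the patch
can use it for page sizes.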
Diffstat (limited to 'gcc/ggc-page.c')
-rw-r--r--  gcc/ggc-page.c  12
1 file changed, 8 insertions, 4 deletions
diff --git a/gcc/ggc-page.c b/gcc/ggc-page.c
index c796160316b..ee796cbb7e9 100644
--- a/gcc/ggc-page.c
+++ b/gcc/ggc-page.c
@@ -221,6 +221,10 @@ static const size_t extra_order_size_table[] = {
#define ROUND_UP(x, f) (CEIL (x, f) * (f))
+/* Round X to next multiple of the page size */
+
+#define PAGE_ALIGN(x) (((x) + G.pagesize - 1) & ~(G.pagesize - 1))
+
/* The Ith entry is the number of objects on a page of order I. */
static unsigned objects_per_page_table[NUM_ORDERS];
@@ -739,7 +743,7 @@ alloc_page (unsigned order)
entry_size = num_objects * OBJECT_SIZE (order);
if (entry_size < G.pagesize)
entry_size = G.pagesize;
- entry_size = ROUND_UP (entry_size, G.pagesize);
+ entry_size = PAGE_ALIGN (entry_size);
entry = NULL;
page = NULL;
@@ -2236,7 +2240,7 @@ ggc_pch_total_size (struct ggc_pch_data *d)
unsigned i;
for (i = 0; i < NUM_ORDERS; i++)
- a += ROUND_UP (d->d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+ a += PAGE_ALIGN (d->d.totals[i] * OBJECT_SIZE (i));
return a;
}
@@ -2249,7 +2253,7 @@ ggc_pch_this_base (struct ggc_pch_data *d, void *base)
for (i = 0; i < NUM_ORDERS; i++)
{
d->base[i] = a;
- a += ROUND_UP (d->d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+ a += PAGE_ALIGN (d->d.totals[i] * OBJECT_SIZE (i));
}
}
@@ -2442,7 +2446,7 @@ ggc_pch_read (FILE *f, void *addr)
if (d.totals[i] == 0)
continue;
- bytes = ROUND_UP (d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+ bytes = PAGE_ALIGN (d.totals[i] * OBJECT_SIZE (i));
num_objs = bytes / OBJECT_SIZE (i);
entry = XCNEWVAR (struct page_entry, (sizeof (struct page_entry)
- sizeof (long)