[4/4] Use more efficient alignment in ggc

Message ID 1319262853-32370-5-git-send-email-andi@firstfloor.org
State New

Commit Message

Andi Kleen Oct. 22, 2011, 5:54 a.m. UTC
From: Andi Kleen <ak@linux.intel.com>

Jakub had some concerns about the performance of the page alignments in
ggc-page, which currently use hardware division instructions.
This patch changes them all to use a new PAGE_ALIGN macro, which
exploits the fact that the page size is a power of two.
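
For illustration only, here is a minimal standalone sketch (not part of the
patch) comparing the division-based rounding that ROUND_UP/CEIL perform with
the mask-based rounding PAGE_ALIGN uses; the names ALIGN_POW2 and the 4096
page size are assumptions made up for this example:

  #include <assert.h>
  #include <stddef.h>
  #include <stdio.h>

  /* Division-based rounding, equivalent to ggc-page's ROUND_UP/CEIL.  */
  #define CEIL(x, y)      (((x) + (y) - 1) / (y))
  #define ROUND_UP(x, f)  (CEIL (x, f) * (f))

  /* Mask-based rounding; correct only when ALIGN is a power of two.  */
  #define ALIGN_POW2(x, align)  (((x) + (align) - 1) & ~((size_t) (align) - 1))

  int
  main (void)
  {
    size_t pagesize = 4096;   /* Assumed power-of-two page size.  */
    size_t sizes[] = { 1, 4095, 4096, 4097, 123456 };
    size_t i;

    for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
      {
        /* Both forms agree whenever the alignment is a power of two,
           but the mask form needs no division instruction.  */
        assert (ROUND_UP (sizes[i], pagesize)
                == ALIGN_POW2 (sizes[i], pagesize));
        printf ("%zu -> %zu\n", sizes[i], ALIGN_POW2 (sizes[i], pagesize));
      }
    return 0;
  }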

2011-10-21  Andi Kleen  <ak@linux.intel.com>

	* ggc-page.c (PAGE_ALIGN): Add.
	(alloc_page, ggc_pch_total_size, ggc_pch_this_base, ggc_pch_read):
	Replace ROUND_UP with PAGE_ALIGN.
---
 gcc/ggc-page.c |   12 ++++++++----
 1 files changed, 8 insertions(+), 4 deletions(-)

Patch

diff --git a/gcc/ggc-page.c b/gcc/ggc-page.c
index 0bf0907..02db7e7 100644
--- a/gcc/ggc-page.c
+++ b/gcc/ggc-page.c
@@ -220,6 +220,10 @@  static const size_t extra_order_size_table[] = {
 
 #define ROUND_UP(x, f) (CEIL (x, f) * (f))
 
+/* Round X to the next multiple of the page size.  */
+
+#define PAGE_ALIGN(x) (((x) + G.pagesize - 1) & ~(G.pagesize - 1))
+
 /* The Ith entry is the number of objects on a page of order I.  */
 
 static unsigned objects_per_page_table[NUM_ORDERS];
@@ -738,7 +742,7 @@  alloc_page (unsigned order)
   entry_size = num_objects * OBJECT_SIZE (order);
   if (entry_size < G.pagesize)
     entry_size = G.pagesize;
-  entry_size = ROUND_UP (entry_size, G.pagesize);
+  entry_size = PAGE_ALIGN (entry_size);
 
   entry = NULL;
   page = NULL;
@@ -2235,7 +2239,7 @@  ggc_pch_total_size (struct ggc_pch_data *d)
   unsigned i;
 
   for (i = 0; i < NUM_ORDERS; i++)
-    a += ROUND_UP (d->d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+    a += PAGE_ALIGN (d->d.totals[i] * OBJECT_SIZE (i));
   return a;
 }
 
@@ -2248,7 +2252,7 @@  ggc_pch_this_base (struct ggc_pch_data *d, void *base)
   for (i = 0; i < NUM_ORDERS; i++)
     {
       d->base[i] = a;
-      a += ROUND_UP (d->d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+      a += PAGE_ALIGN (d->d.totals[i] * OBJECT_SIZE (i));
     }
 }
 
@@ -2441,7 +2445,7 @@  ggc_pch_read (FILE *f, void *addr)
       if (d.totals[i] == 0)
 	continue;
 
-      bytes = ROUND_UP (d.totals[i] * OBJECT_SIZE (i), G.pagesize);
+      bytes = PAGE_ALIGN (d.totals[i] * OBJECT_SIZE (i));
       num_objs = bytes / OBJECT_SIZE (i);
       entry = XCNEWVAR (struct page_entry, (sizeof (struct page_entry)
 					    - sizeof (long)