nused -> nallocatable. This avoids computing nmax for every allocation,
author: Robert Haas <rhaas@postgresql.org>
Thu, 10 Apr 2014 21:12:31 +0000 (21:12 +0000)
committer: Robert Haas <rhaas@postgresql.org>
Thu, 10 Apr 2014 21:12:31 +0000 (21:12 +0000)
which turns out to speed things up *a lot* (3.7s vs. 5.3s for 100m
8-byte allocations)

src/backend/utils/mmgr/sb_alloc.c

index bf4b0586c58d93c4f5cda7ea487114e90a27d2c7..489f9678bae06aede1ae53878c647ce43455886b 100644 (file)
@@ -47,10 +47,12 @@ struct sb_span
        Size            npages;                 /* Length of span in pages. */
        uint16          size_class;             /* Size class. */
        uint16          ninitialized;   /* Maximum number of objects ever allocated. */
-       uint16          nused;                  /* Number of objects currently allocated. */
+       uint16          nallocatable;   /* Number of objects still allocatable. */
        uint16          firstfree;              /* First object on free list. */
 };
 
+#define SB_SPAN_NOTHING_FREE           ((uint16) -1)
+
 /*
  * Small allocations are handled by dividing a relatively large chunk of
  * memory called a superblock into many small objects of equal size.  The
@@ -351,61 +353,68 @@ sb_alloc_from_heap(char *base, sb_heap *heap, int size_class)
        char   *superblock;
        char   *result;
        Size    obsize;
-       Size    nmax;
 
        /* Work out object size. */
        Assert(size_class < SB_NUM_SIZE_CLASSES);
        obsize = sb_size_classes[size_class];
-       if (size_class == 0)
-               nmax = FPM_PAGE_SIZE / obsize;
-       else
-               nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
 
        /*
         * If fullness class 1 is empty, try to find something to put in it by
         * scanning higher-numbered fullness classes (excluding the last one,
         * whose blocks are certain to all be completely full).
         */
-       for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
+       if (relptr_is_null(heap->spans[1]))
        {
-               sb_span *span;
+               Size    nmax;
 
-               if (relptr_is_null(heap->spans[1]))
-                       break;
+               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+                       nmax = FPM_PAGE_SIZE / obsize;
+               else
+                       nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
 
-               span = relptr_access(base, heap->spans[fclass]);
-               while (span != NULL)
+               for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
                {
-                       int             tfclass;
-                       sb_span *nextspan;
-                       sb_span *prevspan;
-
-                       /* Figure out what fullness class should contain this. */
-                       tfclass = span->nused * (SB_FULLNESS_CLASSES - 1) / nmax;
-
-                       /* Look up next span. */
-                       nextspan = relptr_access(base, span->nextspan);
+                       sb_span *span;
 
-                       /*
-                        * If utilization has dropped enough that this now belongs in some
-                        * other fullness class, move it there.
-                        */
-                       if (tfclass < fclass)
+                       span = relptr_access(base, heap->spans[fclass]);
+                       while (span != NULL)
                        {
-                               prevspan = relptr_access(base, span->prevspan);
-
-                               relptr_copy(span->nextspan, heap->spans[tfclass]);
-                               relptr_store(base, span->prevspan, (sb_span *) NULL);
-                               if (nextspan != NULL)
-                                       relptr_copy(nextspan->prevspan, span->prevspan);
-                               if (prevspan != NULL)
-                                       relptr_copy(prevspan->nextspan, span->nextspan);
-                               else
-                                       relptr_copy(heap->spans[fclass], span->nextspan);
+                               int             tfclass;
+                               sb_span *nextspan;
+                               sb_span *prevspan;
+
+                               /* Figure out what fullness class should contain this. */
+                               tfclass = (nmax - span->nallocatable)
+                                       * (SB_FULLNESS_CLASSES - 1) / nmax;
+
+                               /* Look up next span. */
+                               nextspan = relptr_access(base, span->nextspan);
+
+                               /*
+                                * If utilization has dropped enough that this now belongs in
+                                * some other fullness class, move it there.
+                                */
+                               if (tfclass < fclass)
+                               {
+                                       prevspan = relptr_access(base, span->prevspan);
+
+                                       relptr_copy(span->nextspan, heap->spans[tfclass]);
+                                       relptr_store(base, span->prevspan, (sb_span *) NULL);
+                                       if (nextspan != NULL)
+                                               relptr_copy(nextspan->prevspan, span->prevspan);
+                                       if (prevspan != NULL)
+                                               relptr_copy(prevspan->nextspan, span->nextspan);
+                                       else
+                                               relptr_copy(heap->spans[fclass], span->nextspan);
+                               }
+
+                               /* Advance to next span on list. */
+                               span = nextspan;
                        }
 
-                       /* Advance to next span on list. */
-                       span = nextspan;
+                       /* Stop now if we found a suitable superblock. */
+                       if (relptr_is_null(heap->spans[1]))
+                               break;
                }
        }
 
@@ -434,27 +443,21 @@ sb_alloc_from_heap(char *base, sb_heap *heap, int size_class)
         */
        active_sb = relptr_access(base, heap->spans[1]);
        superblock = relptr_access(base, active_sb->start);
-       Assert(active_sb != NULL);
-       Assert(active_sb->nused < nmax);
-       Assert(active_sb->nused <= active_sb->ninitialized);
-       if (active_sb->firstfree < nmax)
+       Assert(active_sb != NULL && active_sb->nallocatable > 0);
+       if (active_sb->firstfree != SB_SPAN_NOTHING_FREE)
        {
                result = superblock + active_sb->firstfree * obsize;
-               elog(FATAL, "superblock at %p points to %p reallocating %d of %zu result %p",
-                       active_sb, superblock, active_sb->firstfree, nmax, result);
                active_sb->firstfree = * (Size *) result;
        }
        else
        {
-               Assert(active_sb->ninitialized < nmax);
-               Assert(active_sb->ninitialized == active_sb->nused);
                result = superblock + active_sb->ninitialized * obsize;
                ++active_sb->ninitialized;
        }
-       ++active_sb->nused;
+       --active_sb->nallocatable;
 
        /* If it's now full, move it to the highest-numbered fullness class. */
-       if (active_sb->nused == nmax)
+       if (active_sb->nallocatable == 0)
                sb_transfer_first_span(base, heap, 1, SB_FULLNESS_CLASSES - 1);
 
        return result;
@@ -544,7 +547,10 @@ sb_alloc_guts(char *base, sb_region *region, sb_allocator *a, int size_class)
 
                        /* For a span-of-spans, record that we allocated ourselves. */
                        if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-                               span->ninitialized = span->nused = 1;
+                       {
+                               span->ninitialized = 1;
+                               span->nallocatable--;
+                       }
 
                        /* This should work now. */
                        result = sb_alloc_from_heap(base, heap, size_class);
@@ -567,6 +573,7 @@ sb_init_span(char *base, sb_span *span, sb_heap *heap, char *ptr,
                         Size npages, uint16 size_class)
 {
        sb_span *head = relptr_access(base, heap->spans[1]);
+       Size    obsize = sb_size_classes[size_class];
 
        if (head != NULL)
                relptr_store(base, head->prevspan, span);
@@ -578,8 +585,11 @@ sb_init_span(char *base, sb_span *span, sb_heap *heap, char *ptr,
        span->npages = npages;
        span->size_class = size_class;
        span->ninitialized = 0;
-       span->nused = 0;
-       span->firstfree = (uint16) -1;
+       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+               span->nallocatable = FPM_PAGE_SIZE / obsize;
+       else if (size_class != SB_SCLASS_SPAN_LARGE)
+               span->nallocatable = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
+       span->firstfree = SB_SPAN_NOTHING_FREE;
 }
 
 /*