Size npages; /* Length of span in pages. */
uint16 size_class; /* Size class. */
uint16 ninitialized; /* Maximum number of objects ever allocated. */
- uint16 nused; /* Number of objects currently allocated. */
+ uint16 nallocatable; /* Number of objects that can still be allocated. */
uint16 firstfree; /* First object on free list. */
};
+#define SB_SPAN_NOTHING_FREE ((uint16) -1)
+
/*
* Small allocations are handled by dividing a relatively large chunk of
* memory called a superblock into many small objects of equal size. The
char *superblock;
char *result;
Size obsize;
- Size nmax;
/* Work out object size. */
Assert(size_class < SB_NUM_SIZE_CLASSES);
obsize = sb_size_classes[size_class];
- if (size_class == 0)
- nmax = FPM_PAGE_SIZE / obsize;
- else
- nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
/*
* If fullness class 1 is empty, try to find something to put in it by
* scanning higher-numbered fullness classes (excluding the last one,
* whose blocks are certain to all be completely full).
*/
- for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
+ if (relptr_is_null(heap->spans[1]))
{
- sb_span *span;
+ Size nmax;
- if (relptr_is_null(heap->spans[1]))
- break;
+ if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+ nmax = FPM_PAGE_SIZE / obsize;
+ else
+ nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
- span = relptr_access(base, heap->spans[fclass]);
- while (span != NULL)
+ for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
{
- int tfclass;
- sb_span *nextspan;
- sb_span *prevspan;
-
- /* Figure out what fullness class should contain this. */
- tfclass = span->nused * (SB_FULLNESS_CLASSES - 1) / nmax;
-
- /* Look up next span. */
- nextspan = relptr_access(base, span->nextspan);
+ sb_span *span;
- /*
- * If utilization has dropped enough that this now belongs in some
- * other fullness class, move it there.
- */
- if (tfclass < fclass)
+ span = relptr_access(base, heap->spans[fclass]);
+ while (span != NULL)
{
- prevspan = relptr_access(base, span->prevspan);
-
- relptr_copy(span->nextspan, heap->spans[tfclass]);
- relptr_store(base, span->prevspan, (sb_span *) NULL);
- if (nextspan != NULL)
- relptr_copy(nextspan->prevspan, span->prevspan);
- if (prevspan != NULL)
- relptr_copy(prevspan->nextspan, span->nextspan);
- else
- relptr_copy(heap->spans[fclass], span->nextspan);
+ int tfclass;
+ sb_span *nextspan;
+ sb_span *prevspan;
+
+ /* Figure out what fullness class should contain this. */
+ tfclass = (nmax - span->nallocatable)
+ * (SB_FULLNESS_CLASSES - 1) / nmax;
+
+ /* Look up next span. */
+ nextspan = relptr_access(base, span->nextspan);
+
+ /*
+ * If utilization has dropped enough that this now belongs in
+ * some other fullness class, move it there.
+ */
+ if (tfclass < fclass)
+ {
+ prevspan = relptr_access(base, span->prevspan);
+
+ relptr_copy(span->nextspan, heap->spans[tfclass]);
+ relptr_store(base, span->prevspan, (sb_span *) NULL);
+ if (nextspan != NULL)
+ relptr_copy(nextspan->prevspan, span->prevspan);
+ if (prevspan != NULL)
+ relptr_copy(prevspan->nextspan, span->nextspan);
+ else
+ relptr_copy(heap->spans[fclass], span->nextspan);
+ }
+
+ /* Advance to next span on list. */
+ span = nextspan;
}
- /* Advance to next span on list. */
- span = nextspan;
+ /* Stop now if we found a suitable superblock. */
+ if (!relptr_is_null(heap->spans[1]))
+ break;
}
}
*/
active_sb = relptr_access(base, heap->spans[1]);
- superblock = relptr_access(base, active_sb->start);
- Assert(active_sb != NULL);
- Assert(active_sb->nused < nmax);
- Assert(active_sb->nused <= active_sb->ninitialized);
- if (active_sb->firstfree < nmax)
+ Assert(active_sb != NULL && active_sb->nallocatable > 0);
+ superblock = relptr_access(base, active_sb->start);
+ if (active_sb->firstfree != SB_SPAN_NOTHING_FREE)
{
result = superblock + active_sb->firstfree * obsize;
- elog(FATAL, "superblock at %p points to %p reallocating %d of %zu result %p",
- active_sb, superblock, active_sb->firstfree, nmax, result);
active_sb->firstfree = * (Size *) result;
}
else
{
- Assert(active_sb->ninitialized < nmax);
- Assert(active_sb->ninitialized == active_sb->nused);
result = superblock + active_sb->ninitialized * obsize;
++active_sb->ninitialized;
}
- ++active_sb->nused;
+ --active_sb->nallocatable;
/* If it's now full, move it to the highest-numbered fullness class. */
- if (active_sb->nused == nmax)
+ if (active_sb->nallocatable == 0)
sb_transfer_first_span(base, heap, 1, SB_FULLNESS_CLASSES - 1);
return result;
/* For a span-of-spans, record that we allocated ourselves. */
if (size_class == SB_SCLASS_SPAN_OF_SPANS)
- span->ninitialized = span->nused = 1;
+ {
+ span->ninitialized = 1;
+ span->nallocatable--;
+ }
/* This should work now. */
result = sb_alloc_from_heap(base, heap, size_class);
Size npages, uint16 size_class)
{
sb_span *head = relptr_access(base, heap->spans[1]);
+ Size obsize = sb_size_classes[size_class];
if (head != NULL)
relptr_store(base, head->prevspan, span);
span->npages = npages;
span->size_class = size_class;
span->ninitialized = 0;
- span->nused = 0;
- span->firstfree = (uint16) -1;
+ if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+ span->nallocatable = FPM_PAGE_SIZE / obsize;
+ else if (size_class != SB_SCLASS_SPAN_LARGE)
+ span->nallocatable = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
+ else
+ span->nallocatable = 0; /* NOTE(review): large spans track no per-object counts; zero avoids an uninitialized field and matches nallocatable==0 == "full" -- confirm no caller reads it for large spans */
+ span->firstfree = SB_SPAN_NOTHING_FREE;
}
/*