Refactoring to streamline the common case.
author     Robert Haas <rhaas@postgresql.org>
           Fri, 11 Apr 2014 01:07:08 +0000
committer  Robert Haas <rhaas@postgresql.org>
           Fri, 11 Apr 2014 01:07:08 +0000
It makes more sense this way, too.
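The shape of the change: sb_alloc_guts now handles the common case (an active superblock with a free object) inline; everything else (rebalancing fullness classes, stealing a span, creating a new superblock) moves into the new helper sb_ensure_active_superblock, which runs only when fullness class 1 is empty. A minimal sketch of the fast-path/slow-path split, using hypothetical simplified types (block, heap, ensure_active_block) rather than the real sb_alloc API, with no locking:

    #include <stdbool.h>
    #include <stdlib.h>

    /* Hypothetical stand-ins for sb_heap and its active superblock. */
    typedef struct block { size_t obsize; int nfree; char *next; } block;
    typedef struct heap { block *active; } heap;

    /* Slow path, kept out of line: obtain a block with free space. */
    static bool
    ensure_active_block(heap *h)
    {
        block *b = malloc(sizeof(block));
        char  *mem = (b != NULL) ? malloc(64 * 128) : NULL;

        if (mem == NULL)
        {
            free(b);
            return false;
        }
        b->obsize = 64;
        b->nfree = 128;
        b->next = mem;
        h->active = b;
        return true;
    }

    /* Fast path, analogous to the refactored sb_alloc_guts. */
    static char *
    alloc_guts(heap *h)
    {
        char *result;

        if (h->active == NULL && !ensure_active_block(h))
            return NULL;            /* every fallback failed: fail the request */
        result = h->active->next;   /* common case: carve the next object */
        h->active->next += h->active->obsize;
        if (--h->active->nfree == 0)
            h->active = NULL;       /* exhausted (and leaked, in this sketch) */
        return result;
    }

    int
    main(void)
    {
        heap h = { NULL };

        return alloc_guts(&h) == NULL;  /* 0 on success */
    }

The test-and-carve path runs on every allocation, while the slow path runs only when the active superblock is missing or exhausted; that is the "common case" the commit streamlines.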

src/backend/utils/mmgr/sb_alloc.c

index b967f9dd742fc6fb9a6e407f5b84c5dbf874ab1e..a2fd3ccc57ee9176c1fc4e9677d902d7d7524304 100644
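A note on reading the hunks below: every list operation threads a char *base argument through relptr_access/relptr_store/relptr_copy. The spans live in memory that may be mapped at different addresses in different processes, so links are stored as offsets from the mapping base rather than as raw pointers. A sketch of the idea, using an illustrative offset-based type rather than the actual relptr.h macros:

    #include <stddef.h>

    /* Illustrative relative pointer: an offset from the mapping base; 0 is NULL. */
    typedef struct { size_t off; } relative_ptr;

    static void *
    rp_access(char *base, relative_ptr rp)
    {
        return (rp.off == 0) ? NULL : (void *) (base + rp.off);
    }

    static void
    rp_store(char *base, relative_ptr *rp, void *val)
    {
        rp->off = (val == NULL) ? 0 : (size_t) ((char *) val - base);
    }

    /* Copying a link needs no base: offsets are position-independent. */
    static void
    rp_copy(relative_ptr *dst, const relative_ptr *src)
    {
        dst->off = src->off;
    }

    int
    main(void)
    {
        char base[64];
        relative_ptr a, b;

        rp_store(base, &a, base + 16);  /* stored as offset 16 */
        rp_copy(&b, &a);                /* copies without consulting base */
        return (rp_access(base, b) == base + 16) ? 0 : 1;
    }

Because links are offsets, a span list built by one backend can be walked by another even though each maps the segment at a different address.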
@@ -112,9 +112,11 @@ static char sb_size_class_map[] = {
 #define SB_NUM_SIZE_CLASSES                            lengthof(sb_size_classes)
 
 /* Helper functions. */
-static char *sb_alloc_from_heap(char *base, sb_heap *heap, int size_class);
 static char *sb_alloc_guts(char *base, sb_region *region,
                          sb_allocator *a, int size_class);
+static bool sb_ensure_active_superblock(char *base, sb_region *region,
+                                                       sb_allocator *a, sb_heap *heap,
+                                                       int size_class);
 static void sb_init_span(char *base, sb_span *span, sb_heap *heap,
                         char *ptr, Size npages, uint16 size_class);
 static void sb_out_of_memory_error(sb_allocator *a);
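The relocation loop moved below into sb_ensure_active_superblock classifies each span with (nmax - span->nallocatable) * (SB_FULLNESS_CLASSES - 1) / nmax, i.e. floor(utilization * (N - 1)), so only completely full superblocks land in the last class. A worked sketch; SB_FULLNESS_CLASSES is defined outside this diff, so the value 4 used here is only an assumption:

    #include <stddef.h>
    #include <stdio.h>

    #define FULLNESS_CLASSES 4      /* assumed for illustration */

    /* Same arithmetic as the relocation loop below. */
    static int
    fullness_class(size_t nmax, size_t nallocatable)
    {
        return (int) ((nmax - nallocatable) * (FULLNESS_CLASSES - 1) / nmax);
    }

    int
    main(void)
    {
        /* nmax = 1024 objects, e.g. 16 pages of 4096 bytes / 64-byte objects. */
        printf("%d\n", fullness_class(1024, 1024));     /* empty     -> 0 */
        printf("%d\n", fullness_class(1024, 600));      /* ~41% full -> 1 */
        printf("%d\n", fullness_class(1024, 256));      /* 75% full  -> 2 */
        printf("%d\n", fullness_class(1024, 0));        /* 100% full -> 3 */
        return 0;
    }

With these numbers, a span whose utilization drops below 2/3 migrates out of fullness class 2, which is exactly the movement the loop performs before falling back to stealing a span or creating a new superblock.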
@@ -322,117 +324,35 @@ sb_reset_allocator(sb_allocator *a)
 }
 
 /*
- * Allocate an object from the provided heap.  Caller is responsible for
- * any required locking.
- *
- * Fullness class K of 0..N is loosely intended to represent superblocks
- * whose utilization percentage is at least K/N, but we only enforce this
- * rigorously for the highest-numbered fullness class, which always contains
- * exactly those blocks that are completely full.  It's otherwise acceptable
- * for a superblock to be in a higher-numbered fullness class than the one
- * to which it logically belongs.  In addition, the active superblock, which
- * is always the first block in fullness class 1, is permitted to have a
- * higher allocation percentage than would normally be allowable for that
- * fullness class; we don't move it until it's completely full, and then
- * it goes to the highest-numbered fullness class.
- *
- * It might seem odd that the active superblock is the head of fullness class
- * 1 rather than fullness class 0, but experience with other allocators has
- * shown that it's usually better to allocate from a superblock that's
- * moderately full rather than one that's nearly empty.  Insofar as is
- * reasonably possible, we want to avoid performing new allocations in a
- * superblock that would otherwise become empty soon.
+ * Allocate an object of the requested size class from the given allocator.
+ * If necessary, steal or create another superblock.
  */
 static char *
-sb_alloc_from_heap(char *base, sb_heap *heap, int size_class)
+sb_alloc_guts(char *base, sb_region *region, sb_allocator *a, int size_class)
 {
+       sb_heap *heap = &a->heaps[size_class];
+       LWLock *lock = relptr_access(base, heap->lock);
        sb_span *active_sb;
-       Size    fclass;
        char   *superblock;
        char   *result;
        Size    obsize;
 
-       /* Work out object size. */
-       Assert(size_class < SB_NUM_SIZE_CLASSES);
-       obsize = sb_size_classes[size_class];
-
-       /*
-        * If fullness class 1 is empty, try to find something to put in it by
-        * scanning higher-numbered fullness classes (excluding the last one,
-        * whose blocks are certain to all be completely full).
-        */
-       if (relptr_is_null(heap->spans[1]))
-       {
-               Size    nmax;
-
-               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-                       nmax = FPM_PAGE_SIZE / obsize;
-               else
-                       nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
-
-               for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
-               {
-                       sb_span *span;
-
-                       span = relptr_access(base, heap->spans[fclass]);
-                       while (span != NULL)
-                       {
-                               int             tfclass;
-                               sb_span *nextspan;
-                               sb_span *prevspan;
-
-                               /* Figure out what fullness class should contain this. */
-                               tfclass = (nmax - span->nallocatable)
-                                       * (SB_FULLNESS_CLASSES - 1) / nmax;
-
-                               /* Look up next span. */
-                               nextspan = relptr_access(base, span->nextspan);
-
-                               /*
-                                * If utilization has dropped enough that this now belongs in
-                                * some other fullness class, move it there.
-                                */
-                               if (tfclass < fclass)
-                               {
-                                       prevspan = relptr_access(base, span->prevspan);
-
-                                       relptr_copy(span->nextspan, heap->spans[tfclass]);
-                                       relptr_store(base, span->prevspan, (sb_span *) NULL);
-                                       if (nextspan != NULL)
-                                               relptr_copy(nextspan->prevspan, span->prevspan);
-                                       if (prevspan != NULL)
-                                               relptr_copy(prevspan->nextspan, span->nextspan);
-                                       else
-                                               relptr_copy(heap->spans[fclass], span->nextspan);
-                               }
-
-                               /* Advance to next span on list. */
-                               span = nextspan;
-                       }
-
-                       /* Stop now if we found a suitable superblock. */
-                       if (relptr_is_null(heap->spans[1]))
-                               break;
-               }
-       }
+       /* If locking is in use, acquire the lock. */
+       if (lock != NULL)
+               LWLockAcquire(lock, LW_EXCLUSIVE);
 
        /*
-        * If there are no superblocks that properly belong in fullness class 1,
-        * pick one from some other fullness class and move it there anyway, so
-        * that we have an allocation target.  Our last choice is to transfer a
-        * superblock that's almost empty (and might become completely empty soon
-        * if left alone), but even that is better than failing, which is what we
-        * must do if there are no superblocks at all with freespace.
+        * If there's no active superblock, we must successfully obtain one or
+        * fail the request.
         */
-       if (relptr_is_null(heap->spans[1]))
+       if (relptr_is_null(heap->spans[1]) &&
+               !sb_ensure_active_superblock(base, region, a, heap, size_class))
        {
-               for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
-                       if (sb_transfer_first_span(base, heap, fclass, 1))
-                               break;
-               if (relptr_is_null(heap->spans[1]) &&
-                       !sb_transfer_first_span(base, heap, 0, 1))
-                               return NULL;
+               if (lock != NULL)
+                       LWLockRelease(lock);
+               return NULL;
        }
+       Assert(!relptr_is_null(heap->spans[1]));
 
        /*
         * There should be a superblock in fullness class 1 at this point, and
@@ -440,8 +360,10 @@ sb_alloc_from_heap(char *base, sb_heap *heap, int size_class)
         * free list or, failing that, initialize a new object.
         */
        active_sb = relptr_access(base, heap->spans[1]);
-       superblock = relptr_access(base, active_sb->start);
        Assert(active_sb != NULL && active_sb->nallocatable > 0);
+       superblock = relptr_access(base, active_sb->start);
+       Assert(size_class < SB_NUM_SIZE_CLASSES);
+       obsize = sb_size_classes[size_class];
        if (active_sb->firstfree != SB_SPAN_NOTHING_FREE)
        {
                result = superblock + active_sb->firstfree * obsize;
@@ -458,100 +380,184 @@ sb_alloc_from_heap(char *base, sb_heap *heap, int size_class)
        if (active_sb->nallocatable == 0)
                sb_transfer_first_span(base, heap, 1, SB_FULLNESS_CLASSES - 1);
 
+       /* We're all done.  Release the lock. */
+       if (lock != NULL)
+               LWLockRelease(lock);
+
        return result;
 }
 
 /*
- * Allocate an object of the requested size class from the given allocator.
- * If necessary, steal or create another superblock.
+ * Ensure that there is an active (i.e. fullness class 1) superblock, unless
+ * all existing superblocks are completely full and no more can be allocated.
+ *
+ * Fullness class K of 0..N is loosely intended to represent superblocks
+ * whose utilization percentage is at least K/N, but we only enforce this
+ * rigorously for the highest-numbered fullness class, which always contains
+ * exactly those blocks that are completely full.  It's otherwise acceptable
+ * for a superblock to be in a higher-numbered fullness class than the one
+ * to which it logically belongs.  In addition, the active superblock, which
+ * is always the first block in fullness class 1, is permitted to have a
+ * higher allocation percentage than would normally be allowable for that
+ * fullness class; we don't move it until it's completely full, and then
+ * it goes to the highest-numbered fullness class.
+ *
+ * It might seem odd that the active superblock is the head of fullness class
+ * 1 rather than fullness class 0, but experience with other allocators has
+ * shown that it's usually better to allocate from a superblock that's
+ * moderately full rather than one that's nearly empty.  Insofar as is
+ * reasonably possible, we want to avoid performing new allocations in a
+ * superblock that would otherwise become empty soon.
  */
-static char *
-sb_alloc_guts(char *base, sb_region *region, sb_allocator *a, int size_class)
+static bool
+sb_ensure_active_superblock(char *base, sb_region *region, sb_allocator *a,
+                                                       sb_heap *heap, int size_class)
 {
-       sb_heap *heap = &a->heaps[size_class];
-       LWLock *lock = relptr_access(base, heap->lock);
-       char *result = NULL;
-
-       /* If locking is in use, acquire the lock. */
-       if (lock != NULL)
-               LWLockAcquire(lock, LW_EXCLUSIVE);
+       Size    obsize = sb_size_classes[size_class];
+       Size    nmax;
+       int             fclass;
+       sb_span *span = NULL;
+       Size    npages = 1;
+       Size    first_page;
+       Size    i;
+       void   *ptr;
 
-       /* Attempt to allocate from the heap. */
-       result = sb_alloc_from_heap(base, heap, size_class);
+       /*
+        * Compute the number of objects that will fit in a superblock of this
+        * size class.
+        */
+       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+               nmax = FPM_PAGE_SIZE / obsize;
+       else
+               nmax = (FPM_PAGE_SIZE * SB_PAGES_PER_SUPERBLOCK) / obsize;
 
        /*
-        * If there's no space in the current heap, but there are multiple heaps
-        * per size class, we can attempt to grab a superblock from some other
-        * heap.  Otherwise, we'll need to attempt to create a new superblock.
+        * If fullness class 1 is empty, try to find something to put in it by
+        * scanning higher-numbered fullness classes (excluding the last one,
+        * whose blocks are certain to all be completely full).
         */
-       if (result == NULL)
+       for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
        {
-               sb_span *span = NULL;
-               Size    npages = 1;
-               Size    first_page;
-               Size    i;
-               void   *ptr;
-
-               /*
-                * Get an sb_span object to describe the new superblock... unless
-                * this allocation is for an sb_span object, in which case that's
-                * surely not going to work.  We handle that case by storing the
-                * sb_span describing an sb_span superblock inline.
-                */
-               if (size_class != SB_SCLASS_SPAN_OF_SPANS)
+               sb_span *span;
+
+               span = relptr_access(base, heap->spans[fclass]);
+               while (span != NULL)
                {
-                       sb_region *span_region = a->private ? NULL : region;
+                       int             tfclass;
+                       sb_span *nextspan;
+                       sb_span *prevspan;
+
+                       /* Figure out what fullness class should contain this. */
+                       tfclass = (nmax - span->nallocatable)
+                               * (SB_FULLNESS_CLASSES - 1) / nmax;
+
+                       /* Look up next span. */
+                       nextspan = relptr_access(base, span->nextspan);
+
+                       /*
+                        * If utilization has dropped enough that this now belongs in
+                        * some other fullness class, move it there.
+                        */
+                       if (tfclass < fclass)
+                       {
+                               sb_span *newhead;
+
+                               prevspan = relptr_access(base, span->prevspan);
+
+                               /* Unlink the span from fullness class fclass. */
+                               if (nextspan != NULL)
+                                       relptr_copy(nextspan->prevspan, span->prevspan);
+                               if (prevspan != NULL)
+                                       relptr_copy(prevspan->nextspan, span->nextspan);
+                               else
+                                       relptr_copy(heap->spans[fclass], span->nextspan);
+
+                               /* Push it on the head of fullness class tfclass. */
+                               relptr_copy(span->nextspan, heap->spans[tfclass]);
+                               relptr_store(base, span->prevspan, (sb_span *) NULL);
+                               newhead = relptr_access(base, heap->spans[tfclass]);
+                               if (newhead != NULL)
+                                       relptr_store(base, newhead->prevspan, span);
+                               relptr_store(base, heap->spans[tfclass], span);
+                       }
 
-                       span = (sb_span *) sb_alloc_guts(base, span_region, a,
-                                                                                        SB_SCLASS_SPAN_OF_SPANS);
-                       if (span == NULL)
-                               return NULL;
-                       npages = SB_PAGES_PER_SUPERBLOCK;
+                       /* Advance to next span on list. */
+                       span = nextspan;
                }
 
-               /* Find a region from which to allocate the superblock. */
-               if (region == NULL)
-                       region = sb_private_region_for_allocator(npages);
+               /* Stop now if we found a suitable superblock. */
+               if (!relptr_is_null(heap->spans[1]))
+                       return true;
+       }
 
-               /* Try to allocate the actual superblock. */
-               if (region == NULL ||
-                       !FreePageManagerGet(region->fpm, npages, &first_page))
-               {
-                       /* XXX. Free the span, if any. */
-                       return NULL;
-               }
-               ptr = fpm_page_to_pointer(fpm_segment_base(region->fpm),
-                                                                 first_page);
+       /*
+        * If there are no superblocks that properly belong in fullness class 1,
+        * pick one from some other fullness class and move it there anyway, so
+        * that we have an allocation target.  Our last choice is to transfer a
+        * superblock that's almost empty (and might become completely empty soon
+        * if left alone), but even that is better than failing, which is what we
+        * must do if there are no superblocks at all with freespace.
+        */
+       Assert(relptr_is_null(heap->spans[1]));
+       for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
+               if (sb_transfer_first_span(base, heap, fclass, 1))
+                       return true;
+       if (relptr_is_null(heap->spans[1]) &&
+               sb_transfer_first_span(base, heap, 0, 1))
+                       return true;
 
-               /*
-                * If this is a span-of-spans, carve the descriptor right out of
-                * the allocated space.
-                */
-               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-                       span = (sb_span *) ptr;
+       /*
+        * Get an sb_span object to describe the new superblock... unless
+        * this allocation is for an sb_span object, in which case that's
+        * surely not going to work.  We handle that case by storing the
+        * sb_span describing an sb_span superblock inline.
+        */
+       if (size_class != SB_SCLASS_SPAN_OF_SPANS)
+       {
+               sb_region *span_region = a->private ? NULL : region;
 
-               /* Initialize span and pagemap. */
-               sb_init_span(base, span, heap, ptr, npages, size_class);
-               for (i = 0; i < npages; ++i)
-                       sb_map_set(region->pagemap, first_page + i, span);
+               span = (sb_span *) sb_alloc_guts(base, span_region, a,
+                                                                                SB_SCLASS_SPAN_OF_SPANS);
+               if (span == NULL)
+                       return false;
+               npages = SB_PAGES_PER_SUPERBLOCK;
+       }
 
-               /* For a span-of-spans, record that we allocated ourselves. */
-               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-               {
-                       span->ninitialized = 1;
-                       span->nallocatable--;
-               }
+       /* Find a region from which to allocate the superblock. */
+       if (region == NULL)
+       {
+               Assert(a->private);
+               region = sb_private_region_for_allocator(npages);
+       }
 
-               /* This should work now. */
-               result = sb_alloc_from_heap(base, heap, size_class);
-               Assert(result != NULL);
+       /* Try to allocate the actual superblock. */
+       if (region == NULL ||
+               !FreePageManagerGet(region->fpm, npages, &first_page))
+       {
+               /* XXX. Free the span, if any. */
+               return false;
        }
+       ptr = fpm_page_to_pointer(fpm_segment_base(region->fpm), first_page);
 
-       /* We're all done.  Release the lock. */
-       if (lock != NULL)
-               LWLockRelease(lock);
+       /*
+        * If this is a span-of-spans, carve the descriptor right out of
+        * the allocated space.
+        */
+       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+               span = (sb_span *) ptr;
 
-       return result;
+       /* Initialize span and pagemap. */
+       sb_init_span(base, span, heap, ptr, npages, size_class);
+       for (i = 0; i < npages; ++i)
+               sb_map_set(region->pagemap, first_page + i, span);
+
+       /* For a span-of-spans, record that we allocated ourselves. */
+       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+       {
+               span->ninitialized = 1;
+               span->nallocatable--;
+       }
+       return true;
 }
 
 /*