Remove sb_try_to_steal_superblock.
author: Robert Haas <rhaas@postgresql.org>
Thu, 10 Apr 2014 23:51:43 +0000 (23:51 +0000)
committer: Robert Haas <rhaas@postgresql.org>
Thu, 10 Apr 2014 23:51:43 +0000 (23:51 +0000)
It makes no sense with just one heap per size class.

src/backend/utils/mmgr/sb_alloc.c

index 489f9678bae06aede1ae53878c647ce43455886b..b967f9dd742fc6fb9a6e407f5b84c5dbf874ab1e 100644 (file)
@@ -120,8 +120,6 @@ static void sb_init_span(char *base, sb_span *span, sb_heap *heap,
 static void sb_out_of_memory_error(sb_allocator *a);
 static bool sb_transfer_first_span(char *base, sb_heap *heap,
                                           int fromclass, int toclass);
-static bool sb_try_to_steal_superblock(char *base, sb_allocator *a,
-                                                  uint16 size_class);
 
 /*
  * Create a backend-private allocator.
@@ -488,74 +486,65 @@ sb_alloc_guts(char *base, sb_region *region, sb_allocator *a, int size_class)
         */
        if (result == NULL)
        {
-               if (sb_try_to_steal_superblock(base, a, size_class))
+               sb_span *span = NULL;
+               Size    npages = 1;
+               Size    first_page;
+               Size    i;
+               void   *ptr;
+
+               /*
+                * Get an sb_span object to describe the new superblock... unless
+                * this allocation is for an sb_span object, in which case that's
+                * surely not going to work.  We handle that case by storing the
+                * sb_span describing an sb_span superblock inline.
+                */
+               if (size_class != SB_SCLASS_SPAN_OF_SPANS)
                {
-                       /* The superblock we stole shouldn't full, so this should work. */
-                       result = sb_alloc_from_heap(base, heap, size_class);
-                       Assert(result != NULL);
+                       sb_region *span_region = a->private ? NULL : region;
+
+                       span = (sb_span *) sb_alloc_guts(base, span_region, a,
+                                                                                        SB_SCLASS_SPAN_OF_SPANS);
+                       if (span == NULL)
+                               return NULL;
+                       npages = SB_PAGES_PER_SUPERBLOCK;
                }
-               else
-               {
-                       sb_span *span = NULL;
-                       Size    npages = 1;
-                       Size    first_page;
-                       Size    i;
-                       void   *ptr;
-
-                       /*
-                        * Get an sb_span object to describe the new superblock... unless
-                        * this allocation is for an sb_span object, in which case that's
-                        * surely not going to work.  We handle that case by storing the
-                        * sb_span describing an sb_span superblock inline.
-                        */
-                       if (size_class != SB_SCLASS_SPAN_OF_SPANS)
-                       {
-                               sb_region *span_region = a->private ? NULL : region;
 
-                               span = (sb_span *) sb_alloc_guts(base, span_region, a,
-                                                                                                SB_SCLASS_SPAN_OF_SPANS);
-                               if (span == NULL)
-                                       return NULL;
-                               npages = SB_PAGES_PER_SUPERBLOCK;
-                       }
+               /* Find a region from which to allocate the superblock. */
+               if (region == NULL)
+                       region = sb_private_region_for_allocator(npages);
 
-                       /* Find a region from which to allocate the superblock. */
-                       if (region == NULL)
-                               region = sb_private_region_for_allocator(npages);
+               /* Try to allocate the actual superblock. */
+               if (region == NULL ||
+                       !FreePageManagerGet(region->fpm, npages, &first_page))
+               {
+                       /* XXX. Free the span, if any. */
+                       return NULL;
+               }
+               ptr = fpm_page_to_pointer(fpm_segment_base(region->fpm),
+                                                                 first_page);
 
-                       /* Try to allocate the actual superblock. */
-                       if (region == NULL ||
-                               !FreePageManagerGet(region->fpm, npages, &first_page))
-                       {
-                               /* XXX. Free the span, if any. */
-                               return NULL;
-                       }
-                       ptr = fpm_page_to_pointer(fpm_segment_base(region->fpm),
-                                                                         first_page);
-
-                       /*
-                        * If this is a span-of-spans, carve the descriptor right out of
-                        * the allocated space.
-                        */
-                       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-                               span = (sb_span *) ptr;
-
-                       /* Initialize span and pagemap. */
-                       sb_init_span(base, span, heap, ptr, npages, size_class);
-                       for (i = 0; i < npages; ++i)
-                               sb_map_set(region->pagemap, first_page + i, span);
-
-                       /* For a span-of-spans, record that we allocated ourselves. */
-                       if (size_class == SB_SCLASS_SPAN_OF_SPANS)
-                       {
-                               span->ninitialized = 1;
-                               span->nallocatable--;
-                       }
+               /*
+                * If this is a span-of-spans, carve the descriptor right out of
+                * the allocated space.
+                */
+               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+                       span = (sb_span *) ptr;
 
-                       /* This should work now. */
-                       result = sb_alloc_from_heap(base, heap, size_class);
-                       Assert(result != NULL);
+               /* Initialize span and pagemap. */
+               sb_init_span(base, span, heap, ptr, npages, size_class);
+               for (i = 0; i < npages; ++i)
+                       sb_map_set(region->pagemap, first_page + i, span);
+
+               /* For a span-of-spans, record that we allocated ourselves. */
+               if (size_class == SB_SCLASS_SPAN_OF_SPANS)
+               {
+                       span->ninitialized = 1;
+                       span->nallocatable--;
                }
+
+               /* This should work now. */
+               result = sb_alloc_from_heap(base, heap, size_class);
+               Assert(result != NULL);
        }
 
        /* We're all done.  Release the lock. */
@@ -638,15 +627,3 @@ sb_transfer_first_span(char *base, sb_heap *heap, int fromclass, int toclass)
 
        return true;
 }
-
-/*
- * Try to steal a superblock from another heap for the same size class,
- * to avoid wasting too much memory in concurrent allocaton scenarios.
- * Returns true if we succeed in stealing one, and false if not.
- */
-static bool
-sb_try_to_steal_superblock(char *base, sb_allocator *a, uint16 size_class)
-{
-       /* XXX */
-       return false;
-}