static void sb_init_span(char *base, sb_span *span, sb_heap *heap,
Size first_page, Size npages, uint16 size_class);
static void sb_out_of_memory_error(sb_allocator *a);
+static bool sb_transfer_first_span(char *base, sb_heap *heap,
+ int fromclass, int toclass);
static bool sb_try_to_steal_superblock(char *base, sb_allocator *a,
uint16 heapproc, uint16 size_class);
span = relptr_access(base, heap->spans[fclass]);
while (span != NULL)
{
- /* XXX. Figure out what fullness class it should go in. */
+ int tfclass;
+ sb_span *nextspan;
+ sb_span *prevspan;
+ sb_span *newhead;
- /* XXX. If the fullness class it should go is less than this one,
- * move it there. */
+ /* Figure out what fullness class should contain this. */
+ tfclass = span->nused * (SB_FULLNESS_CLASSES - 1) / nmax;
- /* XXX. Advance to next span on list. */
+ /* Look up next span. */
+ nextspan = relptr_access(base, span->nextspan);
+
+ /*
+ * If utilization has dropped enough that this now belongs in some
+ * other fullness class, move it there.
+ */
+ if (tfclass < fclass)
+ {
+ prevspan = relptr_access(base, span->prevspan);
+
+ /*
+ * Unlink from the old list first, while span's own link fields
+ * are still intact; only afterwards may they be overwritten.
+ */
+ if (nextspan != NULL)
+ relptr_copy(nextspan->prevspan, span->prevspan);
+ if (prevspan != NULL)
+ relptr_copy(prevspan->nextspan, span->nextspan);
+ else
+ relptr_copy(heap->spans[fclass], span->nextspan);
+
+ /* Push onto the head of the target fullness class's list. */
+ relptr_copy(span->nextspan, heap->spans[tfclass]);
+ relptr_store(base, span->prevspan, (sb_span *) NULL);
+ newhead = relptr_access(base, heap->spans[tfclass]);
+ if (newhead != NULL)
+ relptr_store(base, newhead->prevspan, span);
+ relptr_store(base, heap->spans[tfclass], span);
+ }
+
+ /* Advance to next span on the original list. */
+ span = nextspan;
}
}
/*
* If there are no superblocks that properly belong in fullness class 1,
* pick one from some other fullness class and move it there anyway, so
- * that we have an allocation target.
+ * that we have an allocation target. Our last choice is to transfer a
+ * superblock that's almost empty (and might become completely empty soon
+ * if left alone), but even that is better than failing, which is what we
+ * must do if there are no superblocks at all with freespace.
*/
if (relptr_is_null(heap->spans[1]))
{
for (fclass = 2; fclass < SB_FULLNESS_CLASSES - 1; ++fclass)
- {
- sb_span *span;
-
- span = relptr_access(base, heap->spans[fclass]);
- if (span != NULL)
- /* XXX. Move it to fullness class 1 and break. */;
- }
+ if (sb_transfer_first_span(base, heap, fclass, 1))
+ break;
if (relptr_is_null(heap->spans[1]))
- {
- sb_span *span = relptr_access(base, heap->spans[0]);
-
- if (span != NULL)
- {
- /*
- * The only superblocks with freespace are almost empty, but
- * using one of those is still better than allocating a new
- * one.
- */
- /* XXX. Move it to fullness class 1. */
- }
- else
- {
- /* All superblocks are completely full. */
+ if (!sb_transfer_first_span(base, heap, 0, 1))
return NULL;
- }
- }
}
/* We have a superblock from which to allocate; do it. */
Assert(active_sb != NULL);
/* XXX need to actually allocate something here! */
+ /* If it's now full, move it to the highest-numbered fullness class. */
if (active_sb->nused == nmax)
- /* XXX. Move it to the highest-numbered fullness class. */;
+ sb_transfer_first_span(base, heap, 1, SB_FULLNESS_CLASSES - 1);
return NULL; /* XXX */
}
errmsg("out of shared memory")));
}
+/*
+ * Transfer the first span in one fullness class to the head of another
+ * fullness class.
+ *
+ * Returns true if a span was moved, or false if the source list was empty
+ * (in which case nothing is modified).
+ */
+static bool
+sb_transfer_first_span(char *base, sb_heap *heap, int fromclass, int toclass)
+{
+ sb_span *span;
+ sb_span *nextspan;
+
+ /* Can't do it if source list is empty. */
+ span = relptr_access(base, heap->spans[fromclass]);
+ if (span == NULL)
+ return false;
+
+ /* Remove span from source list. */
+ nextspan = relptr_access(base, span->nextspan);
+ relptr_store(base, heap->spans[fromclass], nextspan);
+ if (nextspan != NULL)
+ relptr_store(base, nextspan->prevspan, (sb_span *) NULL);
+
+ /*
+ * Add span to target list.  Note that span->prevspan is never stored
+ * here: this relies on the invariant that the head of any fullness
+ * class list has a NULL prevspan (span was the head of the source
+ * list).  NOTE(review): confirm every insertion path maintains that
+ * invariant.
+ */
+ relptr_copy(span->nextspan, heap->spans[toclass]);
+ relptr_store(base, heap->spans[toclass], span);
+ nextspan = relptr_access(base, span->nextspan);
+ if (nextspan != NULL)
+ relptr_store(base, nextspan->prevspan, span);
+
+ return true;
+}
+
/*
* Try to steal a superblock from another heap for the same size class,
* to avoid wasting too much memory in concurrent allocaton scenarios.