static AllocatorRegionLookupLeaf lookup_root_leaf;
#endif
+/* Single-entry cache for the most recently looked-up region. */
+static AllocatorRegion *most_recent_region;
+
/*
* Backend-private chunks binned by maximum contiguous freespace. Lists are
* doubly-linked using fl_node. List 0 contains regions with no internal
AllocatorRegionLookupLeaf *leaf = NULL;
int high, low;
+ /* It's pretty common to look up the same region repeatedly. */
+ if (most_recent_region != NULL &&
+ ((char *) ptr) >= most_recent_region->region_start &&
+ ((char *) ptr) < most_recent_region->region_start +
+ most_recent_region->region_size)
+ return most_recent_region;
+
/*
* If this is a 64-bit system, locate the lookup table that pertains
* to the upper 32 bits of ptr. On a 32-bit system, there's only one
else if (region->region_start + region->region_size < (char *) ptr)
low = mid + 1;
else
+ {
+ most_recent_region = region;
return region;
+ }
}
return NULL;
AllocatorRegionAdjustLookup(AllocatorRegion *region, bool insert)
{
bool ok = true;
+#if SIZEOF_SIZE_T > 4
+ Size tabstart;
+ Size tabstop;
+ Size i;
+#endif
+
+ /* Flush the cache if it points at the region being removed. */
+ if (!insert && most_recent_region == region)
+ most_recent_region = NULL;
/*
* If this is a 64-bit system, we need to loop over all of the relevant
* and we simply update that.
*/
#if SIZEOF_SIZE_T > 4
- Size tabstart;
- Size tabstop;
- Size i;
-
tabstart = ((Size) region->region_start) >> 32;
tabstop = ((Size) region->region_start + region->region_size - 1) >> 32;