Diffstat (limited to 'src/3rdparty/v8/src/heap.h')
-rw-r--r--  src/3rdparty/v8/src/heap.h | 325
1 file changed, 224 insertions(+), 101 deletions(-)
diff --git a/src/3rdparty/v8/src/heap.h b/src/3rdparty/v8/src/heap.h
index 0e744e4..12cd295 100644
--- a/src/3rdparty/v8/src/heap.h
+++ b/src/3rdparty/v8/src/heap.h
@@ -64,7 +64,7 @@ namespace internal {
V(Map, ascii_symbol_map, AsciiSymbolMap) \
V(Map, ascii_string_map, AsciiStringMap) \
V(Map, heap_number_map, HeapNumberMap) \
- V(Map, global_context_map, GlobalContextMap) \
+ V(Map, native_context_map, NativeContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, code_map, CodeMap) \
V(Map, scope_info_map, ScopeInfoMap) \
@@ -87,6 +87,7 @@ namespace internal {
V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, string_split_cache, StringSplitCache) \
+ V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
V(Object, termination_exception, TerminationException) \
V(Smi, hash_seed, HashSeed) \
V(Map, string_map, StringMap) \
@@ -130,6 +131,7 @@ namespace internal {
V(Map, with_context_map, WithContextMap) \
V(Map, block_context_map, BlockContextMap) \
V(Map, module_context_map, ModuleContextMap) \
+ V(Map, global_context_map, GlobalContextMap) \
V(Map, oddball_map, OddballMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, foreign_map, ForeignMap) \
@@ -150,7 +152,10 @@ namespace internal {
V(Smi, real_stack_limit, RealStackLimit) \
V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
- V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
+ V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
+ V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
+ V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) \
+ V(JSObject, observation_state, ObservationState)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
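
The root lists above are X-macros: a single table of V(type, name, CamelName) rows that different parts of the heap expand with their own definition of V to generate index enums, typed accessors, and initializers. A minimal standalone sketch of the pattern (the list, names, and types below are illustrative, not V8's actual expansion sites):

    #include <cstdio>

    // One list, defined once; each consumer supplies its own V.
    #define DEMO_ROOT_LIST(V) \
      V(int, answer, Answer)  \
      V(int, ratio, Ratio)

    // Expansion 1: an index enum, one entry per row.
    enum RootIndex {
    #define ROOT_INDEX_DECLARATION(type, name, camel) k##camel##RootIndex,
      DEMO_ROOT_LIST(ROOT_INDEX_DECLARATION)
    #undef ROOT_INDEX_DECLARATION
      kRootCount
    };

    // Expansion 2: typed accessors over one backing array, as Heap does
    // with its roots_[] table.
    struct DemoHeap {
      int roots_[kRootCount];
    #define ROOT_ACCESSOR(type, name, camel) \
      type name() { return roots_[k##camel##RootIndex]; }
      DEMO_ROOT_LIST(ROOT_ACCESSOR)
    #undef ROOT_ACCESSOR
    };

    int main() {
      DemoHeap heap = {{42, 7}};
      std::printf("%d %d\n", heap.answer(), heap.ratio());
      return 0;
    }
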
@@ -172,6 +177,7 @@ namespace internal {
V(constructor_symbol, "constructor") \
V(code_symbol, ".code") \
V(result_symbol, ".result") \
+ V(dot_for_symbol, ".for.") \
V(catch_var_symbol, ".catch-var") \
V(empty_symbol, "") \
V(eval_symbol, "eval") \
@@ -241,7 +247,8 @@ namespace internal {
V(use_strict, "use strict") \
V(dot_symbol, ".") \
V(anonymous_function_symbol, "(anonymous function)") \
- V(compare_ic_symbol, ".compare_ic") \
+ V(compare_ic_symbol, "==") \
+ V(strict_compare_ic_symbol, "===") \
V(infinity_symbol, "Infinity") \
V(minus_infinity_symbol, "-Infinity") \
V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
@@ -486,6 +493,9 @@ class Heap {
// Returns the amount of executable memory currently committed for the heap.
intptr_t CommittedMemoryExecutable();
+ // Returns the amount of physical memory currently committed for the heap.
+ size_t CommittedPhysicalMemory();
+
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
@@ -508,6 +518,24 @@ class Heap {
MapSpace* map_space() { return map_space_; }
CellSpace* cell_space() { return cell_space_; }
LargeObjectSpace* lo_space() { return lo_space_; }
+ PagedSpace* paged_space(int idx) {
+ switch (idx) {
+ case OLD_POINTER_SPACE:
+ return old_pointer_space();
+ case OLD_DATA_SPACE:
+ return old_data_space();
+ case MAP_SPACE:
+ return map_space();
+ case CELL_SPACE:
+ return cell_space();
+ case CODE_SPACE:
+ return code_space();
+ case NEW_SPACE:
+ case LO_SPACE:
+ UNREACHABLE();
+ }
+ return NULL;
+ }
bool always_allocate() { return always_allocate_scope_depth_ != 0; }
Address always_allocate_scope_depth_address() {
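
The new paged_space(idx) accessor lets callers walk all paged spaces by index instead of naming each accessor; NEW_SPACE and LO_SPACE hit UNREACHABLE() because they are not PagedSpaces. A standalone model of the pattern, assuming the usual contiguous ordering of the paged spaces in the space enum:

    #include <cassert>
    #include <cstdio>

    enum Space { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE, CODE_SPACE,
                 MAP_SPACE, CELL_SPACE, LO_SPACE,
                 FIRST_PAGED = OLD_POINTER_SPACE, LAST_PAGED = CELL_SPACE };

    struct PagedSpace { long committed; };

    struct DemoHeap {
      PagedSpace spaces_[5];
      PagedSpace* paged_space(int idx) {
        assert(idx >= FIRST_PAGED && idx <= LAST_PAGED);  // NEW/LO excluded
        return &spaces_[idx - FIRST_PAGED];
      }
    };

    int main() {
      DemoHeap heap = {{{10}, {20}, {30}, {40}, {50}}};
      long total = 0;
      for (int i = FIRST_PAGED; i <= LAST_PAGED; i++)
        total += heap.paged_space(i)->committed;
      std::printf("total committed: %ld\n", total);
      return 0;
    }
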
@@ -535,7 +563,8 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateJSObject(
JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
- MUST_USE_RESULT MaybeObject* AllocateJSModule();
+ MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
+ ScopeInfo* scope_info);
// Allocate a JSArray with no elements
MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
@@ -626,7 +655,7 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateMap(
InstanceType instance_type,
int instance_size,
- ElementsKind elements_kind = FAST_ELEMENTS);
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
// Allocates a partial map for bootstrapping.
MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
@@ -656,6 +685,9 @@ class Heap {
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
+ // For use during bootup.
+ void RepairFreeListsAfterBoot();
+
// Allocates and fully initializes a String. There are two String
// encodings: ASCII and two byte. One should choose between the three string
// allocation functions based on the encoding of the string buffer used to
@@ -682,6 +714,7 @@ class Heap {
PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
Vector<const char> str,
+ int non_ascii_start,
PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
Vector<const uc16> str,
@@ -825,13 +858,16 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateHashTable(
int length, PretenureFlag pretenure = NOT_TENURED);
- // Allocate a global (but otherwise uninitialized) context.
- MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
+ // Allocate a native (but otherwise uninitialized) context.
+ MUST_USE_RESULT MaybeObject* AllocateNativeContext();
- // Allocate a module context.
- MUST_USE_RESULT MaybeObject* AllocateModuleContext(Context* previous,
+ // Allocate a global context.
+ MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
ScopeInfo* scope_info);
+ // Allocate a module context.
+ MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);
+
// Allocate a function context.
MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
JSFunction* function);
@@ -1077,7 +1113,10 @@ class Heap {
void EnsureHeapIsIterable();
// Notify the heap that a context has been disposed.
- int NotifyContextDisposed() { return ++contexts_disposed_; }
+ int NotifyContextDisposed() {
+ flush_monomorphic_ics_ = true;
+ return ++contexts_disposed_;
+ }
// Utility to invoke the scavenger. This is needed in test code to
// ensure correct callback for weak global handles.
@@ -1105,8 +1144,8 @@ class Heap {
#endif
void AddGCPrologueCallback(
- GCEpilogueCallback callback, GCType gc_type_filter);
- void RemoveGCPrologueCallback(GCEpilogueCallback callback);
+ GCPrologueCallback callback, GCType gc_type_filter);
+ void RemoveGCPrologueCallback(GCPrologueCallback callback);
void AddGCEpilogueCallback(
GCEpilogueCallback callback, GCType gc_type_filter);
@@ -1153,13 +1192,13 @@ class Heap {
// not match the empty string.
String* hidden_symbol() { return hidden_symbol_; }
- void set_global_contexts_list(Object* object) {
- global_contexts_list_ = object;
+ void set_native_contexts_list(Object* object) {
+ native_contexts_list_ = object;
}
- Object* global_contexts_list() { return global_contexts_list_; }
+ Object* native_contexts_list() { return native_contexts_list_; }
// Number of mark-sweeps.
- int ms_count() { return ms_count_; }
+ unsigned int ms_count() { return ms_count_; }
// Iterates over all roots in the heap.
void IterateRoots(ObjectVisitor* v, VisitMode mode);
@@ -1230,21 +1269,19 @@ class Heap {
return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
}
- // Get address of global contexts list for serialization support.
- Object** global_contexts_list_address() {
- return &global_contexts_list_;
+ // Get address of native contexts list for serialization support.
+ Object** native_contexts_list_address() {
+ return &native_contexts_list_;
}
-#ifdef DEBUG
- void Print();
- void PrintHandles();
-
+#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
+#endif
- // Verify that AccessorPairs are not shared, i.e. make sure that they have
- // exactly one pointer to them.
- void VerifyNoAccessorPairSharing();
+#ifdef DEBUG
+ void Print();
+ void PrintHandles();
void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
@@ -1253,10 +1290,23 @@ class Heap {
// Report heap statistics.
void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title);
+#endif
+
+ // Zapping is needed for verify heap, and always done in debug builds.
+ static inline bool ShouldZapGarbage() {
+#ifdef DEBUG
+ return true;
+#else
+#ifdef VERIFY_HEAP
+ return FLAG_verify_heap;
+#else
+ return false;
+#endif
+#endif
+ }
// Fill in bogus values in from space
void ZapFromSpace();
-#endif
// Print short heap statistics.
void PrintShortHeapStatistics();
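
ShouldZapGarbage() gates "zapping": overwriting dead memory with a recognizable sentinel so stale pointers into it fail loudly rather than reading plausible stale data. It is unconditional in debug builds and follows --verify-heap when only VERIFY_HEAP is defined. A minimal sketch of the idea; the zap constant here is illustrative, not V8's exact per-platform value:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static const uintptr_t kDemoZapValue = 0xdeadbeef;  // illustrative

    // Fill a dead region word by word with the sentinel.
    static void ZapBlock(uintptr_t* start, size_t word_count) {
      for (size_t i = 0; i < word_count; i++) start[i] = kDemoZapValue;
    }

    int main() {
      uintptr_t block[4] = {1, 2, 3, 4};
      ZapBlock(block, 4);
      std::printf("%#lx\n", static_cast<unsigned long>(block[0]));
      return 0;
    }
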
@@ -1294,6 +1344,7 @@ class Heap {
return disallow_allocation_failure_;
}
+ void TracePathToObjectFrom(Object* target, Object* root);
void TracePathToObject(Object* target);
void TracePathToGlobal();
#endif
@@ -1308,20 +1359,9 @@ class Heap {
// Commits from space if it is uncommitted.
void EnsureFromSpaceIsCommitted();
- // Support for partial snapshots. After calling this we can allocate a
- // certain number of bytes using only linear allocation (with a
- // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
- // or causing a GC. It returns true if space was reserved or false if a GC is
- // needed. For paged spaces the space requested must include the space wasted
- // at the end of each page when allocating linearly.
- void ReserveSpace(
- int new_space_size,
- int pointer_space_size,
- int data_space_size,
- int code_space_size,
- int map_space_size,
- int cell_space_size,
- int large_object_size);
+ // Support for partial snapshots. After calling this we have a linear
+ // space to write objects in each space.
+ void ReserveSpace(int* sizes, Address* addresses);
//
// Support for the API.
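
The reworked ReserveSpace() replaces seven per-space size parameters with two parallel arrays: requested sizes in, linear allocation bases out, one slot per space. A standalone model of that contract (the arena, space count, and bump-pointer reservation are all invented for illustration):

    #include <cstdio>

    typedef unsigned char* Address;

    enum { kDemoSpaces = 3 };
    static unsigned char arena[1024];

    // Hand back one linear region per space, carved sequentially.
    static void ReserveSpace(int* sizes, Address* addresses) {
      unsigned char* cursor = arena;
      for (int i = 0; i < kDemoSpaces; i++) {
        addresses[i] = cursor;
        cursor += sizes[i];
      }
    }

    int main() {
      int sizes[kDemoSpaces] = {100, 200, 50};
      Address bases[kDemoSpaces];
      ReserveSpace(sizes, bases);
      for (int i = 0; i < kDemoSpaces; i++)
        std::printf("space %d at offset %ld\n", i,
                    static_cast<long>(bases[i] - arena));
      return 0;
    }
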
@@ -1397,15 +1437,15 @@ class Heap {
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
-// Utility type maps
-#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
- STRUCT_LIST(DECLARE_STRUCT_MAP)
-#undef DECLARE_STRUCT_MAP
-
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION
+ // Utility type maps
+#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
+ STRUCT_LIST(DECLARE_STRUCT_MAP)
+#undef DECLARE_STRUCT_MAP
+
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
@@ -1417,6 +1457,10 @@ class Heap {
STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
+ // Generated code can embed direct references to non-writable roots if
+ // they are in new space.
+ static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
+
MUST_USE_RESULT MaybeObject* NumberToString(
Object* number, bool check_number_string_cache = true);
MUST_USE_RESULT MaybeObject* Uint32ToString(
@@ -1488,13 +1532,6 @@ class Heap {
void ClearNormalizedMapCaches();
- // Clears the cache of ICs related to this map.
- void ClearCacheOnMap(Map* map) {
- if (FLAG_cleanup_code_caches_at_gc) {
- map->ClearCodeCache(this);
- }
- }
-
GCTracer* tracer() { return tracer_; }
// Returns the size of objects residing in non new spaces.
@@ -1592,6 +1629,16 @@ class Heap {
set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
+ void SetGetterStubDeoptPCOffset(int pc_offset) {
+ ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
+ void SetSetterStubDeoptPCOffset(int pc_offset) {
+ ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
@@ -1602,9 +1649,65 @@ class Heap {
}
void AgeInlineCaches() {
- ++global_ic_age_;
+ global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
+ }
+
+ bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
+
+ intptr_t amount_of_external_allocated_memory() {
+ return amount_of_external_allocated_memory_;
+ }
+
+ // ObjectStats are kept in two arrays, counts and sizes. Related stats are
+ // stored in a contiguous linear buffer. Stats groups are stored one after
+ // another.
+ enum {
+ FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
+ FIRST_FIXED_ARRAY_SUB_TYPE =
+ FIRST_CODE_KIND_SUB_TYPE + Code::LAST_CODE_KIND + 1,
+ OBJECT_STATS_COUNT =
+ FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1
+ };
+
+ void RecordObjectStats(InstanceType type, int sub_type, size_t size) {
+ ASSERT(type <= LAST_TYPE);
+ if (sub_type < 0) {
+ object_counts_[type]++;
+ object_sizes_[type] += size;
+ } else {
+ if (type == CODE_TYPE) {
+ ASSERT(sub_type <= Code::LAST_CODE_KIND);
+ object_counts_[FIRST_CODE_KIND_SUB_TYPE + sub_type]++;
+ object_sizes_[FIRST_CODE_KIND_SUB_TYPE + sub_type] += size;
+ } else if (type == FIXED_ARRAY_TYPE) {
+ ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
+ object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++;
+ object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size;
+ }
+ }
}
+ void CheckpointObjectStats();
+
+ // We don't use a ScopedLock here since we want to lock the heap
+ // only when FLAG_parallel_recompilation is true.
+ class RelocationLock {
+ public:
+ explicit RelocationLock(Heap* heap) : heap_(heap) {
+ if (FLAG_parallel_recompilation) {
+ heap_->relocation_mutex_->Lock();
+ }
+ }
+ ~RelocationLock() {
+ if (FLAG_parallel_recompilation) {
+ heap_->relocation_mutex_->Unlock();
+ }
+ }
+
+ private:
+ Heap* heap_;
+ };
+
private:
Heap();
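
RecordObjectStats() above packs three stat groups into one flat pair of arrays: plain instance types occupy indices [0, LAST_TYPE], code kinds start at FIRST_CODE_KIND_SUB_TYPE, and fixed-array sub-types follow at FIRST_FIXED_ARRAY_SUB_TYPE. A standalone sketch of that index layout with invented constants:

    #include <cstddef>
    #include <cstdio>

    enum { LAST_TYPE = 9, LAST_CODE_KIND = 4, LAST_FIXED_ARRAY_SUB_TYPE = 2 };
    enum {
      FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,                       // 10
      FIRST_FIXED_ARRAY_SUB_TYPE =
          FIRST_CODE_KIND_SUB_TYPE + LAST_CODE_KIND + 1,              // 15
      OBJECT_STATS_COUNT =
          FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1  // 18
    };

    static size_t object_counts_[OBJECT_STATS_COUNT];
    static size_t object_sizes_[OBJECT_STATS_COUNT];

    // One bucket per (group base + sub-index), as in the header.
    static void Record(int index, size_t size) {
      object_counts_[index]++;
      object_sizes_[index] += size;
    }

    int main() {
      Record(3, 64);                               // plain instance type 3
      Record(FIRST_CODE_KIND_SUB_TYPE + 2, 128);   // code kind 2
      Record(FIRST_FIXED_ARRAY_SUB_TYPE + 1, 32);  // fixed-array sub-type 1
      std::printf("%d buckets total\n", OBJECT_STATS_COUNT);
      return 0;
    }

RelocationLock follows the conditional-locking rationale stated in its comment: the mutex is touched only when FLAG_parallel_recompilation is on, so single-threaded configurations pay nothing for the guard.
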
@@ -1636,6 +1739,8 @@ class Heap {
int global_ic_age_;
+ bool flush_monomorphic_ics_;
+
int scan_on_scavenge_pages_;
#if defined(V8_TARGET_ARCH_X64)
@@ -1657,7 +1762,7 @@ class Heap {
// Returns the amount of external memory registered since last global gc.
intptr_t PromotedExternalMemorySize();
- int ms_count_; // how many mark-sweep collections happened
+ unsigned int ms_count_; // how many mark-sweep collections happened
unsigned int gc_count_; // how many gc happened
// For post mortem debugging.
@@ -1729,7 +1834,7 @@ class Heap {
// last GC.
int old_gen_exhausted_;
- Object* global_contexts_list_;
+ Object* native_contexts_list_;
StoreBufferRebuilder store_buffer_rebuilder_;
@@ -1807,6 +1912,7 @@ class Heap {
bool PerformGarbageCollection(GarbageCollector collector,
GCTracer* tracer);
+ bool IterateObjectGroups(ObjectVisitor* scavenge_visitor);
inline void UpdateOldSpaceLimits();
@@ -1998,14 +2104,24 @@ class Heap {
void AdvanceIdleIncrementalMarking(intptr_t step_size);
+ void ClearObjectStats(bool clear_last_time_stats = false);
static const int kInitialSymbolTableSize = 2048;
static const int kInitialEvalCacheSize = 64;
static const int kInitialNumberStringCacheSize = 256;
+ // Object counts and used memory by InstanceType
+ size_t object_counts_[OBJECT_STATS_COUNT];
+ size_t object_counts_last_time_[OBJECT_STATS_COUNT];
+ size_t object_sizes_[OBJECT_STATS_COUNT];
+ size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
+
// Maximum GC pause.
int max_gc_pause_;
+ // Total time spent in GC.
+ int total_gc_time_ms_;
+
// Maximum size of objects alive after GC.
intptr_t max_alive_after_gc_;
@@ -2050,15 +2166,16 @@ class Heap {
MemoryChunk* chunks_queued_for_free_;
+ Mutex* relocation_mutex_;
+
friend class Factory;
friend class GCTracer;
friend class DisallowAllocationFailure;
friend class AlwaysAllocateScope;
- friend class LinearAllocationScope;
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
- friend class StaticMarkingVisitor;
+ friend class MarkCompactMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);
@@ -2098,21 +2215,29 @@ class HeapStats {
};
-class AlwaysAllocateScope {
+class DisallowAllocationFailure {
public:
- inline AlwaysAllocateScope();
- inline ~AlwaysAllocateScope();
+ inline DisallowAllocationFailure();
+ inline ~DisallowAllocationFailure();
+
+#ifdef DEBUG
+ private:
+ bool old_state_;
+#endif
};
-class LinearAllocationScope {
+class AlwaysAllocateScope {
public:
- inline LinearAllocationScope();
- inline ~LinearAllocationScope();
+ inline AlwaysAllocateScope();
+ inline ~AlwaysAllocateScope();
+
+ private:
+ // Implicitly disable artificial allocation failures.
+ DisallowAllocationFailure disallow_allocation_failure_;
};
-#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
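
Both scopes above are RAII guards, and the rewritten AlwaysAllocateScope now embeds a DisallowAllocationFailure member, so entering it also suppresses simulated allocation failures for the scope's lifetime. A standalone model of that composition (the real scope tracks a depth counter on the heap rather than a plain bool):

    #include <cstdio>

    static bool always_allocate = false;
    static bool allocation_may_fail = true;

    struct DisallowAllocationFailure {
      bool old_state_;
      DisallowAllocationFailure() : old_state_(allocation_may_fail) {
        allocation_may_fail = false;
      }
      ~DisallowAllocationFailure() { allocation_may_fail = old_state_; }
    };

    struct AlwaysAllocateScope {
      AlwaysAllocateScope() { always_allocate = true; }
      ~AlwaysAllocateScope() { always_allocate = false; }
      // Member guard: constructing this scope implicitly disables
      // artificial allocation failures, mirroring the header.
      DisallowAllocationFailure disallow_allocation_failure_;
    };

    int main() {
      {
        AlwaysAllocateScope scope;
        std::printf("in scope: always=%d may_fail=%d\n",
                    always_allocate, allocation_may_fail);  // 1 0
      }
      std::printf("after:    always=%d may_fail=%d\n",
                  always_allocate, allocation_may_fail);    // 0 1
      return 0;
    }
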
@@ -2122,7 +2247,6 @@ class VerifyPointersVisitor: public ObjectVisitor {
public:
inline void VisitPointers(Object** start, Object** end);
};
-#endif
// Space iterator for iterating over all spaces of the heap.
@@ -2281,7 +2405,7 @@ class KeyedLookupCache {
};
-// Cache for mapping (array, property name) into descriptor index.
+// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// Descriptor index equals kNotFound means the property is absent.
// Cleared at startup and prior to any gc.
@@ -2289,21 +2413,21 @@ class DescriptorLookupCache {
public:
// Lookup descriptor index for (map, name).
// If absent, kAbsent is returned.
- int Lookup(DescriptorArray* array, String* name) {
+ int Lookup(Map* source, String* name) {
if (!StringShape(name).IsSymbol()) return kAbsent;
- int index = Hash(array, name);
+ int index = Hash(source, name);
Key& key = keys_[index];
- if ((key.array == array) && (key.name == name)) return results_[index];
+ if ((key.source == source) && (key.name == name)) return results_[index];
return kAbsent;
}
// Update an element in the cache.
- void Update(DescriptorArray* array, String* name, int result) {
+ void Update(Map* source, String* name, int result) {
ASSERT(result != kAbsent);
if (StringShape(name).IsSymbol()) {
- int index = Hash(array, name);
+ int index = Hash(source, name);
Key& key = keys_[index];
- key.array = array;
+ key.source = source;
key.name = name;
results_[index] = result;
}
@@ -2317,24 +2441,26 @@ class DescriptorLookupCache {
private:
DescriptorLookupCache() {
for (int i = 0; i < kLength; ++i) {
- keys_[i].array = NULL;
+ keys_[i].source = NULL;
keys_[i].name = NULL;
results_[i] = kAbsent;
}
}
- static int Hash(DescriptorArray* array, String* name) {
+ static int Hash(Object* source, String* name) {
// Uses only lower 32 bits if pointers are larger.
- uint32_t array_hash =
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
+ uint32_t source_hash =
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
+ >> kPointerSizeLog2;
uint32_t name_hash =
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
- return (array_hash ^ name_hash) % kLength;
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
+ >> kPointerSizeLog2;
+ return (source_hash ^ name_hash) % kLength;
}
static const int kLength = 64;
struct Key {
- DescriptorArray* array;
+ Map* source;
String* name;
};
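
The rewritten Hash() shifts by kPointerSizeLog2 instead of a hard-coded 2: heap pointers are word-aligned, so their low log2(kPointerSize) bits are always zero and contribute no entropy to the bucket index. A standalone illustration of the same computation:

    #include <cstdint>
    #include <cstdio>

    static const int kPointerSizeLog2 = (sizeof(void*) == 8) ? 3 : 2;
    static const int kLength = 64;

    static int Hash(const void* source, const void* name) {
      // Shift out the always-zero alignment bits before mixing, so aligned
      // addresses spread across all kLength buckets.
      uint32_t source_hash =
          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
              >> kPointerSizeLog2;
      uint32_t name_hash =
          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
              >> kPointerSizeLog2;
      return (source_hash ^ name_hash) % kLength;
    }

    int main() {
      static int a, b;  // two aligned objects as stand-in keys
      std::printf("bucket: %d\n", Hash(&a, &b));
      return 0;
    }
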
@@ -2346,18 +2472,6 @@ class DescriptorLookupCache {
};
-#ifdef DEBUG
-class DisallowAllocationFailure {
- public:
- inline DisallowAllocationFailure();
- inline ~DisallowAllocationFailure();
-
- private:
- bool old_state_;
-};
-#endif
-
-
// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
@@ -2373,6 +2487,7 @@ class AssertNoAllocation {
#ifdef DEBUG
private:
bool old_state_;
+ bool active_;
#endif
};
@@ -2385,6 +2500,7 @@ class DisableAssertNoAllocation {
#ifdef DEBUG
private:
bool old_state_;
+ bool active_;
#endif
};
@@ -2504,24 +2620,31 @@ class GCTracer BASE_EMBEDDED {
};
-class StringSplitCache {
+class RegExpResultsCache {
public:
- static Object* Lookup(FixedArray* cache, String* string, String* pattern);
+ enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
+
+ // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
+ // On success, the returned result is guaranteed to be a COW-array.
+ static Object* Lookup(Heap* heap,
+ String* key_string,
+ Object* key_pattern,
+ ResultsCacheType type);
+ // Attempt to add value_array to the cache specified by type. On success,
+ // value_array is turned into a COW-array.
static void Enter(Heap* heap,
- FixedArray* cache,
- String* string,
- String* pattern,
- FixedArray* array);
+ String* key_string,
+ Object* key_pattern,
+ FixedArray* value_array,
+ ResultsCacheType type);
static void Clear(FixedArray* cache);
- static const int kStringSplitCacheSize = 0x100;
+ static const int kRegExpResultsCacheSize = 0x100;
private:
static const int kArrayEntriesPerCacheEntry = 4;
static const int kStringOffset = 0;
static const int kPatternOffset = 1;
static const int kArrayOffset = 2;
-
- static MaybeObject* WrapFixedArrayInJSArray(Object* fixed_array);
};
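
RegExpResultsCache generalizes the old StringSplitCache: one mechanism keyed on (string, pattern, cache type) now serves both String.prototype.split results and RegExp multiple-match indices. A hypothetical caller might follow the lookup-then-fill pattern below; RunSplit() and the surrounding handle machinery are made-up stand-ins, not real V8 code, and only the Lookup/Enter signatures come from the declarations above:

    // Hypothetical caller sketching the lookup-then-fill pattern.
    Object* cached = RegExpResultsCache::Lookup(
        heap, subject, pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
    if (cached->IsSmi()) {  // Smi zero signals a cache miss
      FixedArray* results = RunSplit(subject, pattern);  // stand-in helper
      RegExpResultsCache::Enter(heap, subject, pattern, results,
                                RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
      cached = results;  // Enter() has turned results into a COW array
    }
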