summaryrefslogtreecommitdiffstats
path: root/src/3rdparty/v8/src/objects-inl.h
diff options
context:
space:
mode:
Diffstat (limited to 'src/3rdparty/v8/src/objects-inl.h')
-rw-r--r--src/3rdparty/v8/src/objects-inl.h1408
1 files changed, 961 insertions, 447 deletions
diff --git a/src/3rdparty/v8/src/objects-inl.h b/src/3rdparty/v8/src/objects-inl.h
index 74e63f2..b45b4d0 100644
--- a/src/3rdparty/v8/src/objects-inl.h
+++ b/src/3rdparty/v8/src/objects-inl.h
@@ -47,6 +47,7 @@
#include "v8memory.h"
#include "factory.h"
#include "incremental-marking.h"
+#include "transitions-inl.h"
namespace v8 {
namespace internal {
@@ -128,18 +129,6 @@ PropertyDetails PropertyDetails::AsDeleted() {
}
-bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
- ElementsKind to_kind) {
- if (to_kind == FAST_ELEMENTS) {
- return from_kind == FAST_SMI_ONLY_ELEMENTS ||
- from_kind == FAST_DOUBLE_ELEMENTS;
- } else {
- return to_kind == FAST_DOUBLE_ELEMENTS &&
- from_kind == FAST_SMI_ONLY_ELEMENTS;
- }
-}
-
-
bool Object::IsFixedArrayBase() {
return IsFixedArray() || IsFixedDoubleArray();
}
@@ -300,7 +289,7 @@ bool StringShape::IsSymbol() {
bool String::IsAsciiRepresentation() {
uint32_t type = map()->instance_type();
- return (type & kStringEncodingMask) == kAsciiStringTag;
+ return (type & kStringEncodingMask) == kOneByteStringTag;
}
@@ -316,7 +305,7 @@ bool String::IsAsciiRepresentationUnderneath() {
STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
ASSERT(IsFlat());
switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
- case kAsciiStringTag:
+ case kOneByteStringTag:
return true;
case kTwoByteStringTag:
return false;
@@ -332,7 +321,7 @@ bool String::IsTwoByteRepresentationUnderneath() {
STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
ASSERT(IsFlat());
switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
- case kAsciiStringTag:
+ case kOneByteStringTag:
return false;
case kTwoByteStringTag:
return true;
@@ -344,7 +333,7 @@ bool String::IsTwoByteRepresentationUnderneath() {
bool String::HasOnlyAsciiChars() {
uint32_t type = map()->instance_type();
- return (type & kStringEncodingMask) == kAsciiStringTag ||
+ return (type & kStringEncodingMask) == kOneByteStringTag ||
(type & kAsciiDataHintMask) == kAsciiDataHintTag;
}
@@ -393,9 +382,12 @@ uint32_t StringShape::full_representation_tag() {
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
Internals::kFullStringRepresentationMask);
+STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
+ Internals::kStringEncodingMask);
+
bool StringShape::IsSequentialAscii() {
- return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
+ return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}
@@ -405,10 +397,16 @@ bool StringShape::IsSequentialTwoByte() {
bool StringShape::IsExternalAscii() {
- return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
+ return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}
+STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
+ Internals::kExternalAsciiRepresentationTag);
+
+STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
+
+
bool StringShape::IsExternalTwoByte() {
return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}
@@ -417,6 +415,7 @@ bool StringShape::IsExternalTwoByte() {
STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
Internals::kExternalTwoByteRepresentationTag);
+STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
uc32 FlatStringReader::Get(int index) {
ASSERT(0 <= index && index <= length_);
@@ -536,6 +535,11 @@ bool Object::IsDescriptorArray() {
}
+bool Object::IsTransitionArray() {
+ return IsFixedArray();
+}
+
+
bool Object::IsDeoptimizationInputData() {
// Must be a fixed array.
if (!IsFixedArray()) return false;
@@ -574,31 +578,23 @@ bool Object::IsTypeFeedbackCells() {
bool Object::IsContext() {
- if (Object::IsHeapObject()) {
- Map* map = HeapObject::cast(this)->map();
- Heap* heap = map->GetHeap();
- return (map == heap->function_context_map() ||
- map == heap->catch_context_map() ||
- map == heap->with_context_map() ||
- map == heap->global_context_map() ||
- map == heap->block_context_map() ||
- map == heap->module_context_map());
- }
- return false;
-}
-
-
-bool Object::IsGlobalContext() {
- return Object::IsHeapObject() &&
- HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->global_context_map();
+ if (!Object::IsHeapObject()) return false;
+ Map* map = HeapObject::cast(this)->map();
+ Heap* heap = map->GetHeap();
+ return (map == heap->function_context_map() ||
+ map == heap->catch_context_map() ||
+ map == heap->with_context_map() ||
+ map == heap->native_context_map() ||
+ map == heap->block_context_map() ||
+ map == heap->module_context_map() ||
+ map == heap->global_context_map());
}
-bool Object::IsModuleContext() {
+bool Object::IsNativeContext() {
return Object::IsHeapObject() &&
HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->module_context_map();
+ HeapObject::cast(this)->GetHeap()->native_context_map();
}
@@ -664,8 +660,8 @@ bool Object::IsDictionary() {
bool Object::IsSymbolTable() {
- return IsHashTable() && this ==
- HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
+ return IsHashTable() &&
+ this == HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}
@@ -678,7 +674,7 @@ bool Object::IsJSFunctionResultCache() {
% JSFunctionResultCache::kEntrySize != 0) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<JSFunctionResultCache*>(this)->
JSFunctionResultCacheVerify();
@@ -693,7 +689,7 @@ bool Object::IsNormalizedMapCache() {
if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
}
@@ -722,6 +718,11 @@ bool Object::IsMapCache() {
}
+bool Object::IsObjectHashTable() {
+ return IsHashTable();
+}
+
+
bool Object::IsPrimitive() {
return IsOddball() || IsNumber() || IsString();
}
@@ -1114,13 +1115,13 @@ HeapObject* MapWord::ToForwardingAddress() {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
VerifyPointer(READ_FIELD(this, offset));
}
void HeapObject::VerifySmiField(int offset) {
- ASSERT(READ_FIELD(this, offset)->IsSmi());
+ CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
@@ -1244,35 +1245,26 @@ FixedArrayBase* JSObject::elements() {
return static_cast<FixedArrayBase*>(array);
}
-void JSObject::ValidateSmiOnlyElements() {
+
+void JSObject::ValidateElements() {
#if DEBUG
- if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
- Heap* heap = GetHeap();
- // Don't use elements, since integrity checks will fail if there
- // are filler pointers in the array.
- FixedArray* fixed_array =
- reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
- Map* map = fixed_array->map();
- // Arrays that have been shifted in place can't be verified.
- if (map != heap->raw_unchecked_one_pointer_filler_map() &&
- map != heap->raw_unchecked_two_pointer_filler_map() &&
- map != heap->free_space_map()) {
- for (int i = 0; i < fixed_array->length(); i++) {
- Object* current = fixed_array->get(i);
- ASSERT(current->IsSmi() || current->IsTheHole());
- }
- }
+ if (FLAG_enable_slow_asserts) {
+ ElementsAccessor* accessor = GetElementsAccessor();
+ accessor->Validate(this);
}
#endif
}
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
-#if DEBUG
- ValidateSmiOnlyElements();
-#endif
- if ((map()->elements_kind() != FAST_ELEMENTS)) {
- return TransitionElementsKind(FAST_ELEMENTS);
+ ValidateElements();
+ ElementsKind elements_kind = map()->elements_kind();
+ if (!IsFastObjectElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ return TransitionElementsKind(FAST_HOLEY_ELEMENTS);
+ } else {
+ return TransitionElementsKind(FAST_ELEMENTS);
+ }
}
return this;
}
@@ -1284,20 +1276,29 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
ElementsKind current_kind = map()->elements_kind();
ElementsKind target_kind = current_kind;
ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (current_kind == FAST_ELEMENTS) return this;
-
+ bool is_holey = IsFastHoleyElementsKind(current_kind);
+ if (current_kind == FAST_HOLEY_ELEMENTS) return this;
Heap* heap = GetHeap();
Object* the_hole = heap->the_hole_value();
- Object* heap_number_map = heap->heap_number_map();
for (uint32_t i = 0; i < count; ++i) {
Object* current = *objects++;
- if (!current->IsSmi() && current != the_hole) {
- if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
- HeapObject::cast(current)->map() == heap_number_map) {
- target_kind = FAST_DOUBLE_ELEMENTS;
+ if (current == the_hole) {
+ is_holey = true;
+ target_kind = GetHoleyElementsKind(target_kind);
+ } else if (!current->IsSmi()) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
+ if (IsFastSmiElementsKind(target_kind)) {
+ if (is_holey) {
+ target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ }
+ }
+ } else if (is_holey) {
+ target_kind = FAST_HOLEY_ELEMENTS;
+ break;
} else {
target_kind = FAST_ELEMENTS;
- break;
}
}
}
@@ -1310,6 +1311,7 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
+ uint32_t length,
EnsureElementsMode mode) {
if (elements->map() != GetHeap()->fixed_double_array_map()) {
ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
@@ -1318,11 +1320,19 @@ MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
mode = DONT_ALLOW_DOUBLE_ELEMENTS;
}
Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
- return EnsureCanContainElements(objects, elements->length(), mode);
+ return EnsureCanContainElements(objects, length, mode);
}
ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
+ if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
+ FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
+ for (uint32_t i = 0; i < length; ++i) {
+ if (double_array->is_the_hole(i)) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ }
+ }
return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
}
@@ -1334,21 +1344,20 @@ MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
ElementsKind to_kind) {
Map* current_map = map();
ElementsKind from_kind = current_map->elements_kind();
-
if (from_kind == to_kind) return current_map;
- Context* global_context = isolate->context()->global_context();
- if (current_map == global_context->smi_js_array_map()) {
- if (to_kind == FAST_ELEMENTS) {
- return global_context->object_js_array_map();
- } else {
- if (to_kind == FAST_DOUBLE_ELEMENTS) {
- return global_context->double_js_array_map();
- } else {
- ASSERT(to_kind == DICTIONARY_ELEMENTS);
+ Context* native_context = isolate->context()->native_context();
+ Object* maybe_array_maps = native_context->js_array_maps();
+ if (maybe_array_maps->IsFixedArray()) {
+ FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
+ if (array_maps->get(from_kind) == current_map) {
+ Object* maybe_transitioned_map = array_maps->get(to_kind);
+ if (maybe_transitioned_map->IsMap()) {
+ return Map::cast(maybe_transitioned_map);
}
}
}
+
return GetElementsTransitionMapSlow(to_kind);
}
@@ -1357,9 +1366,6 @@ void JSObject::set_map_and_elements(Map* new_map,
FixedArrayBase* value,
WriteBarrierMode mode) {
ASSERT(value->HasValidElements());
-#ifdef DEBUG
- ValidateSmiOnlyElements();
-#endif
if (new_map != NULL) {
if (mode == UPDATE_WRITE_BARRIER) {
set_map(new_map);
@@ -1368,8 +1374,7 @@ void JSObject::set_map_and_elements(Map* new_map,
set_map_no_write_barrier(new_map);
}
}
- ASSERT((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT((map()->has_fast_smi_or_object_elements() ||
(value == GetHeap()->empty_fixed_array())) ==
(value->map() == GetHeap()->fixed_array_map() ||
value->map() == GetHeap()->fixed_cow_array_map()));
@@ -1392,8 +1397,7 @@ void JSObject::initialize_properties() {
void JSObject::initialize_elements() {
- ASSERT(map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT(map()->has_fast_smi_or_object_elements() ||
map()->has_fast_double_elements());
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
@@ -1402,9 +1406,10 @@ void JSObject::initialize_elements() {
MaybeObject* JSObject::ResetElements() {
Object* obj;
- ElementsKind elements_kind = FLAG_smi_only_arrays
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
+ ElementsKind elements_kind = GetInitialFastElementsKind();
+ if (!FLAG_smi_only_arrays) {
+ elements_kind = FastSmiToObjectElementsKind(elements_kind);
+ }
MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
elements_kind);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -1414,6 +1419,43 @@ MaybeObject* JSObject::ResetElements() {
}
+MaybeObject* JSObject::AddFastPropertyUsingMap(Map* map) {
+ ASSERT(this->map()->NumberOfOwnDescriptors() + 1 ==
+ map->NumberOfOwnDescriptors());
+ if (this->map()->unused_property_fields() == 0) {
+ int new_size = properties()->length() + map->unused_property_fields() + 1;
+ FixedArray* new_properties;
+ MaybeObject* maybe_properties = properties()->CopySize(new_size);
+ if (!maybe_properties->To(&new_properties)) return maybe_properties;
+ set_properties(new_properties);
+ }
+ set_map(map);
+ return this;
+}
+
+
+bool JSObject::TryTransitionToField(Handle<JSObject> object,
+ Handle<String> key) {
+ if (!object->map()->HasTransitionArray()) return false;
+ Handle<TransitionArray> transitions(object->map()->transitions());
+ int transition = transitions->Search(*key);
+ if (transition == TransitionArray::kNotFound) return false;
+ PropertyDetails target_details = transitions->GetTargetDetails(transition);
+ if (target_details.type() != FIELD) return false;
+ if (target_details.attributes() != NONE) return false;
+ Handle<Map> target(transitions->GetTarget(transition));
+ JSObject::AddFastPropertyUsingMap(object, target);
+ return true;
+}
+
+
+int JSObject::LastAddedFieldIndex() {
+ Map* map = this->map();
+ int last_added = map->LastAdded();
+ return map->instance_descriptors()->GetFieldIndex(last_added);
+}
+
+
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
@@ -1627,17 +1669,27 @@ void JSObject::InitializeBody(Map* map,
bool JSObject::HasFastProperties() {
+ ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
return !properties()->IsDictionary();
}
-int JSObject::MaxFastProperties() {
+bool JSObject::TooManyFastProperties(int properties,
+ JSObject::StoreFromKeyed store_mode) {
// Allow extra fast properties if the object has more than
- // kMaxFastProperties in-object properties. When this is the case,
+ // kFastPropertiesSoftLimit in-object properties. When this is the case,
// it is very unlikely that the object is being used as a dictionary
// and there is a good chance that allowing more map transitions
// will be worth it.
- return Max(map()->inobject_properties(), kMaxFastProperties);
+ int inobject = map()->inobject_properties();
+
+ int limit;
+ if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
+ limit = Max(inobject, kMaxFastProperties);
+ } else {
+ limit = Max(inobject, kFastPropertiesSoftLimit);
+ }
+ return properties > limit;
}
@@ -1681,6 +1733,23 @@ bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
}
+
+void Object::VerifyApiCallResultType() {
+#if ENABLE_EXTRA_CHECKS
+ if (!(IsSmi() ||
+ IsString() ||
+ IsSpecObject() ||
+ IsHeapNumber() ||
+ IsUndefined() ||
+ IsTrue() ||
+ IsFalse() ||
+ IsNull())) {
+ FATAL("API call returned invalid object");
+ }
+#endif // ENABLE_EXTRA_CHECKS
+}
+
+
FixedArrayBase* FixedArrayBase::cast(Object* object) {
ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
return reinterpret_cast<FixedArrayBase*>(object);
@@ -1693,6 +1762,11 @@ Object* FixedArray::get(int index) {
}
+bool FixedArray::is_the_hole(int index) {
+ return get(index) == GetHeap()->the_hole_value();
+}
+
+
void FixedArray::set(int index, Smi* value) {
ASSERT(map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < this->length());
@@ -1798,7 +1872,7 @@ void FixedArray::set(int index,
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
- ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(array->map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < array->length());
int offset = kHeaderSize + index * kPointerSize;
WRITE_FIELD(array, offset, value);
@@ -1812,7 +1886,7 @@ void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
void FixedArray::NoWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
- ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(array->map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < array->length());
ASSERT(!HEAP->InNewSpace(value));
WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
@@ -1874,7 +1948,7 @@ void FixedArray::set_unchecked(Heap* heap,
void FixedArray::set_null_unchecked(Heap* heap, int index) {
ASSERT(index >= 0 && index < this->length());
- ASSERT(!HEAP->InNewSpace(heap->null_value()));
+ ASSERT(!heap->InNewSpace(heap->null_value()));
WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
@@ -1885,76 +1959,198 @@ Object** FixedArray::data_start() {
bool DescriptorArray::IsEmpty() {
- ASSERT(this->IsSmi() ||
- this->length() > kFirstIndex ||
+ ASSERT(length() >= kFirstIndex ||
this == HEAP->empty_descriptor_array());
- return this->IsSmi() || length() <= kFirstIndex;
+ return length() < kFirstIndex;
}
-int DescriptorArray::bit_field3_storage() {
- Object* storage = READ_FIELD(this, kBitField3StorageOffset);
- return Smi::cast(storage)->value();
+void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
+ WRITE_FIELD(
+ this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
-void DescriptorArray::set_bit_field3_storage(int value) {
- ASSERT(!IsEmpty());
- WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
+
+// Perform a binary search in a fixed array. Low and high are entry indices. If
+// there are three entries in this array it should be called with low=0 and
+// high=2.
+template<SearchMode search_mode, typename T>
+int BinarySearch(T* array, String* name, int low, int high, int valid_entries) {
+ uint32_t hash = name->Hash();
+ int limit = high;
+
+ ASSERT(low <= high);
+
+ while (low != high) {
+ int mid = (low + high) / 2;
+ String* mid_name = array->GetSortedKey(mid);
+ uint32_t mid_hash = mid_name->Hash();
+
+ if (mid_hash >= hash) {
+ high = mid;
+ } else {
+ low = mid + 1;
+ }
+ }
+
+ for (; low <= limit; ++low) {
+ int sort_index = array->GetSortedKeyIndex(low);
+ String* entry = array->GetKey(sort_index);
+ if (entry->Hash() != hash) break;
+ if (entry->Equals(name)) {
+ if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
+ return sort_index;
+ }
+ return T::kNotFound;
+ }
+ }
+
+ return T::kNotFound;
}
-void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
- int first,
- int second) {
- Object* tmp = array->get(first);
- NoIncrementalWriteBarrierSet(array, first, array->get(second));
- NoIncrementalWriteBarrierSet(array, second, tmp);
+// Perform a linear search in this fixed array. len is the number of entry
+// indices that are valid.
+template<SearchMode search_mode, typename T>
+int LinearSearch(T* array, String* name, int len, int valid_entries) {
+ uint32_t hash = name->Hash();
+ if (search_mode == ALL_ENTRIES) {
+ for (int number = 0; number < len; number++) {
+ int sorted_index = array->GetSortedKeyIndex(number);
+ String* entry = array->GetKey(sorted_index);
+ uint32_t current_hash = entry->Hash();
+ if (current_hash > hash) break;
+ if (current_hash == hash && entry->Equals(name)) return sorted_index;
+ }
+ } else {
+ ASSERT(len >= valid_entries);
+ for (int number = 0; number < valid_entries; number++) {
+ String* entry = array->GetKey(number);
+ uint32_t current_hash = entry->Hash();
+ if (current_hash == hash && entry->Equals(name)) return number;
+ }
+ }
+ return T::kNotFound;
}
-int DescriptorArray::Search(String* name) {
- SLOW_ASSERT(IsSortedNoDuplicates());
+template<SearchMode search_mode, typename T>
+int Search(T* array, String* name, int valid_entries) {
+ if (search_mode == VALID_ENTRIES) {
+ SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
+ } else {
+ SLOW_ASSERT(array->IsSortedNoDuplicates());
+ }
- // Check for empty descriptor array.
- int nof = number_of_descriptors();
- if (nof == 0) return kNotFound;
+ int nof = array->number_of_entries();
+ if (nof == 0) return T::kNotFound;
// Fast case: do linear search for small arrays.
const int kMaxElementsForLinearSearch = 8;
- if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
- return LinearSearch(name, nof);
+ if ((search_mode == ALL_ENTRIES &&
+ nof <= kMaxElementsForLinearSearch) ||
+ (search_mode == VALID_ENTRIES &&
+ valid_entries <= (kMaxElementsForLinearSearch * 3))) {
+ return LinearSearch<search_mode>(array, name, nof, valid_entries);
}
// Slow case: perform binary search.
- return BinarySearch(name, 0, nof - 1);
+ return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
+}
+
+
+int DescriptorArray::Search(String* name, int valid_descriptors) {
+ return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}
-int DescriptorArray::SearchWithCache(String* name) {
- int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
+int DescriptorArray::SearchWithCache(String* name, Map* map) {
+ int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ if (number_of_own_descriptors == 0) return kNotFound;
+
+ DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
+ int number = cache->Lookup(map, name);
+
if (number == DescriptorLookupCache::kAbsent) {
- number = Search(name);
- GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
+ number = Search(name, number_of_own_descriptors);
+ cache->Update(map, name, number);
}
+
return number;
}
+void Map::LookupDescriptor(JSObject* holder,
+ String* name,
+ LookupResult* result) {
+ DescriptorArray* descriptors = this->instance_descriptors();
+ int number = descriptors->SearchWithCache(name, this);
+ if (number == DescriptorArray::kNotFound) return result->NotFound();
+ result->DescriptorResult(holder, descriptors->GetDetails(number), number);
+}
+
+
+void Map::LookupTransition(JSObject* holder,
+ String* name,
+ LookupResult* result) {
+ if (HasTransitionArray()) {
+ TransitionArray* transition_array = transitions();
+ int number = transition_array->Search(name);
+ if (number != TransitionArray::kNotFound) {
+ return result->TransitionResult(holder, number);
+ }
+ }
+ result->NotFound();
+}
+
+
+Object** DescriptorArray::GetKeySlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToKeyIndex(descriptor_number)));
+}
+
+
String* DescriptorArray::GetKey(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
return String::cast(get(ToKeyIndex(descriptor_number)));
}
+int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
+ return GetDetails(descriptor_number).pointer();
+}
+
+
+String* DescriptorArray::GetSortedKey(int descriptor_number) {
+ return GetKey(GetSortedKeyIndex(descriptor_number));
+}
+
+
+void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
+ PropertyDetails details = GetDetails(descriptor_index);
+ set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
+}
+
+
+Object** DescriptorArray::GetValueSlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToValueIndex(descriptor_number)));
+}
+
+
Object* DescriptorArray::GetValue(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- return GetContentArray()->get(ToValueIndex(descriptor_number));
+ return get(ToValueIndex(descriptor_number));
}
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
+ Object* details = get(ToDetailsIndex(descriptor_number));
return PropertyDetails(Smi::cast(details));
}
@@ -1987,42 +2183,6 @@ AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
}
-bool DescriptorArray::IsProperty(int descriptor_number) {
- Entry entry(this, descriptor_number);
- return IsPropertyDescriptor(&entry);
-}
-
-
-bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
- switch (GetType(descriptor_number)) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
- return true;
- case CALLBACKS: {
- Object* value = GetValue(descriptor_number);
- if (!value->IsAccessorPair()) return false;
- AccessorPair* accessors = AccessorPair::cast(value);
- return accessors->getter()->IsMap() && accessors->setter()->IsMap();
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // Keep the compiler happy.
- return false;
-}
-
-
-bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
- return GetType(descriptor_number) == NULL_DESCRIPTOR;
-}
-
-
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
desc->Init(GetKey(descriptor_number),
GetValue(descriptor_number),
@@ -2035,40 +2195,89 @@ void DescriptorArray::Set(int descriptor_number,
const WhitenessWitness&) {
// Range check.
ASSERT(descriptor_number < number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() <=
+ number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() > 0);
NoIncrementalWriteBarrierSet(this,
ToKeyIndex(descriptor_number),
desc->GetKey());
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToValueIndex(descriptor_number),
desc->GetValue());
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToDetailsIndex(descriptor_number),
desc->GetDetails().AsSmi());
}
-void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
- int first, int second) {
- NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSwap(content_array,
- ToValueIndex(first),
- ToValueIndex(second));
- NoIncrementalWriteBarrierSwap(content_array,
- ToDetailsIndex(first),
- ToDetailsIndex(second));
+void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
+ // Range check.
+ ASSERT(descriptor_number < number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() <=
+ number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() > 0);
+
+ set(ToKeyIndex(descriptor_number), desc->GetKey());
+ set(ToValueIndex(descriptor_number), desc->GetValue());
+ set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
-DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
+void DescriptorArray::Append(Descriptor* desc,
+ const WhitenessWitness& witness) {
+ int descriptor_number = number_of_descriptors();
+ int enumeration_index = descriptor_number + 1;
+ SetNumberOfDescriptors(descriptor_number + 1);
+ desc->SetEnumerationIndex(enumeration_index);
+ Set(descriptor_number, desc, witness);
+
+ uint32_t hash = desc->GetKey()->Hash();
+
+ int insertion;
+
+ for (insertion = descriptor_number; insertion > 0; --insertion) {
+ String* key = GetSortedKey(insertion - 1);
+ if (key->Hash() <= hash) break;
+ SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
+ }
+
+ SetSortedKey(insertion, descriptor_number);
+}
+
+
+void DescriptorArray::Append(Descriptor* desc) {
+ int descriptor_number = number_of_descriptors();
+ int enumeration_index = descriptor_number + 1;
+ SetNumberOfDescriptors(descriptor_number + 1);
+ desc->SetEnumerationIndex(enumeration_index);
+ Set(descriptor_number, desc);
+
+ uint32_t hash = desc->GetKey()->Hash();
+
+ int insertion;
+
+ for (insertion = descriptor_number; insertion > 0; --insertion) {
+ String* key = GetSortedKey(insertion - 1);
+ if (key->Hash() <= hash) break;
+ SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
+ }
+
+ SetSortedKey(insertion, descriptor_number);
+}
+
+
+void DescriptorArray::SwapSortedKeys(int first, int second) {
+ int first_key = GetSortedKeyIndex(first);
+ SetSortedKey(first, GetSortedKeyIndex(second));
+ SetSortedKey(second, first_key);
+}
+
+
+DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
: marking_(array->GetHeap()->incremental_marking()) {
marking_->EnterNoMarkingScope();
- if (array->number_of_descriptors() > 0) {
- ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
- ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
- }
+ ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
}
@@ -2103,7 +2312,8 @@ int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
- // Empty entry.
+ // Empty entry. Uses raw unchecked accessors because it is called by the
+ // symbol table during bootstrapping.
if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
Shape::IsMatch(key, element)) return entry;
@@ -2258,18 +2468,18 @@ String* String::TryFlattenGetString(PretenureFlag pretenure) {
uint16_t String::Get(int index) {
ASSERT(index >= 0 && index < length());
switch (StringShape(this).full_representation_tag()) {
- case kSeqStringTag | kAsciiStringTag:
+ case kSeqStringTag | kOneByteStringTag:
return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
case kSeqStringTag | kTwoByteStringTag:
return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
- case kConsStringTag | kAsciiStringTag:
+ case kConsStringTag | kOneByteStringTag:
case kConsStringTag | kTwoByteStringTag:
return ConsString::cast(this)->ConsStringGet(index);
- case kExternalStringTag | kAsciiStringTag:
+ case kExternalStringTag | kOneByteStringTag:
return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
case kExternalStringTag | kTwoByteStringTag:
return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
- case kSlicedStringTag | kAsciiStringTag:
+ case kSlicedStringTag | kOneByteStringTag:
case kSlicedStringTag | kTwoByteStringTag:
return SlicedString::cast(this)->SlicedStringGet(index);
default:
@@ -2368,9 +2578,10 @@ String* SlicedString::parent() {
}
-void SlicedString::set_parent(String* parent) {
+void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
ASSERT(parent->IsSeqString() || parent->IsExternalString());
WRITE_FIELD(this, kParentOffset, parent);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}
@@ -2874,16 +3085,12 @@ bool Map::has_non_instance_prototype() {
void Map::set_function_with_prototype(bool value) {
- if (value) {
- set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
- } else {
- set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
- }
+ set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
}
bool Map::function_with_prototype() {
- return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
+ return FunctionWithPrototype::decode(bit_field3());
}
@@ -2915,28 +3122,21 @@ bool Map::is_extensible() {
void Map::set_attached_to_shared_function_info(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kAttachedToSharedFunctionInfo));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kAttachedToSharedFunctionInfo));
- }
+ set_bit_field3(AttachedToSharedFunctionInfo::update(bit_field3(), value));
}
bool Map::attached_to_shared_function_info() {
- return ((1 << kAttachedToSharedFunctionInfo) & bit_field3()) != 0;
+ return AttachedToSharedFunctionInfo::decode(bit_field3());
}
void Map::set_is_shared(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kIsShared));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kIsShared));
- }
+ set_bit_field3(IsShared::update(bit_field3(), value));
}
+
bool Map::is_shared() {
- return ((1 << kIsShared) & bit_field3()) != 0;
+ return IsShared::decode(bit_field3());
}
void Map::set_has_external_resource(bool value) {
@@ -2968,15 +3168,21 @@ bool Map::use_user_object_comparison() {
void Map::set_named_interceptor_is_fallback(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
- }
+ set_bit_field3(NamedInterceptorIsFallback::update(bit_field3(), value));
}
bool Map::named_interceptor_is_fallback() {
- return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
+ return NamedInterceptorIsFallback::decode(bit_field3());
+}
+
+
+void Map::set_dictionary_map(bool value) {
+ set_bit_field3(DictionaryMap::update(bit_field3(), value));
+}
+
+
+bool Map::is_dictionary_map() {
+ return DictionaryMap::decode(bit_field3());
}
@@ -2990,6 +3196,26 @@ Code::Flags Code::flags() {
}
+void Map::set_owns_descriptors(bool is_shared) {
+ set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
+}
+
+
+bool Map::owns_descriptors() {
+ return OwnsDescriptors::decode(bit_field3());
+}
+
+
+void Map::set_is_observed(bool is_observed) {
+ set_bit_field3(IsObserved::update(bit_field3(), is_observed));
+}
+
+
+bool Map::is_observed() {
+ return IsObserved::decode(bit_field3());
+}
+
+
void Code::set_flags(Code::Flags flags) {
STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
// Make sure that all call stubs have an arguments count.
@@ -3024,7 +3250,7 @@ Code::ExtraICState Code::extra_ic_state() {
}
-PropertyType Code::type() {
+Code::StubType Code::type() {
return ExtractTypeFromFlags(flags());
}
@@ -3041,7 +3267,8 @@ int Code::major_key() {
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
- return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
+ return StubMajorKeyField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
@@ -3052,7 +3279,9 @@ void Code::set_major_key(int major) {
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
ASSERT(0 <= major && major < 256);
- WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = StubMajorKeyField::update(previous, major);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
@@ -3154,39 +3383,50 @@ void Code::set_profiler_ticks(int ticks) {
unsigned Code::stack_slots() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
- return READ_UINT32_FIELD(this, kStackSlotsOffset);
+ return StackSlotsField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_stack_slots(unsigned slots) {
+ CHECK(slots <= (1 << kStackSlotsBitCount));
ASSERT(kind() == OPTIMIZED_FUNCTION);
- WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = StackSlotsField::update(previous, slots);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
unsigned Code::safepoint_table_offset() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
- return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
+ return SafepointTableOffsetField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
void Code::set_safepoint_table_offset(unsigned offset) {
+ CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
ASSERT(kind() == OPTIMIZED_FUNCTION);
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
- WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = SafepointTableOffsetField::update(previous, offset);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
unsigned Code::stack_check_table_offset() {
ASSERT_EQ(FUNCTION, kind());
- return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
+ return StackCheckTableOffsetField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
void Code::set_stack_check_table_offset(unsigned offset) {
ASSERT_EQ(FUNCTION, kind());
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
- WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = StackCheckTableOffsetField::update(previous, offset);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
@@ -3205,85 +3445,106 @@ void Code::set_check_type(CheckType value) {
byte Code::unary_op_type() {
ASSERT(is_unary_op_stub());
- return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
+ return UnaryOpTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_unary_op_type(byte value) {
ASSERT(is_unary_op_stub());
- WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = UnaryOpTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::binary_op_type() {
ASSERT(is_binary_op_stub());
- return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
+ return BinaryOpTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_binary_op_type(byte value) {
ASSERT(is_binary_op_stub());
- WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = BinaryOpTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::binary_op_result_type() {
ASSERT(is_binary_op_stub());
- return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
+ return BinaryOpResultTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_binary_op_result_type(byte value) {
ASSERT(is_binary_op_stub());
- WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = BinaryOpResultTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::compare_state() {
ASSERT(is_compare_ic_stub());
- return READ_BYTE_FIELD(this, kCompareStateOffset);
+ return CompareStateField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_compare_state(byte value) {
ASSERT(is_compare_ic_stub());
- WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = CompareStateField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::compare_operation() {
ASSERT(is_compare_ic_stub());
- return READ_BYTE_FIELD(this, kCompareOperationOffset);
+ return CompareOperationField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_compare_operation(byte value) {
ASSERT(is_compare_ic_stub());
- WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = CompareOperationField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::to_boolean_state() {
ASSERT(is_to_boolean_ic_stub());
- return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
+ return ToBooleanStateField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_to_boolean_state(byte value) {
ASSERT(is_to_boolean_ic_stub());
- WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = ToBooleanStateField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::has_function_cache() {
ASSERT(kind() == STUB);
- return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
+ return HasFunctionCacheField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_has_function_cache(bool flag) {
ASSERT(kind() == STUB);
- WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = HasFunctionCacheField::update(previous, flag);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
@@ -3296,7 +3557,7 @@ bool Code::is_inline_cache_stub() {
Code::Flags Code::ComputeFlags(Kind kind,
InlineCacheState ic_state,
ExtraICState extra_ic_state,
- PropertyType type,
+ StubType type,
int argc,
InlineCacheHolderFlag holder) {
// Extra IC state is only allowed for call IC stubs or for store IC
@@ -3317,7 +3578,7 @@ Code::Flags Code::ComputeFlags(Kind kind,
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
- PropertyType type,
+ StubType type,
ExtraICState extra_ic_state,
InlineCacheHolderFlag holder,
int argc) {
@@ -3340,7 +3601,7 @@ Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
}
-PropertyType Code::ExtractTypeFromFlags(Flags flags) {
+Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
return TypeField::decode(flags);
}
@@ -3390,150 +3651,222 @@ void Map::set_prototype(Object* value, WriteBarrierMode mode) {
}
-DescriptorArray* Map::instance_descriptors() {
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- return GetHeap()->empty_descriptor_array();
+// If the descriptor is using the empty transition array, install a new empty
+// transition array that will have place for an element transition.
+static MaybeObject* EnsureHasTransitionArray(Map* map) {
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions;
+ if (!map->HasTransitionArray()) {
+ maybe_transitions = TransitionArray::Allocate(0);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+ transitions->set_back_pointer_storage(map->GetBackPointer());
+ } else if (!map->transitions()->IsFullTransitionArray()) {
+ maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
} else {
- return DescriptorArray::cast(object);
+ return map;
}
+ map->set_transitions(transitions);
+ return transitions;
}
-void Map::init_instance_descriptors() {
- WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
-}
-
-
-void Map::clear_instance_descriptors() {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (!object->IsSmi()) {
+void Map::InitializeDescriptors(DescriptorArray* descriptors) {
+ int len = descriptors->number_of_descriptors();
#ifdef DEBUG
- ZapInstanceDescriptors();
-#endif
- WRITE_FIELD(
- this,
- kInstanceDescriptorsOrBitField3Offset,
- Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
+ ASSERT(len <= DescriptorArray::kMaxNumberOfDescriptors);
+
+ bool used_indices[DescriptorArray::kMaxNumberOfDescriptors];
+ for (int i = 0; i < len; ++i) used_indices[i] = false;
+
+ // Ensure that all enumeration indexes between 1 and length occur uniquely in
+ // the descriptor array.
+ for (int i = 0; i < len; ++i) {
+ int enum_index = descriptors->GetDetails(i).descriptor_index() -
+ PropertyDetails::kInitialIndex;
+ ASSERT(0 <= enum_index && enum_index < len);
+ ASSERT(!used_indices[enum_index]);
+ used_indices[enum_index] = true;
}
+#endif
+
+ set_instance_descriptors(descriptors);
+ SetNumberOfOwnDescriptors(len);
}
-void Map::set_instance_descriptors(DescriptorArray* value,
- WriteBarrierMode mode) {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- Heap* heap = GetHeap();
- if (value == heap->empty_descriptor_array()) {
- clear_instance_descriptors();
- return;
- } else {
- if (object->IsSmi()) {
- value->set_bit_field3_storage(Smi::cast(object)->value());
- } else {
- value->set_bit_field3_storage(
- DescriptorArray::cast(object)->bit_field3_storage());
- }
- }
- ASSERT(!is_shared());
-#ifdef DEBUG
- if (value != instance_descriptors()) {
- ZapInstanceDescriptors();
- }
-#endif
- WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
-}
+ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
+SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
-int Map::bit_field3() {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- return Smi::cast(object)->value();
- } else {
- return DescriptorArray::cast(object)->bit_field3_storage();
+void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
+ Object* back_pointer = GetBackPointer();
+
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
+ ZapTransitions();
}
+
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}
-void Map::set_bit_field3(int value) {
- ASSERT(Smi::IsValid(value));
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- WRITE_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset,
- Smi::FromInt(value));
- } else {
- DescriptorArray::cast(object)->set_bit_field3_storage(value);
- }
+void Map::AppendDescriptor(Descriptor* desc,
+ const DescriptorArray::WhitenessWitness& witness) {
+ DescriptorArray* descriptors = instance_descriptors();
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ descriptors->Append(desc, witness);
+ SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
Object* Map::GetBackPointer() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsDescriptorArray()) {
+ return TransitionArray::cast(object)->back_pointer_storage();
} else {
+ ASSERT(object->IsMap() || object->IsUndefined());
return object;
}
}
-void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
- Heap* heap = GetHeap();
- ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
- ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
- (value->IsMap() && GetBackPointer()->IsUndefined()));
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- FixedArray::cast(object)->set(
- kProtoTransitionBackPointerOffset, value, mode);
- } else {
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
- }
+bool Map::HasElementsTransition() {
+ return HasTransitionArray() && transitions()->HasElementsTransition();
}
-FixedArray* Map::prototype_transitions() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- return FixedArray::cast(object);
- } else {
+bool Map::HasTransitionArray() {
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return object->IsTransitionArray();
+}
+
+
+Map* Map::elements_transition_map() {
+ return transitions()->elements_transition();
+}
+
+
+bool Map::CanHaveMoreTransitions() {
+ if (!HasTransitionArray()) return true;
+ return FixedArray::SizeFor(transitions()->length() +
+ TransitionArray::kTransitionSize)
+ <= Page::kMaxNonCodeHeapObjectSize;
+}
+
+
+MaybeObject* Map::AddTransition(String* key,
+ Map* target,
+ SimpleTransitionFlag flag) {
+ if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
+ return TransitionArray::NewWith(flag, key, target, GetBackPointer());
+}
+
+
+void Map::SetTransition(int transition_index, Map* target) {
+ transitions()->SetTarget(transition_index, target);
+}
+
+
+Map* Map::GetTransition(int transition_index) {
+ return transitions()->GetTarget(transition_index);
+}
+
+
+MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
+ MaybeObject* allow_elements = EnsureHasTransitionArray(this);
+ if (allow_elements->IsFailure()) return allow_elements;
+ transitions()->set_elements_transition(transitioned_map);
+ return this;
+}
+
+
+FixedArray* Map::GetPrototypeTransitions() {
+ if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
+ if (!transitions()->HasPrototypeTransitions()) {
return GetHeap()->empty_fixed_array();
}
+ return transitions()->GetPrototypeTransitions();
}
-void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
- Heap* heap = GetHeap();
- ASSERT(value != heap->empty_fixed_array());
- value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
+ MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
+ if (allow_prototype->IsFailure()) return allow_prototype;
#ifdef DEBUG
- if (value != prototype_transitions()) {
+ if (HasPrototypeTransitions()) {
+ ASSERT(GetPrototypeTransitions() != proto_transitions);
ZapPrototypeTransitions();
}
#endif
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ transitions()->SetPrototypeTransitions(proto_transitions);
+ return this;
+}
+
+
+bool Map::HasPrototypeTransitions() {
+ return HasTransitionArray() && transitions()->HasPrototypeTransitions();
+}
+
+
+TransitionArray* Map::transitions() {
+ ASSERT(HasTransitionArray());
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return TransitionArray::cast(object);
+}
+
+
+void Map::set_transitions(TransitionArray* transition_array,
+ WriteBarrierMode mode) {
+ // In release mode, only run this code if verify_heap is on.
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
+ CHECK(transitions() != transition_array);
+ ZapTransitions();
+ }
+
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
CONDITIONAL_WRITE_BARRIER(
- heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+ GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}
-void Map::init_prototype_transitions(Object* undefined) {
+void Map::init_back_pointer(Object* undefined) {
ASSERT(undefined->IsUndefined());
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
+}
+
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+ ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
+ ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
+ (value->IsMap() && GetBackPointer()->IsUndefined()));
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsTransitionArray()) {
+ TransitionArray::cast(object)->set_back_pointer_storage(value);
+ } else {
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
+ }
+}
+
+
+// Can either be Smi (no transitions), normal transition array, or a transition
+// array with the header overwritten as a Smi (thus iterating).
+TransitionArray* Map::unchecked_transition_array() {
+ Object* object = *HeapObject::RawField(this,
+ Map::kTransitionsOrBackPointerOffset);
+ TransitionArray* transition_array = static_cast<TransitionArray*>(object);
+ return transition_array;
}
-HeapObject* Map::unchecked_prototype_transitions() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- return reinterpret_cast<HeapObject*>(object);
+HeapObject* Map::UncheckedPrototypeTransitions() {
+ ASSERT(HasTransitionArray());
+ ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
+ return unchecked_transition_array()->UncheckedPrototypeTransitions();
}
@@ -3542,22 +3875,22 @@ ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
-ACCESSORS(JSFunction,
- next_function_link,
- Object,
- kNextFunctionLinkOffset)
+ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
+ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
-ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
+ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+ACCESSORS(AccessorInfo, expected_receiver_type, Object,
+ kExpectedReceiverTypeOffset)
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
@@ -3606,7 +3939,7 @@ ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
kInternalFieldCountOffset)
ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
kHasExternalResourceOffset)
-ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
+ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
kUseUserObjectComparisonOffset)
ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
@@ -3643,6 +3976,8 @@ ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
+ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
+ kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
@@ -3653,7 +3988,7 @@ ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
@@ -3677,6 +4012,10 @@ BOOL_ACCESSORS(SharedFunctionInfo,
kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
+ allows_lazy_compilation_without_context,
+ kAllowLazyCompilationWithoutContext)
+BOOL_ACCESSORS(SharedFunctionInfo,
+ compiler_hints,
uses_arguments,
kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
@@ -3702,8 +4041,10 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
+SMI_ACCESSORS(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
@@ -3755,8 +4096,10 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
-PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, counters, kCountersOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#endif
@@ -3851,6 +4194,18 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
+
+void SharedFunctionInfo::BeforeVisitingPointers() {
+ if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
+
+ // Flush optimized code map on major GC.
+ // Note: we may experiment with rebuilding it or retaining entries
+ // which should survive as we iterate through optimized functions
+ // anyway.
+ set_optimized_code_map(Smi::FromInt(0));
+}
+
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
@@ -3959,14 +4314,66 @@ void SharedFunctionInfo::set_code_age(int code_age) {
}
+int SharedFunctionInfo::ic_age() {
+ return ICAgeBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_ic_age(int ic_age) {
+ set_counters(ICAgeBits::update(counters(), ic_age));
+}
+
+
+int SharedFunctionInfo::deopt_count() {
+ return DeoptCountBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_deopt_count(int deopt_count) {
+ set_counters(DeoptCountBits::update(counters(), deopt_count));
+}
+
+
+void SharedFunctionInfo::increment_deopt_count() {
+ int value = counters();
+ int deopt_count = DeoptCountBits::decode(value);
+ deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
+ set_counters(DeoptCountBits::update(value, deopt_count));
+}
+
+
+int SharedFunctionInfo::opt_reenable_tries() {
+ return OptReenableTriesBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
+ set_counters(OptReenableTriesBits::update(counters(), tries));
+}
+
+
bool SharedFunctionInfo::has_deoptimization_support() {
Code* code = this->code();
return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
+void SharedFunctionInfo::TryReenableOptimization() {
+ int tries = opt_reenable_tries();
+ set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
+ // We reenable optimization whenever the number of tries is a large
+ // enough power of 2.
+ if (tries >= 16 && (((tries - 1) & tries) == 0)) {
+ set_optimization_disabled(false);
+ set_opt_count(0);
+ set_deopt_count(0);
+ code()->set_optimizable(true);
+ }
+}
+
+
bool JSFunction::IsBuiltin() {
- return context()->global()->IsJSBuiltinsObject();
+ return context()->global_object()->IsJSBuiltinsObject();
}
@@ -3991,6 +4398,18 @@ bool JSFunction::IsMarkedForLazyRecompilation() {
}
+bool JSFunction::IsMarkedForParallelRecompilation() {
+ return code() ==
+ GetIsolate()->builtins()->builtin(Builtins::kParallelRecompile);
+}
+
+
+bool JSFunction::IsInRecompileQueue() {
+ return code() == GetIsolate()->builtins()->builtin(
+ Builtins::kInRecompileQueue);
+}
+
+
Code* JSFunction::code() {
return Code::cast(unchecked_code());
}
@@ -4022,10 +4441,10 @@ void JSFunction::ReplaceCode(Code* code) {
// Add/remove the function from the list of optimized functions for this
// context based on the state change.
if (!was_optimized && is_optimized) {
- context()->global_context()->AddOptimizedFunction(this);
+ context()->native_context()->AddOptimizedFunction(this);
}
if (was_optimized && !is_optimized) {
- context()->global_context()->RemoveOptimizedFunction(this);
+ context()->native_context()->RemoveOptimizedFunction(this);
}
}
@@ -4066,40 +4485,6 @@ void JSFunction::set_initial_map(Map* value) {
}
-MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
- Map* initial_map) {
- Context* global_context = context()->global_context();
- Object* array_function =
- global_context->get(Context::ARRAY_FUNCTION_INDEX);
- if (array_function->IsJSFunction() &&
- this == JSFunction::cast(array_function)) {
- ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
-
- MaybeObject* maybe_map = initial_map->CopyDropTransitions();
- Map* new_double_map = NULL;
- if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
- new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
- maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
- new_double_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- maybe_map = new_double_map->CopyDropTransitions();
- Map* new_object_map = NULL;
- if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
- new_object_map->set_elements_kind(FAST_ELEMENTS);
- maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
- new_object_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- global_context->set_smi_js_array_map(initial_map);
- global_context->set_double_js_array_map(new_double_map);
- global_context->set_object_js_array_map(new_object_map);
- }
- set_initial_map(initial_map);
- return this;
-}
-
-
bool JSFunction::has_initial_map() {
return prototype_or_initial_map()->IsMap();
}
@@ -4132,6 +4517,7 @@ Object* JSFunction::prototype() {
return instance_prototype();
}
+
bool JSFunction::should_have_prototype() {
return map()->function_with_prototype();
}
@@ -4235,6 +4621,7 @@ void Foreign::set_foreign_address(Address value) {
ACCESSORS(JSModule, context, Object, kContextOffset)
+ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
JSModule* JSModule::cast(Object* obj) {
@@ -4429,18 +4816,18 @@ ElementsKind JSObject::GetElementsKind() {
FixedArrayBase* fixed_array =
reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
Map* map = fixed_array->map();
- ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
- (map == GetHeap()->fixed_array_map() ||
- map == GetHeap()->fixed_cow_array_map())) ||
- (kind == FAST_DOUBLE_ELEMENTS &&
- (fixed_array->IsFixedDoubleArray() ||
- fixed_array == GetHeap()->empty_fixed_array())) ||
- (kind == DICTIONARY_ELEMENTS &&
+ ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
+ (map == GetHeap()->fixed_array_map() ||
+ map == GetHeap()->fixed_cow_array_map())) ||
+ (IsFastDoubleElementsKind(kind) &&
+ (fixed_array->IsFixedDoubleArray() ||
+ fixed_array == GetHeap()->empty_fixed_array())) ||
+ (kind == DICTIONARY_ELEMENTS &&
fixed_array->IsFixedArray() &&
- fixed_array->IsDictionary()) ||
- (kind > DICTIONARY_ELEMENTS));
- ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
- (elements()->IsFixedArray() && elements()->length() >= 2));
+ fixed_array->IsDictionary()) ||
+ (kind > DICTIONARY_ELEMENTS));
+ ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
+ (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
return kind;
}
@@ -4451,25 +4838,28 @@ ElementsAccessor* JSObject::GetElementsAccessor() {
}
-bool JSObject::HasFastElements() {
- return GetElementsKind() == FAST_ELEMENTS;
+bool JSObject::HasFastObjectElements() {
+ return IsFastObjectElementsKind(GetElementsKind());
}
-bool JSObject::HasFastSmiOnlyElements() {
- return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
+bool JSObject::HasFastSmiElements() {
+ return IsFastSmiElementsKind(GetElementsKind());
}
-bool JSObject::HasFastTypeElements() {
- ElementsKind elements_kind = GetElementsKind();
- return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_ELEMENTS;
+bool JSObject::HasFastSmiOrObjectElements() {
+ return IsFastSmiOrObjectElementsKind(GetElementsKind());
}
bool JSObject::HasFastDoubleElements() {
- return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
+ return IsFastDoubleElementsKind(GetElementsKind());
+}
+
+
+bool JSObject::HasFastHoleyElements() {
+ return IsFastHoleyElementsKind(GetElementsKind());
}
@@ -4526,7 +4916,7 @@ bool JSObject::HasIndexedInterceptor() {
MaybeObject* JSObject::EnsureWritableFastElements() {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elems = FixedArray::cast(elements());
Isolate* isolate = GetIsolate();
if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
@@ -4579,8 +4969,7 @@ StringHasher::StringHasher(int length, uint32_t seed)
raw_running_hash_(seed),
array_index_(0),
is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
- is_first_char_(true),
- is_valid_(true) {
+ is_first_char_(true) {
ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}
@@ -4590,6 +4979,25 @@ bool StringHasher::has_trivial_hash() {
}
+uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint32_t c) {
+ running_hash += c;
+ running_hash += (running_hash << 10);
+ running_hash ^= (running_hash >> 6);
+ return running_hash;
+}
+
+
+uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
+ running_hash += (running_hash << 3);
+ running_hash ^= (running_hash >> 11);
+ running_hash += (running_hash << 15);
+ if ((running_hash & String::kHashBitMask) == 0) {
+ return kZeroHash;
+ }
+ return running_hash;
+}
+
+
void StringHasher::AddCharacter(uint32_t c) {
if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
AddSurrogatePair(c); // Not inlined.
@@ -4597,9 +5005,7 @@ void StringHasher::AddCharacter(uint32_t c) {
}
// Use the Jenkins one-at-a-time hash function to update the hash
// for the given character.
- raw_running_hash_ += c;
- raw_running_hash_ += (raw_running_hash_ << 10);
- raw_running_hash_ ^= (raw_running_hash_ >> 6);
+ raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
// Incremental array index computation.
if (is_array_index_) {
if (c < '0' || c > '9') {
@@ -4629,23 +5035,14 @@ void StringHasher::AddCharacterNoIndex(uint32_t c) {
AddSurrogatePairNoIndex(c); // Not inlined.
return;
}
- raw_running_hash_ += c;
- raw_running_hash_ += (raw_running_hash_ << 10);
- raw_running_hash_ ^= (raw_running_hash_ >> 6);
+ raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
uint32_t StringHasher::GetHash() {
// Get the calculated raw hash value and do some more bit ops to distribute
// the hash further. Ensure that we never return zero as the hash value.
- uint32_t result = raw_running_hash_;
- result += (result << 3);
- result ^= (result >> 11);
- result += (result << 15);
- if ((result & String::kHashBitMask) == 0) {
- result = 27;
- }
- return result;
+ return GetHashCore(raw_running_hash_);
}
@@ -4675,7 +5072,12 @@ bool String::AsArrayIndex(uint32_t* index) {
Object* JSReceiver::GetPrototype() {
- return HeapObject::cast(this)->map()->prototype();
+ return map()->prototype();
+}
+
+
+Object* JSReceiver::GetConstructor() {
+ return map()->constructor();
}
@@ -4699,6 +5101,16 @@ PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
return GetPropertyAttributeWithReceiver(this, key);
}
+
+PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, true);
+}
+
+
// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
@@ -4723,7 +5135,26 @@ bool JSReceiver::HasElement(uint32_t index) {
if (IsJSProxy()) {
return JSProxy::cast(this)->HasElementWithHandler(index);
}
- return JSObject::cast(this)->HasElementWithReceiver(this, index);
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, true) != ABSENT;
+}
+
+
+bool JSReceiver::HasLocalElement(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->HasElementWithHandler(index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, false) != ABSENT;
+}
+
+
+PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, false);
}
@@ -4767,6 +5198,13 @@ void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
}
+bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
+ Object* function_template = expected_receiver_type();
+ if (!function_template->IsFunctionTemplateInfo()) return true;
+ return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
+}
+
+
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
Object* key,
@@ -4780,7 +5218,9 @@ void Dictionary<Shape, Key>::SetEntry(int entry,
Object* key,
Object* value,
PropertyDetails details) {
- ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
+ ASSERT(!key->IsString() ||
+ details.IsDeleted() ||
+ details.dictionary_index() > 0);
int index = HashTable<Shape, Key>::EntryToIndex(entry);
AssertNoAllocation no_gc;
WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
@@ -4878,13 +5318,13 @@ void Map::ClearCodeCache(Heap* heap) {
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
// - IncrementalMarking::Step
- ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
- WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
+ ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
+ WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
void JSArray::EnsureSize(int required_size) {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elts = FixedArray::cast(elements());
const int kArraySizeThatFitsComfortablyInNewSpace = 128;
if (elts->length() < required_size) {
@@ -4916,13 +5356,13 @@ bool JSArray::AllowsSetElementsLength() {
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
MaybeObject* maybe_result = EnsureCanContainElements(
- storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
+ storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
if (maybe_result->IsFailure()) return maybe_result;
ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
- GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
+ IsFastDoubleElementsKind(GetElementsKind())) ||
((storage->map() != GetHeap()->fixed_double_array_map()) &&
- ((GetElementsKind() == FAST_ELEMENTS) ||
- (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ (IsFastObjectElementsKind(GetElementsKind()) ||
+ (IsFastSmiElementsKind(GetElementsKind()) &&
FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
set_elements(storage);
set_length(Smi::FromInt(storage->length()));
@@ -4942,13 +5382,13 @@ MaybeObject* FixedDoubleArray::Copy() {
}
-void TypeFeedbackCells::SetAstId(int index, Smi* id) {
- set(1 + index * 2, id);
+void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
+ set(1 + index * 2, Smi::FromInt(id.ToInt()));
}
-Smi* TypeFeedbackCells::AstId(int index) {
- return Smi::cast(get(1 + index * 2));
+TypeFeedbackId TypeFeedbackCells::AstId(int index) {
+ return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}
@@ -4973,13 +5413,88 @@ Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
- return heap->raw_unchecked_the_hole_value();
+ return heap->the_hole_value();
+}
+
+
+int TypeFeedbackInfo::ic_total_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return ICTotalCountField::decode(current);
+}
+
+
+void TypeFeedbackInfo::set_ic_total_count(int count) {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ value = ICTotalCountField::update(value,
+ ICTotalCountField::decode(count));
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+int TypeFeedbackInfo::ic_with_type_info_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ return ICsWithTypeInfoCountField::decode(current);
+}
+
+
+void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
+ // We can get negative count here when the type-feedback info is
+  // shared between two code objects. This can only happen when
+ // the debugger made a shallow copy of code object (see Heap::CopyCode).
+ // Since we do not optimize when the debugger is active, we can skip
+ // this counter update.
+ if (new_count >= 0) {
+ new_count &= ICsWithTypeInfoCountField::kMask;
+ value = ICsWithTypeInfoCountField::update(value, new_count);
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+ }
+}
+
+
+void TypeFeedbackInfo::initialize_storage() {
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
+}
+
+
+void TypeFeedbackInfo::change_own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ int checksum = OwnTypeChangeChecksum::decode(value);
+ checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
+ value = OwnTypeChangeChecksum::update(value, checksum);
+ // Ensure packed bit field is in Smi range.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ value = InlinedTypeChangeChecksum::update(value, checksum & mask);
+ // Ensure packed bit field is in Smi range.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+}
+
+
+int TypeFeedbackInfo::own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return OwnTypeChangeChecksum::decode(value);
+}
+
+
+bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
-SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
-SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
- kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
kTypeFeedbackCellsOffset)
@@ -5049,14 +5564,13 @@ void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
-#define SLOT_ADDR(obj, offset) \
- reinterpret_cast<Object**>((obj)->address() + offset)
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
HeapObject* obj,
ObjectVisitor* v) {
- v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
+ v->VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, end_offset));
}
@@ -5064,10 +5578,10 @@ template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
int object_size,
ObjectVisitor* v) {
- v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
+ v->VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, object_size));
}
-#undef SLOT_ADDR
#undef TYPE_CHECKER
#undef CAST_ACCESSOR