Diffstat (limited to 'src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc')
 src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc | 427 +++++++++++------
 1 file changed, 296 insertions(+), 131 deletions(-)
diff --git a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
index c31b0c2..26d0f92 100644
--- a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
@@ -85,7 +85,7 @@ void MacroAssembler::RememberedSetHelper(
SaveFPRegsMode save_fp,
MacroAssembler::RememberedSetFinalAction and_then) {
Label done;
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
int3();
@@ -129,17 +129,22 @@ void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
XMMRegister scratch_reg,
Register result_reg) {
Label done;
- ExternalReference zero_ref = ExternalReference::address_of_zero();
- movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
+ Label conv_failure;
+ pxor(scratch_reg, scratch_reg);
+ cvtsd2si(result_reg, input_reg);
+ test(result_reg, Immediate(0xFFFFFF00));
+ j(zero, &done, Label::kNear);
+ cmp(result_reg, Immediate(0x80000000));
+ j(equal, &conv_failure, Label::kNear);
+ mov(result_reg, Immediate(0));
+ setcc(above, result_reg);
+ sub(result_reg, Immediate(1));
+ and_(result_reg, Immediate(255));
+ jmp(&done, Label::kNear);
+ bind(&conv_failure);
Set(result_reg, Immediate(0));
ucomisd(input_reg, scratch_reg);
j(below, &done, Label::kNear);
- ExternalReference half_ref = ExternalReference::address_of_one_half();
- movdbl(scratch_reg, Operand::StaticVariable(half_ref));
- addsd(scratch_reg, input_reg);
- cvttsd2si(result_reg, Operand(scratch_reg));
- test(result_reg, Immediate(0xFFFFFF00));
- j(zero, &done, Label::kNear);
Set(result_reg, Immediate(255));
bind(&done);
}
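
The rewrite drops the memory-loaded 0.0/0.5 constants in favour of a branchless integer clamp. As a reading aid (not part of the patch), here is a rough C++ model of what the emitted sequence computes, assuming cvtsd2si's default round-to-nearest behaviour and its 0x80000000 "integer indefinite" result for NaN or out-of-range inputs:

    // Hedged scalar model of the clamp; the helper name is hypothetical.
    #include <cmath>
    #include <cstdint>
    #include <limits>

    uint8_t ClampDoubleToUint8Model(double input) {
      const int32_t kIndefinite = std::numeric_limits<int32_t>::min();
      // cvtsd2si: round to nearest; NaN/out-of-range yields 0x80000000.
      int32_t r = (std::isnan(input) || input >= 2147483648.0 ||
                   input < -2147483648.0)
                      ? kIndefinite
                      : static_cast<int32_t>(std::nearbyint(input));
      if ((r & 0xFFFFFF00) == 0) return static_cast<uint8_t>(r);  // in [0, 255]
      if (r != kIndefinite) {
        // setcc(above)/sub/and: negative values map to 0, values > 255 to 255.
        return r < 0 ? 0 : 255;
      }
      // conv_failure: ucomisd against +0.0. 'below' (CF set) is taken for
      // negative inputs and for NaN (unordered also sets CF), yielding 0;
      // anything else here is a large positive value, yielding 255.
      return (input < 0.0 || std::isnan(input)) ? 0 : 255;
    }
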
@@ -155,6 +160,24 @@ void MacroAssembler::ClampUint8(Register reg) {
}
+static double kUint32Bias =
+ static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
+
+
+void MacroAssembler::LoadUint32(XMMRegister dst,
+ Register src,
+ XMMRegister scratch) {
+ Label done;
+ cmp(src, Immediate(0));
+ movdbl(scratch,
+ Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
+ cvtsi2sd(dst, src);
+ j(not_sign, &done, Label::kNear);
+ addsd(dst, scratch);
+ bind(&done);
+}
+
+
void MacroAssembler::RecordWriteArray(Register object,
Register value,
Register index,
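
The new LoadUint32 works around cvtsi2sd treating its source as signed: when the sign bit of src is set, the converted value is exactly 2^32 too small, and adding kUint32Bias (2^32, loaded into the scratch register) corrects it. A minimal sketch of the same computation, for illustration only:

    #include <cstdint>

    double LoadUint32Model(uint32_t src) {
      // cvtsi2sd interprets the 32 bits as a signed integer.
      double dst = static_cast<double>(static_cast<int32_t>(src));
      // The not_sign branch is taken when the sign bit is clear; otherwise
      // add back 2^32 (kUint32Bias) to recover the unsigned value.
      if (static_cast<int32_t>(src) < 0) dst += 4294967296.0;
      return dst;
    }
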
@@ -237,6 +260,66 @@ void MacroAssembler::RecordWriteField(
}
+void MacroAssembler::RecordWriteForMap(
+ Register object,
+ Handle<Map> map,
+ Register scratch1,
+ Register scratch2,
+ SaveFPRegsMode save_fp) {
+ Label done;
+
+ Register address = scratch1;
+ Register value = scratch2;
+ if (emit_debug_code()) {
+ Label ok;
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ test_b(address, (1 << kPointerSizeLog2) - 1);
+ j(zero, &ok, Label::kNear);
+ int3();
+ bind(&ok);
+ }
+
+ ASSERT(!object.is(value));
+ ASSERT(!object.is(address));
+ ASSERT(!value.is(address));
+ AssertNotSmi(object);
+
+ if (!FLAG_incremental_marking) {
+ return;
+ }
+
+  // A single check of the map's page's interesting flag suffices, since it is
+  // only set during incremental collection, and then the from object's page's
+  // interesting flag is guaranteed to be set as well. This optimization
+  // relies on the fact that maps can never be in new space.
+ ASSERT(!isolate()->heap()->InNewSpace(*map));
+ CheckPageFlagForMap(map,
+ MemoryChunk::kPointersToHereAreInterestingMask,
+ zero,
+ &done,
+ Label::kNear);
+
+  // Delay the initialization of |address| and |value| for the stub until it's
+  // known that they will be needed. Up until this point their values are not
+  // needed since they are embedded in the operands of instructions that need
+  // them.
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ mov(value, Immediate(map));
+ RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+ CallStub(&stub);
+
+ bind(&done);
+
+ // Clobber clobbered input registers when running with the debug-code flag
+ // turned on to provoke errors.
+ if (emit_debug_code()) {
+ mov(value, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
+ }
+}
+
+
void MacroAssembler::RecordWrite(Register object,
Register address,
Register value,
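
RecordWriteForMap is a map-store specialization of the write barrier: maps never live in new space and are never moved, so the remembered set is skipped and the page-flag test can be performed against a statically known address (see CheckPageFlagForMap further down). A hedged high-level model of the fast path, with hypothetical helper names:

    // Schematic only; the real function emits assembly for this logic.
    void RecordWriteForMapModel(HeapObject* object, Map* map) {
      if (!FLAG_incremental_marking) return;        // folded away at codegen
      // Static page-flag test: the map's page address is a constant.
      if (!(PageOf(map)->flags() &                  // PageOf/flags: hypothetical
            MemoryChunk::kPointersToHereAreInterestingMask)) {
        return;                                     // common case, no stub call
      }
      // Only now materialize the address/value registers and call the stub.
      CallRecordWriteStub(object, map, AddressOfMapSlot(object));
    }
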
@@ -246,16 +329,14 @@ void MacroAssembler::RecordWrite(Register object,
ASSERT(!object.is(value));
ASSERT(!object.is(address));
ASSERT(!value.is(address));
- if (emit_debug_code()) {
- AbortIfSmi(object);
- }
+ AssertNotSmi(object);
if (remembered_set_action == OMIT_REMEMBERED_SET &&
!FLAG_incremental_marking) {
return;
}
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
cmp(value, Operand(address, 0));
j(equal, &ok, Label::kNear);
@@ -382,10 +463,12 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
void MacroAssembler::CheckFastElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
@@ -393,23 +476,26 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(above, fail, distance);
}
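
All three fast-elements predicates (CheckFastElements above and the two rewritten here) lean on the kind ordering fixed by the STATIC_ASSERTs: FAST_SMI_ELEMENTS=0, FAST_HOLEY_SMI_ELEMENTS=1, FAST_ELEMENTS=2, FAST_HOLEY_ELEMENTS=3. Ignoring the bit_field2 encoding that the cmpb instructions actually compare, the jumps reduce to range tests on the kind, roughly:

    // Illustrative reduction of the compare/jump sequences to range tests.
    bool FastElementsModel(int kind)       { return kind <= 3; }  // j(above, fail)
    bool FastObjectElementsModel(int kind) {
      return kind >= 2 && kind <= 3;   // j(below_equal, fail) then j(above, fail)
    }
    bool FastSmiElementsModel(int kind)    { return kind <= 1; }  // smi kinds only
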
@@ -493,24 +579,18 @@ void MacroAssembler::CompareMap(Register obj,
CompareMapMode mode) {
cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ j(equal, early_success, Label::kNear);
+ cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(current_map));
+ }
}
}
}
@@ -592,36 +672,44 @@ void MacroAssembler::FCmp() {
}
-void MacroAssembler::AbortIfNotNumber(Register object) {
- Label ok;
- JumpIfSmi(object, &ok);
- cmp(FieldOperand(object, HeapObject::kMapOffset),
- isolate()->factory()->heap_number_map());
- Assert(equal, "Operand not a number");
- bind(&ok);
+void MacroAssembler::AssertNumber(Register object) {
+ if (emit_debug_code()) {
+ Label ok;
+ JumpIfSmi(object, &ok);
+ cmp(FieldOperand(object, HeapObject::kMapOffset),
+ isolate()->factory()->heap_number_map());
+ Check(equal, "Operand not a number");
+ bind(&ok);
+ }
}
-void MacroAssembler::AbortIfNotSmi(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(equal, "Operand is not a smi");
+void MacroAssembler::AssertSmi(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(equal, "Operand is not a smi");
+ }
}
-void MacroAssembler::AbortIfNotString(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(not_equal, "Operand is not a string");
- push(object);
- mov(object, FieldOperand(object, HeapObject::kMapOffset));
- CmpInstanceType(object, FIRST_NONSTRING_TYPE);
- pop(object);
- Assert(below, "Operand is not a string");
+void MacroAssembler::AssertString(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(not_equal, "Operand is a smi and not a string");
+ push(object);
+ mov(object, FieldOperand(object, HeapObject::kMapOffset));
+ CmpInstanceType(object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Check(below, "Operand is not a string");
+ }
}
-void MacroAssembler::AbortIfSmi(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(not_equal, "Operand is a smi");
+void MacroAssembler::AssertNotSmi(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(not_equal, "Operand is a smi");
+ }
}
@@ -921,23 +1009,24 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
cmp(scratch, Immediate(0));
Check(not_equal, "we should not have an empty lexical context");
}
- // Load the global context of the current context.
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ // Load the native context of the current context.
+ int offset =
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
mov(scratch, FieldOperand(scratch, offset));
- mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
push(scratch);
- // Read the first word and compare to global_context_map.
+ // Read the first word and compare to native_context_map.
mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
- cmp(scratch, isolate()->factory()->global_context_map());
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ cmp(scratch, isolate()->factory()->native_context_map());
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(scratch);
}
// Check if both contexts are the same.
- cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
j(equal, &same_contexts);
// Compare security tokens, save holder_reg on the stack so we can use it
@@ -948,18 +1037,19 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check that the security token in the calling global object is
// compatible with the security token in the receiving global
// object.
- mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ mov(holder_reg,
+ FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
cmp(holder_reg, isolate()->factory()->null_value());
Check(not_equal, "JSGlobalProxy::context() should not be null.");
push(holder_reg);
- // Read the first word and compare to global_context_map(),
+    // Read the first word and compare to native_context_map().
mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
- cmp(holder_reg, isolate()->factory()->global_context_map());
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ cmp(holder_reg, isolate()->factory()->native_context_map());
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg);
}
@@ -1646,7 +1736,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
}
-void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
+void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
@@ -1861,16 +1951,53 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
cmp(Operand::StaticVariable(scheduled_exception_address),
Immediate(isolate()->factory()->the_hole_value()));
j(not_equal, &promote_scheduled_exception);
+
+#if ENABLE_EXTRA_CHECKS
+ // Check if the function returned a valid JavaScript value.
+ Label ok;
+ Register return_value = eax;
+ Register map = ecx;
+
+ JumpIfSmi(return_value, &ok, Label::kNear);
+ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
+
+ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ j(below, &ok, Label::kNear);
+
+ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ j(above_equal, &ok, Label::kNear);
+
+ cmp(map, isolate()->factory()->heap_number_map());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->undefined_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->true_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->false_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->null_value());
+ j(equal, &ok, Label::kNear);
+
+ Abort("API call returned invalid object");
+
+ bind(&ok);
+#endif
+
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
- bind(&promote_scheduled_exception);
- TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
bind(&empty_handle);
// It was zero; the result is undefined.
mov(eax, isolate()->factory()->undefined_value());
jmp(&prologue);
+ bind(&promote_scheduled_exception);
+ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+
// HandleScope limit has changed. Delete allocated extensions.
ExternalReference delete_extensions =
ExternalReference::delete_handle_scope_extensions(isolate());
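
The new ENABLE_EXTRA_CHECKS block verifies, before tearing down the exit frame, that the API callback returned a legal JavaScript value. Written as a predicate, the compare/jump chain accepts exactly this set (a sketch; the predicate spellings are approximate):

    // Sketch of the acceptance test encoded by the chain above.
    bool IsValidApiReturnValue(Object* v) {
      return v->IsSmi() ||
             v->IsString() ||       // instance type below FIRST_NONSTRING_TYPE
             v->IsSpecObject() ||   // instance type >= FIRST_SPEC_OBJECT_TYPE
             v->IsHeapNumber() ||
             v->IsUndefined() || v->IsTrue() || v->IsFalse() || v->IsNull();
    }
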
@@ -2108,7 +2235,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the JavaScript builtin function from the builtins object.
- mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
mov(target, FieldOperand(target,
JSBuiltinsObject::OffsetOfFunctionWithId(id)));
@@ -2157,31 +2284,42 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
- mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- cmp(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ mov(scratch, Operand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmp(map_in_out, FieldOperand(scratch, offset));
j(not_equal, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- mov(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ mov(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
mov(map_out, FieldOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
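
The per-kind array maps now live in a single FixedArray stored at JS_ARRAY_MAPS_INDEX in the native context, indexed by ElementsKind, replacing the old one-slot-per-kind scheme. The offset arithmetic in LoadTransitionedArrayMapConditional is ordinary FixedArray element addressing (sketch; FieldOperand supplies the -kHeapObjectTag adjustment):

    // Untagged offset of the cached map for a given elements kind.
    size_t ArrayMapCacheOffset(int kind) {
      return FixedArrayBase::kHeaderSize + kind * kPointerSize;
    }
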
@@ -2192,10 +2330,11 @@ void MacroAssembler::LoadInitialArrayMap(
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
- mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
- mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ mov(function,
+ Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
+ mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
mov(function, Operand(function, Context::SlotOffset(index)));
}
@@ -2446,12 +2585,13 @@ void MacroAssembler::Abort(const char* msg) {
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- mov(descriptors,
- FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
- Label not_smi;
- JumpIfNotSmi(descriptors, &not_smi);
- mov(descriptors, isolate()->factory()->empty_descriptor_array());
- bind(&not_smi);
+ mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
+}
+
+
+void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
+ mov(dst, FieldOperand(map, Map::kBitField3Offset));
+ DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
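
NumberOfOwnDescriptors replaces the old smi-or-descriptor-array dance with a plain bit-field decode of bit_field3. DecodeField<F> performs the usual mask-and-shift; as a model:

    // Model of the generic DecodeField<F> helper: mask, then shift down.
    template <class F>
    static int DecodeFieldModel(int word) {
      return (word & F::kMask) >> F::kShift;
    }
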
@@ -2475,7 +2615,7 @@ void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
}
and_(scratch,
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
- cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
+ cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
j(not_equal, failure);
}
@@ -2608,6 +2748,28 @@ void MacroAssembler::CheckPageFlag(
}
+void MacroAssembler::CheckPageFlagForMap(
+ Handle<Map> map,
+ int mask,
+ Condition cc,
+ Label* condition_met,
+ Label::Distance condition_met_distance) {
+ ASSERT(cc == zero || cc == not_zero);
+ Page* page = Page::FromAddress(map->address());
+ ExternalReference reference(ExternalReference::page_flags(page));
+ // The inlined static address check of the page's flags relies
+ // on maps never being compacted.
+ ASSERT(!isolate()->heap()->mark_compact_collector()->
+ IsOnEvacuationCandidate(*map));
+ if (mask < (1 << kBitsPerByte)) {
+ test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
+ } else {
+ test(Operand::StaticVariable(reference), Immediate(mask));
+ }
+ j(cc, condition_met, condition_met_distance);
+}
+
+
void MacroAssembler::JumpIfBlack(Register object,
Register scratch0,
Register scratch1,
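
Unlike CheckPageFlag, which must mask an object register at run time to find its page header, CheckPageFlagForMap can embed the flags word's address directly in the instruction, because map space is never compacted (hence the evacuation-candidate assert). Conceptually (accessor names hypothetical):

    // Conceptual form of the static test; the generated code bakes the page
    // address in via an ExternalReference instead of computing it.
    bool PageFlagSetForMap(Map* map, int mask) {
      Page* page = Page::FromAddress(reinterpret_cast<Address>(map));
      return (page->flags() & mask) != 0;   // test_b for byte-sized masks
    }
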
@@ -2692,7 +2854,7 @@ void MacroAssembler::EnsureNotWhite(
test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
j(not_zero, &done, Label::kNear);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
// Check for impossible bit pattern.
Label ok;
push(mask_scratch);
@@ -2744,7 +2906,7 @@ void MacroAssembler::EnsureNotWhite(
bind(&not_external);
// Sequential string, either ASCII or UC16.
- ASSERT(kAsciiStringTag == 0x04);
+ ASSERT(kOneByteStringTag == 0x04);
and_(length, Immediate(kStringEncodingMask));
xor_(length, Immediate(kStringEncodingMask));
add(length, Immediate(0x04));
@@ -2767,7 +2929,7 @@ void MacroAssembler::EnsureNotWhite(
and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
length);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
Check(less_equal, "Live Bytes Count overflow chunk size");
@@ -2777,40 +2939,43 @@ void MacroAssembler::EnsureNotWhite(
}
+void MacroAssembler::EnumLength(Register dst, Register map) {
+ STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
+ mov(dst, FieldOperand(map, Map::kBitField3Offset));
+ and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
+}
+
+
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
- Label next;
+ Label next, start;
mov(ecx, eax);
- bind(&next);
-
- // Check that there are no elements. Register ecx contains the
- // current JS object we've reached through the prototype chain.
- cmp(FieldOperand(ecx, JSObject::kElementsOffset),
- isolate()->factory()->empty_fixed_array());
- j(not_equal, call_runtime);
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in ebx for the subsequent
- // prototype load.
+ // Check if the enum length field is properly initialized, indicating that
+ // there is an enum cache.
mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
- mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
- JumpIfSmi(edx, call_runtime);
- // Check that there is an enum cache in the non-empty instance
- // descriptors (edx). This is the case if the next enumeration
- // index field does not contain a smi.
- mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
- JumpIfSmi(edx, call_runtime);
+ EnumLength(edx, ebx);
+ cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
+ j(equal, call_runtime);
+
+ jmp(&start);
+
+ bind(&next);
+ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
// For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- cmp(ecx, eax);
- j(equal, &check_prototype, Label::kNear);
- mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- cmp(edx, isolate()->factory()->empty_fixed_array());
+ EnumLength(edx, ebx);
+ cmp(edx, Immediate(Smi::FromInt(0)));
+ j(not_equal, call_runtime);
+
+ bind(&start);
+
+  // Check that there are no elements. Register ecx contains the current JS
+  // object we've reached through the prototype chain.
+ mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
+ cmp(ecx, isolate()->factory()->empty_fixed_array());
j(not_equal, call_runtime);
- // Load the prototype from the map and loop if non-null.
- bind(&check_prototype);
mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
cmp(ecx, isolate()->factory()->null_value());
j(not_equal, &next);
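
Two reading aids for this hunk, neither part of the patch. First, EnumLength: since EnumLengthBits sits at shift 0 of the smi-tagged bit_field3, ANDing with the smi-tagged mask extracts the length while keeping it a valid smi (on ia32, Smi::FromInt(x) is x << 1):

    // Model: masking two smis masks their payloads; result stays smi-tagged.
    int EnumLengthModelAsSmi(int bit_field3_as_smi) {
      return bit_field3_as_smi & (Map::EnumLengthBits::kMask << 1);
    }

Second, the restructured CheckEnumCache: the receiver's enum length is validated once up front, then the loop walks the prototype chain requiring an empty enum cache and empty elements for every other object. A rough model of the control flow, with hypothetical stand-ins for EnumLength and empty_fixed_array:

    // jmp(&start) skips the empty-cache test for the receiver itself.
    bool CheckEnumCacheModel(JSObject* receiver) {
      if (EnumLength(receiver->map()) == Map::kInvalidEnumCache)
        return false;                          // -> call_runtime
      for (JSObject* current = receiver; ; ) {
        if (current != receiver && EnumLength(current->map()) != 0)
          return false;                        // cache must be empty here
        if (current->elements() != empty_fixed_array)
          return false;                        // any elements -> call_runtime
        Object* proto = current->map()->prototype();
        if (proto->IsNull()) return true;      // chain fully verified
        current = JSObject::cast(proto);
      }
    }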