Diffstat (limited to 'src/3rdparty/v8/src/arm/full-codegen-arm.cc')
-rw-r--r--  src/3rdparty/v8/src/arm/full-codegen-arm.cc | 425
1 file changed, 213 insertions(+), 212 deletions(-)
diff --git a/src/3rdparty/v8/src/arm/full-codegen-arm.cc b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
index 2555c04..9a7b116 100644
--- a/src/3rdparty/v8/src/arm/full-codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
@@ -73,9 +73,6 @@ class JumpPatchSite BASE_EMBEDDED {
Assembler::BlockConstPoolScope block_const_pool(masm_);
__ bind(&patch_site_);
__ cmp(reg, Operand(reg));
- // Don't use b(al, ...) as that might emit the constant pool right after the
- // branch. After patching when the branch is no longer unconditional
- // execution can continue into the constant pool.
__ b(eq, target); // Always taken before patched.
}
@@ -90,6 +87,8 @@ class JumpPatchSite BASE_EMBEDDED {
}
void EmitPatchInfo() {
+ // Block literal pool emission whilst recording patch site information.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
if (patch_site_.is_bound()) {
int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
Register reg;
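
// Not part of this commit, a sketch of why both hunks above block the
// constant pool: the IC patcher locates the cmp/branch pair by walking
// back a fixed instruction count from the recorded patch info, roughly
//
//   Address patch_address =
//       info_address - delta_to_patch_site * Assembler::kInstrSize;
//
// On ARM kInstrSize is 4 bytes, and this arithmetic only holds if every
// word between the two sites is an instruction. A literal pool dumped in
// between would silently skew the delta, which is also why the removed
// comment warned against b(al, ...) emitting the pool right after the
// branch.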
@@ -135,6 +134,8 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -148,12 +149,15 @@ void FullCodeGenerator::Generate() {
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
Label ok;
+ Label begin;
+ __ bind(&begin);
__ cmp(r5, Operand(0));
__ b(eq, &ok);
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ str(r2, MemOperand(sp, receiver_offset));
__ bind(&ok);
+ ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
}
// Open a frame scope to indicate that there is a frame on the stack. The
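
// An inference from the assert above, not spelled out in the diff: the
// new begin label brackets the strict-mode receiver fix-up together with
// the existing ok label so its exact byte size can be measured, and
// ASSERT_EQ pins that size to kSizeOfFullCodegenStrictModePrologue, a
// constant that code elsewhere presumably uses to skip or patch this
// prologue.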
@@ -163,12 +167,12 @@ void FullCodeGenerator::Generate() {
int locals_count = info->scope()->num_stack_slots();
- __ Push(lr, fp, cp, r1);
- if (locals_count > 0) {
- // Load undefined value here, so the value is ready for the loop
- // below.
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- }
+ // The following four instructions must remain together and unmodified for
+ // code aging to work properly.
+ __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ // Load undefined value here, so the value is ready for the loop
+ // below.
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
// Adjust fp to point to caller's fp.
__ add(fp, sp, Operand(2 * kPointerSize));
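
// A hedged sketch of the code-aging contract referenced above: aging
// works by recognizing this exact prologue and overwriting its start
// with a call to a stub that marks the code as old, then restoring the
// original words when the function is next used. Something like
//
//   bool Code::IsYoungSequence(byte* sequence) {
//     // kYoungSequence / kNoCodeAgeSequenceLength are illustrative
//     // names for the canonical stm/ldr/add pattern and its length.
//     return memcmp(sequence, kYoungSequence,
//                   kNoCodeAgeSequenceLength) == 0;
//   }
//
// only works if every full-codegen function emits identical bytes here,
// which is why the conditional LoadRoot was made unconditional.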
@@ -184,11 +188,14 @@ void FullCodeGenerator::Generate() {
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0 ||
(scope()->is_qml_mode() && scope()->is_global_scope())) {
- Comment cmnt(masm_, "[ Allocate local context");
- // Argument to NewContext is the function, which is in r1.
+ // Argument to NewContext is the function, which is still in r1.
+ Comment cmnt(masm_, "[ Allocate context");
__ push(r1);
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ Push(info->scope()->GetScopeInfo());
+ __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -264,7 +271,7 @@ void FullCodeGenerator::Generate() {
scope()->VisitIllegalRedeclaration(this);
} else {
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
@@ -279,11 +286,12 @@ void FullCodeGenerator::Generate() {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
@@ -330,7 +338,7 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
if (isolate()->IsDebuggerActive()) {
// Detect debug break requests as soon as possible.
- reset_value = 10;
+ reset_value = FLAG_interrupt_budget >> 4;
}
__ mov(r2, Operand(profiling_counter_));
__ mov(r3, Operand(Smi::FromInt(reset_value)));
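
// Assumed arithmetic, for illustration only: replacing the magic 10 with
// FLAG_interrupt_budget >> 4 makes the debug reset value scale with the
// configured budget. With a budget of, say, 5888 the counter resets to
// 368, so debug-break requests are noticed roughly sixteen times sooner
// than ordinary interrupts without pinning the counter to a tiny
// hard-coded value.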
@@ -338,13 +346,11 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
-static const int kMaxBackEdgeWeight = 127;
-static const int kBackEdgeDistanceDivisor = 142;
-
-
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
+ // Block literal pools whilst emitting stack check code.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
Label ok;
if (FLAG_count_based_interrupts) {
@@ -353,7 +359,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(back_edge_target->is_bound());
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
__ b(pl, &ok);
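
// Worked example, constants assumed from the hunk above (kMaxBackEdgeWeight
// of 127 and a distance unit near the removed divisor of 142): a back
// edge 1000 bytes from its target gets weight
// Min(127, Max(1, 1000 / 142)) = 7, so larger loop bodies drain the
// profiling counter faster per iteration and reach the interrupt (and a
// chance to optimize) in fewer trips around the loop.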
@@ -363,6 +369,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_);
StackCheckStub stub;
__ CallStub(&stub);
}
@@ -405,7 +412,7 @@ void FullCodeGenerator::EmitReturnSequence() {
} else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
Label ok;
@@ -436,6 +443,7 @@ void FullCodeGenerator::EmitReturnSequence() {
// tool from instrumenting as we rely on the code size here.
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
+ PredictableCodeSizeScope predictable(masm_);
__ RecordJSReturn();
masm_->mov(sp, fp);
masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
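
// A hedged sketch of what the new PredictableCodeSizeScope protects: the
// instructions after RecordJSReturn form the JS return sequence that the
// debugger overwrites in place with a debug-break call, roughly
//
//   mov  sp, fp
//   ldm  sp!, {fp, lr}
//   add  sp, sp, #sp_delta
//   bx   lr
//
// Patching only works if the sequence always occupies the same number of
// instructions, which is also why the comment above warns instrumentation
// tools away from this region.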
@@ -675,18 +683,9 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through) {
- if (CpuFeatures::IsSupported(VFP3)) {
- ToBooleanStub stub(result_register());
- __ CallStub(&stub);
- __ tst(result_register(), result_register());
- } else {
- // Call the runtime to find the boolean value of the source and then
- // translate it into control flow to the pair of labels.
- __ push(result_register());
- __ CallRuntime(Runtime::kToBool, 1);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(r0, ip);
- }
+ ToBooleanStub stub(result_register());
+ __ CallStub(&stub);
+ __ tst(result_register(), result_register());
Split(ne, if_true, if_false, fall_through);
}
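
// An inference from the contract visible here: dropping the VFP3 check
// means ToBooleanStub is now expected to work on every supported target.
// As used above, the stub leaves zero in result_register() for a falsy
// value and nonzero otherwise, so tst sets Z exactly for false and
// Split(ne, ...) routes any nonzero result to if_true; the old
// Runtime::kToBool fallback with its LoadRoot/cmp against the false
// value becomes unnecessary.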
@@ -787,7 +786,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
// Check that we're not inside a with or catch context.
__ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ CompareRoot(r1, Heap::kWithContextMapRootIndex);
@@ -809,11 +808,12 @@ void FullCodeGenerator::VisitVariableDeclaration(
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
globals_->Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(),
+ zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
case Variable::PARAMETER:
@@ -840,10 +840,9 @@ void FullCodeGenerator::VisitVariableDeclaration(
Comment cmnt(masm_, "[ VariableDeclaration");
__ mov(r2, Operand(variable->name()));
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR || mode == LET ||
- mode == CONST || mode == CONST_HARMONY);
- PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
- ? READ_ONLY : NONE;
+ ASSERT(IsDeclaredVariableMode(mode));
+ PropertyAttributes attr =
+ IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
__ mov(r1, Operand(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -869,13 +868,13 @@ void FullCodeGenerator::VisitFunctionDeclaration(
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_->Add(function);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(function, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
}
@@ -929,9 +928,9 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
switch (variable->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name());
- globals_->Add(instance);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
Visit(declaration->module());
break;
}
@@ -1135,26 +1134,34 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// modification check. Otherwise, we got a fixed array, and we have
// to do a slow check.
Label fixed_array;
- __ mov(r2, r0);
- __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kMetaMapRootIndex);
- __ cmp(r1, ip);
+ __ cmp(r2, ip);
__ b(ne, &fixed_array);
// We got a map in register r0. Get the enumeration cache from it.
+ Label no_descriptors;
__ bind(&use_cache);
- __ LoadInstanceDescriptors(r0, r1);
- __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
- __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
+
+ __ EnumLength(r1, r0);
+ __ cmp(r1, Operand(Smi::FromInt(0)));
+ __ b(eq, &no_descriptors);
+
+ __ LoadInstanceDescriptors(r0, r2);
+ __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
+ __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(r0); // Map.
- __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
__ mov(r0, Operand(Smi::FromInt(0)));
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(r2, r1, r0);
__ jmp(&loop);
+ __ bind(&no_descriptors);
+ __ Drop(1);
+ __ jmp(&exit);
+
// We got a fixed array in register r0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
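
// A layout note inferred from the pushes above: on the map fast path the
// for-in loop state occupies four stack slots above the enumerable
// object,
//
//   [ map | enum cache array | cache length (smi) | index (smi 0) ]
//
// and the new no_descriptors exit covers maps whose EnumLength is zero:
// instead of entering the loop with an empty cache it drops the object
// and jumps straight to &exit.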
@@ -1163,7 +1170,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
isolate()->factory()->NewJSGlobalPropertyCell(
Handle<Object>(
Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
- RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
__ LoadHeapObject(r1, cell);
__ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
__ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
@@ -1319,9 +1326,9 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ Move(next, current);
}
__ bind(&loop);
- // Terminate at global context.
+ // Terminate at native context.
__ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
__ cmp(temp, ip);
__ b(eq, &fast);
// Check that extension is NULL.
@@ -1607,9 +1614,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
- AccessorTable accessor_table(isolate()->zone());
+ AccessorTable accessor_table(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1635,7 +1642,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1705,7 +1712,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1726,8 +1733,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1755,7 +1761,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ ldr(r6, MemOperand(sp)); // Copy of array literal.
__ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
@@ -1842,11 +1848,11 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
}
}
@@ -1903,7 +1909,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ mov(r2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name r0 and r2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1911,7 +1917,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1938,7 +1944,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -2021,7 +2028,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(r1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(r0);
}
@@ -2152,7 +2160,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, r1);
- if (FLAG_debug_code && op == Token::INIT_LET) {
+ if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ ldr(r2, location);
__ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
@@ -2185,43 +2193,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- // Load receiver to r1. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r1, MemOperand(sp));
- } else {
- __ pop(r1);
- }
+ __ pop(r1);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2230,44 +2211,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ pop(r1); // Key.
- // Load receiver to r2. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r2, MemOperand(sp));
- } else {
- __ pop(r2);
- }
+ __ pop(r2);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2280,6 +2233,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(r0);
} else {
VisitForStackValue(expr->obj());
@@ -2293,9 +2247,11 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
ic_total_count_++;
- __ Call(code, rmode, ast_id);
+ // All calls must have a predictable size in full-codegen code to ensure that
+ // the debugger can patch them correctly.
+ __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
}
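
// A hedged note on NEVER_INLINE_TARGET_ADDRESS: it forces the call
// target through a constant-pool load rather than any shorter
// pc-relative form the assembler might choose, so every full-codegen
// call site has the same fixed shape, roughly
//
//   ldr  ip, [pc, #<literal slot>]
//   blx  ip
//
// letting the debugger swap in a breakpoint call of identical length.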
void FullCodeGenerator::EmitCallWithIC(Call* expr,
@@ -2315,7 +2271,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2348,7 +2304,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2368,16 +2324,14 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
- // Record call targets in unoptimized code, but not in the snapshot.
- if (!Serializer::enabled()) {
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(r2, Operand(cell));
- }
+ // Record call targets in unoptimized code.
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ mov(r2, Operand(cell));
CallFunctionStub stub(arg_count, flags);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
@@ -2571,21 +2525,15 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(r0, Operand(arg_count));
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
- // Record call targets in unoptimized code, but not in the snapshot.
- CallFunctionFlags flags;
- if (!Serializer::enabled()) {
- flags = RECORD_CALL_TARGET;
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(r2, Operand(cell));
- } else {
- flags = NO_CALL_FUNCTION_FLAGS;
- }
+ // Record call targets in unoptimized code.
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ mov(r2, Operand(cell));
- CallConstructStub stub(flags);
+ CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(r0);
@@ -2727,7 +2675,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (FLAG_debug_code) __ AbortIfSmi(r0);
+ __ AssertNotSmi(r0);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
@@ -2742,27 +2690,31 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ b(eq, if_false);
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(r3, r1);
+ __ cmp(r3, Operand(0));
+ __ b(eq, &done);
+
__ LoadInstanceDescriptors(r1, r4);
- __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
- // r4: descriptor array
- // r3: length of descriptor array
- // Calculate the end of the descriptor array.
+ // r4: descriptor array.
+ // r3: valid entries in the descriptor array.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kPointerSize == 4);
- __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
+ __ mul(r3, r3, ip);
+ // Calculate location of the first key name.
+ __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
+ // Calculate the end of the descriptor array.
+ __ mov(r2, r4);
__ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
- // Calculate location of the first key name.
- __ add(r4,
- r4,
- Operand(FixedArray::kHeaderSize - kHeapObjectTag +
- DescriptorArray::kFirstIndex * kPointerSize));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
// The use of ip to store the valueOf symbol assumes that it is not otherwise
// used in the loop below.
__ mov(ip, Operand(FACTORY->value_of_symbol()));
@@ -2771,18 +2723,19 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ ldr(r3, MemOperand(r4, 0));
__ cmp(r3, ip);
__ b(eq, if_false);
- __ add(r4, r4, Operand(kPointerSize));
+ __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
__ bind(&entry);
__ cmp(r4, Operand(r2));
__ b(ne, &loop);
- // If a valueOf property is not found on the object check that it's
+ __ bind(&done);
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
__ JumpIfSmi(r2, if_false);
__ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
- __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
__ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ cmp(r2, r3);
__ b(ne, if_false);
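
// Stride arithmetic, hedged: the scan now advances by
// DescriptorArray::kDescriptorSize * kPointerSize per entry instead of
// kPointerSize, consistent with each descriptor being a multi-word
// (key, details, value) record, and the end pointer is computed as
//
//   r2 = first_key + NumberOfOwnDescriptors * kDescriptorSize * 4
//
// with the smi-untag folded into the LSL shift. Only own descriptors are
// walked, which is the source of the false-positive caveat in the
// rewritten comment above.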
@@ -3059,13 +3012,14 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
// Convert 32 random bits in r0 to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
__ PrepareCallCFunction(1, r0);
- __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ __ ldr(r0,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
// Create this constant using mov/orr to avoid PC relative load.
__ mov(r1, Operand(0x41000000));
@@ -3082,9 +3036,10 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
__ mov(r0, r4);
} else {
__ PrepareCallCFunction(2, r0);
- __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+ __ ldr(r1,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
__ mov(r0, Operand(r4));
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
+ __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
__ CallCFunction(
ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
}
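
// The bit trick spelled out, assuming IEEE-754 doubles: 0x41300000 is
// the high word of 2^20 (biased exponent 0x413, mantissa zero). With 32
// random bits r as the low mantissa word the double decodes as
//
//   2^20 * (1 + r * 2^-52) = 2^20 + r * 2^-32
//
// so subtracting 1.0 * 2^20 leaves r / 2^32, uniform in [0, 1). The
// mov/orr pair builds the constant without a pc-relative literal load,
// per the comment in the code.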
@@ -3146,20 +3101,19 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); // Load the object.
- Label runtime, done;
+ Label runtime, done, not_date_object;
Register object = r0;
Register result = r0;
Register scratch0 = r9;
Register scratch1 = r1;
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ JumpIfSmi(object, &not_date_object);
__ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
- __ Assert(eq, "Trying to get date field from non-date.");
-#endif
+ __ b(ne, &not_date_object);
if (index->value() == 0) {
__ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ __ jmp(&done);
} else {
if (index->value() < JSDate::kFirstUncachedField) {
ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
@@ -3176,8 +3130,12 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
__ PrepareCallCFunction(2, scratch1);
__ mov(r1, Operand(index));
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
- __ bind(&done);
+ __ jmp(&done);
}
+
+ __ bind(&not_date_object);
+ __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ bind(&done);
context()->Plug(r0);
}
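
// A control-flow summary of the rewritten EmitDateField, pieced together
// from the hunks above:
//
//   smi or not JS_DATE_TYPE      -> Runtime::kThrowNotDateError
//   index == 0                   -> load JSDate::kValueOffset directly
//   cached field, stamp current  -> load the cached field
//   otherwise                    -> C call to get_date_field_function
//
// The debug-only AbortIfSmi/Assert pair becomes a real runtime check, so
// release builds now throw instead of reading a field off a non-date.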
@@ -3188,7 +3146,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
MathPowStub stub(MathPowStub::ON_STACK);
__ CallStub(&stub);
} else {
@@ -3440,10 +3398,11 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
- // Check for proxy.
- Label proxy, done;
- __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
- __ b(eq, &proxy);
+ Label runtime, done;
+ // Check for non-function argument (including proxy).
+ __ JumpIfSmi(r0, &runtime);
+ __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
+ __ b(ne, &runtime);
// InvokeFunction requires the function in r1. Move it in there.
__ mov(r1, result_register());
@@ -3453,7 +3412,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
- __ bind(&proxy);
+ __ bind(&runtime);
__ push(r0);
__ CallRuntime(Runtime::kCall, args->length());
__ bind(&done);
@@ -3481,7 +3440,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- isolate()->global_context()->jsfunction_result_caches());
+ isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3493,8 +3452,8 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Register key = r0;
Register cache = r1;
- __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
+ __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
__ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ ldr(cache,
FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3591,9 +3550,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(r0);
- }
+ __ AssertString(r0);
__ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
__ IndexFromHash(r0, r0);
@@ -3665,7 +3622,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// string_length: Accumulated sum of string lengths (smi).
// element: Current array element.
// elements_end: Array end.
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
__ cmp(array_length, Operand(0));
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
}
@@ -3863,7 +3820,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -4018,7 +3975,8 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register r0.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
context()->Plug(r0);
}
@@ -4076,7 +4034,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
- PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
@@ -4129,7 +4087,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4161,7 +4119,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4178,7 +4136,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4387,7 +4345,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ cmp(r0, Operand(0));
@@ -4471,7 +4429,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
if (declaration_scope->is_global_scope() ||
declaration_scope->is_module_scope()) {
- // Contexts nested in the global context have a canonical empty function
+ // Contexts nested in the native context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
// function.
@@ -4501,14 +4459,57 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
STATIC_ASSERT(kSmiTag == 0);
__ add(r1, r1, Operand(r1)); // Convert to smi.
+
+ // Store result register while executing finally block.
+ __ push(r1);
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ ldr(r1, MemOperand(ip));
+ __ push(r1);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ ldr(r1, MemOperand(ip));
+ __ SmiTag(r1);
+ __ push(r1);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ ldr(r1, MemOperand(ip));
__ push(r1);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(r1));
+ // Restore pending message from stack.
+ __ pop(r1);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ __ SmiUntag(r1);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ str(r1, MemOperand(ip));
+
// Restore result register from stack.
__ pop(r1);
+
// Uncook return address and return.
__ pop(result_register());
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
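
// A symmetry note on the two hunks above: EnterFinallyBlock now saves
// the isolate's pending-message state (message object, has-pending flag,
// and script) on the stack alongside the result register, and
// ExitFinallyBlock pops the same values in exact reverse order before
// uncooking the return address. A throw that unwinds through a finally
// block therefore cannot clobber a message that was pending when the
// block was entered.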