Diffstat (limited to 'chromium/v8/src/mips/full-codegen-mips.cc')
-rw-r--r--  chromium/v8/src/mips/full-codegen-mips.cc  1058
1 file changed, 462 insertions(+), 596 deletions(-)
diff --git a/chromium/v8/src/mips/full-codegen-mips.cc b/chromium/v8/src/mips/full-codegen-mips.cc
index 3ce2ab5f19c..41acad355f9 100644
--- a/chromium/v8/src/mips/full-codegen-mips.cc
+++ b/chromium/v8/src/mips/full-codegen-mips.cc
@@ -1,31 +1,8 @@
// Copyright 2012 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
#if V8_TARGET_ARCH_MIPS
@@ -37,18 +14,18 @@
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
-#include "code-stubs.h"
-#include "codegen.h"
-#include "compiler.h"
-#include "debug.h"
-#include "full-codegen.h"
-#include "isolate-inl.h"
-#include "parser.h"
-#include "scopes.h"
-#include "stub-cache.h"
+#include "src/code-stubs.h"
+#include "src/codegen.h"
+#include "src/compiler.h"
+#include "src/debug.h"
+#include "src/full-codegen.h"
+#include "src/isolate-inl.h"
+#include "src/parser.h"
+#include "src/scopes.h"
+#include "src/stub-cache.h"
-#include "mips/code-stubs-mips.h"
-#include "mips/macro-assembler-mips.h"
+#include "src/mips/code-stubs-mips.h"
+#include "src/mips/macro-assembler-mips.h"
namespace v8 {
namespace internal {
@@ -84,7 +61,7 @@ class JumpPatchSite BASE_EMBEDDED {
__ bind(&patch_site_);
__ andi(at, reg, 0);
// Always taken before patched.
- __ Branch(target, eq, at, Operand(zero_reg));
+ __ BranchShort(target, eq, at, Operand(zero_reg));
}
// When initially emitting this ensure that a jump is never generated to skip
@@ -95,7 +72,7 @@ class JumpPatchSite BASE_EMBEDDED {
__ bind(&patch_site_);
__ andi(at, reg, 0);
// Never taken before patched.
- __ Branch(target, ne, at, Operand(zero_reg));
+ __ BranchShort(target, ne, at, Operand(zero_reg));
}
void EmitPatchInfo() {
@@ -138,6 +115,7 @@ void FullCodeGenerator::Generate() {
CompilationInfo* info = info_;
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@@ -152,16 +130,21 @@ void FullCodeGenerator::Generate() {
}
#endif
- // Strict mode functions and builtins need to replace the receiver
- // with undefined when called as functions (without an explicit
- // receiver object). t1 is zero for method calls and non-zero for
- // function calls.
- if (!info->is_classic_mode() || info->is_native()) {
+ // Sloppy mode functions and builtins need to replace the receiver with the
+ // global proxy when called as functions (without an explicit receiver
+ // object).
+ if (info->strict_mode() == SLOPPY && !info->is_native()) {
Label ok;
- __ Branch(&ok, eq, t1, Operand(zero_reg));
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
+ __ lw(at, MemOperand(sp, receiver_offset));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ Branch(&ok, ne, a2, Operand(at));
+
+ __ lw(a2, GlobalObjectOperand());
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
+
__ sw(a2, MemOperand(sp, receiver_offset));
+
__ bind(&ok);
}
@@ -171,7 +154,7 @@ void FullCodeGenerator::Generate() {
FrameScope frame_scope(masm_, StackFrame::MANUAL);
info->set_prologue_offset(masm_->pc_offset());
- __ Prologue(BUILD_FUNCTION_FRAME);
+ __ Prologue(info->IsCodePreAgingActive());
info->AddNoFrameRange(0, masm_->pc_offset());
{ Comment cmnt(masm_, "[ Allocate locals");
@@ -179,21 +162,35 @@ void FullCodeGenerator::Generate() {
// Generators allocate locals, if any, in context slots.
ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count > 0) {
- __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
- // Emit a loop to initialize stack cells for locals when optimizing for
- // size. Otherwise, unroll the loop for maximum performance.
+ if (locals_count >= 128) {
+ Label ok;
+ __ Subu(t5, sp, Operand(locals_count * kPointerSize));
+ __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
+ __ Branch(&ok, hs, t5, Operand(a2));
+ __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
+ __ bind(&ok);
+ }
__ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
- if (FLAG_optimize_for_size && locals_count > 4) {
- Label loop;
- __ li(a2, Operand(locals_count));
- __ bind(&loop);
- __ Subu(a2, a2, 1);
- __ push(t5);
- __ Branch(&loop, gt, a2, Operand(zero_reg));
- } else {
- for (int i = 0; i < locals_count; i++) {
- __ push(t5);
+ int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
+ if (locals_count >= kMaxPushes) {
+ int loop_iterations = locals_count / kMaxPushes;
+ __ li(a2, Operand(loop_iterations));
+ Label loop_header;
+ __ bind(&loop_header);
+ // Do pushes.
+ __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
+ for (int i = 0; i < kMaxPushes; i++) {
+ __ sw(t5, MemOperand(sp, i * kPointerSize));
}
+ // Continue loop if not done.
+ __ Subu(a2, a2, Operand(1));
+ __ Branch(&loop_header, ne, a2, Operand(zero_reg));
+ }
+ int remaining = locals_count % kMaxPushes;
+ // Emit the remaining pushes.
+ __ Subu(sp, sp, Operand(remaining * kPointerSize));
+ for (int i = 0; i < remaining; i++) {
+ __ sw(t5, MemOperand(sp, i * kPointerSize));
}
}
}
@@ -205,20 +202,25 @@ void FullCodeGenerator::Generate() {
if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate context");
// Argument to NewContext is the function, which is still in a1.
- __ push(a1);
+ bool need_write_barrier = true;
if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ push(a1);
__ Push(info->scope()->GetScopeInfo());
- __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
+ // Result of FastNewContextStub is always in new space.
+ need_write_barrier = false;
} else {
- __ CallRuntime(Runtime::kNewFunctionContext, 1);
+ __ push(a1);
+ __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
}
function_in_register = false;
- // Context is returned in both v0 and cp. It replaces the context
- // passed to us. It's saved in the stack and kept live in cp.
- __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ // Context is returned in v0. It replaces the context passed to us.
+ // It's saved in the stack and kept live in cp.
+ __ mov(cp, v0);
+ __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
@@ -233,8 +235,15 @@ void FullCodeGenerator::Generate() {
__ sw(a0, target);
// Update the write barrier.
- __ RecordWriteContextSlot(
- cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
+ if (need_write_barrier) {
+ __ RecordWriteContextSlot(
+ cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
+ } else if (FLAG_debug_code) {
+ Label done;
+ __ JumpIfInNewSpace(cp, a0, &done);
+ __ Abort(kExpectedNewSpaceObject);
+ __ bind(&done);
+ }
}
}
}
@@ -262,14 +271,14 @@ void FullCodeGenerator::Generate() {
// The stub will rewrite receiever and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub::Type type;
- if (!is_classic_mode()) {
+ if (strict_mode() == STRICT) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
- type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+ type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
} else {
- type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+ type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, v0, a1, a2);
@@ -293,7 +302,7 @@ void FullCodeGenerator::Generate() {
if (scope()->is_function_scope() && scope()->function() != NULL) {
VariableDeclaration* function = scope()->function();
ASSERT(function->proxy()->var()->mode() == CONST ||
- function->proxy()->var()->mode() == CONST_HARMONY);
+ function->proxy()->var()->mode() == CONST_LEGACY);
ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
VisitVariableDeclaration(function);
}
@@ -303,9 +312,12 @@ void FullCodeGenerator::Generate() {
{ Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
- __ LoadRoot(t0, Heap::kStackLimitRootIndex);
- __ Branch(&ok, hs, sp, Operand(t0));
- __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
+ __ LoadRoot(at, Heap::kStackLimitRootIndex);
+ __ Branch(&ok, hs, sp, Operand(at));
+ Handle<Code> stack_check = isolate()->builtins()->StackCheck();
+ PredictableCodeSizeScope predictable(masm_,
+ masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
+ __ Call(stack_check, RelocInfo::CODE_TARGET);
__ bind(&ok);
}
@@ -341,11 +353,7 @@ void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
void FullCodeGenerator::EmitProfilingCounterReset() {
int reset_value = FLAG_interrupt_budget;
- if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
- // Self-optimization is a one-off thing: if it fails, don't try again.
- reset_value = Smi::kMaxValue;
- }
- if (isolate()->IsDebuggerActive()) {
+ if (info_->is_debug()) {
// Detect debug break requests as soon as possible.
reset_value = FLAG_interrupt_budget >> 4;
}
@@ -365,13 +373,10 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Comment cmnt(masm_, "[ Back edge bookkeeping");
Label ok;
- int weight = 1;
- if (FLAG_weighted_back_edges) {
- ASSERT(back_edge_target->is_bound());
- int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
- weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kCodeSizeMultiplier));
- }
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ int weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kCodeSizeMultiplier));
EmitProfilingCounterDecrement(weight);
__ slt(at, a3, zero_reg);
__ beq(at, zero_reg, &ok);
@@ -404,32 +409,24 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
- if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
- // Pretend that the exit is a backwards jump to the entry.
- int weight = 1;
- if (info_->ShouldSelfOptimize()) {
- weight = FLAG_interrupt_budget / FLAG_self_opt_count;
- } else if (FLAG_weighted_back_edges) {
- int distance = masm_->pc_offset();
- weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kCodeSizeMultiplier));
- }
- EmitProfilingCounterDecrement(weight);
- Label ok;
- __ Branch(&ok, ge, a3, Operand(zero_reg));
- __ push(v0);
- if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
- __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(a2);
- __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
- } else {
- __ Call(isolate()->builtins()->InterruptCheck(),
- RelocInfo::CODE_TARGET);
- }
- __ pop(v0);
- EmitProfilingCounterReset();
- __ bind(&ok);
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kCodeSizeMultiplier));
}
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ Branch(&ok, ge, a3, Operand(zero_reg));
+ __ push(v0);
+ __ Call(isolate()->builtins()->InterruptCheck(),
+ RelocInfo::CODE_TARGET);
+ __ pop(v0);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
@@ -686,7 +683,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* fall_through) {
__ mov(a0, result_register());
Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
- CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
+ CallIC(ic, condition->test_id());
__ mov(at, zero_reg);
Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}
@@ -809,7 +806,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
VariableProxy* proxy = declaration->proxy();
VariableMode mode = declaration->mode();
Variable* variable = proxy->var();
- bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
+ bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
switch (variable->location()) {
case Variable::UNALLOCATED:
globals_->Add(variable->name(), zone());
@@ -859,7 +856,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
__ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
__ Push(cp, a2, a1, a0);
}
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
break;
}
}
@@ -915,7 +912,7 @@ void FullCodeGenerator::VisitFunctionDeclaration(
__ Push(cp, a2, a1);
// Push initial value for function declaration.
VisitForStackValue(declaration->fun());
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
break;
}
}
@@ -987,7 +984,7 @@ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
__ li(a1, Operand(pairs));
__ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
__ Push(cp, a1, a0);
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
// Return value is ignored.
}
@@ -995,7 +992,7 @@ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
// Call the runtime to declare the modules.
__ Push(descriptions);
- __ CallRuntime(Runtime::kDeclareModules, 1);
+ __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
// Return value is ignored.
}
@@ -1051,9 +1048,18 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
- CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, clause->CompareId());
patch_site.EmitPatchInfo();
+ Label skip;
+ __ Branch(&skip);
+ PrepareForBailout(clause, TOS_REG);
+ __ LoadRoot(at, Heap::kTrueValueRootIndex);
+ __ Branch(&next_test, ne, v0, Operand(at));
+ __ Drop(1);
+ __ Branch(clause->body_target());
+ __ bind(&skip);
+
__ Branch(&next_test, ne, v0, Operand(zero_reg));
__ Drop(1); // Switch value is no longer needed.
__ Branch(clause->body_target());
@@ -1085,6 +1091,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Comment cmnt(masm_, "[ ForInStatement");
+ int slot = stmt->ForInFeedbackSlot();
SetStatementPosition(stmt);
Label loop, exit;
@@ -1157,10 +1164,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
- __ push(v0); // Map.
__ li(a0, Operand(Smi::FromInt(0)));
- // Push enumeration cache, enumeration cache length (as smi) and zero.
- __ Push(a2, a1, a0);
+ // Push map, enumeration cache, enumeration cache length (as smi) and zero.
+ __ Push(v0, a2, a1, a0);
__ jmp(&loop);
__ bind(&no_descriptors);
@@ -1171,13 +1177,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
- Handle<Cell> cell = isolate()->factory()->NewCell(
- Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
- isolate()));
- RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
- __ li(a1, cell);
- __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
- __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));
+ __ li(a1, FeedbackVector());
+ __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
+ __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
__ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
@@ -1225,8 +1227,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Convert the entry to a string or (smi) 0 if it isn't a property
// any more. If the property has been removed while iterating, we
// just skip it.
- __ push(a1); // Enumerable.
- __ push(a3); // Current entry.
+ __ Push(a1, a3); // Enumerable and current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ mov(a3, result_register());
__ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
@@ -1271,8 +1272,8 @@ void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
Iteration loop_statement(this, stmt);
increment_loop_depth();
- // var iterator = iterable[@@iterator]()
- VisitForAccumulatorValue(stmt->assign_iterator());
+ // var iterable = subject
+ VisitForAccumulatorValue(stmt->assign_iterable());
__ mov(a0, v0);
// As with for-in, skip the loop if the iterator is null or undefined.
@@ -1281,17 +1282,8 @@ void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
__ LoadRoot(at, Heap::kNullValueRootIndex);
__ Branch(loop_statement.break_label(), eq, a0, Operand(at));
- // Convert the iterator to a JS object.
- Label convert, done_convert;
- __ JumpIfSmi(a0, &convert);
- __ GetObjectType(a0, a1, a1);
- __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
- __ bind(&convert);
- __ push(a0);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ mov(a0, v0);
- __ bind(&done_convert);
- __ push(a0);
+ // var iterator = iterable[Symbol.iterator]();
+ VisitForEffect(stmt->assign_iterator());
// Loop entry.
__ bind(loop_statement.continue_label());
@@ -1338,7 +1330,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->language_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ li(a2, Operand(info));
__ CallStub(&stub);
} else {
@@ -1346,7 +1340,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
__ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
: Heap::kFalseValueRootIndex);
__ Push(cp, a0, a1);
- __ CallRuntime(Runtime::kNewClosure, 3);
+ __ CallRuntime(Runtime::kHiddenNewClosure, 3);
}
context()->Plug(v0);
}
@@ -1368,7 +1362,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
Scope* s = scope();
while (s != NULL) {
if (s->num_heap_slots() > 0) {
- if (s->calls_non_strict_eval()) {
+ if (s->calls_sloppy_eval()) {
// Check that extension is NULL.
__ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
__ Branch(slow, ne, temp, Operand(zero_reg));
@@ -1380,7 +1374,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
}
// If no outer scope calls eval, we do not need to check more
// context extensions.
- if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
+ if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
s = s->outer_scope();
}
@@ -1405,11 +1399,10 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
- RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
- ? RelocInfo::CODE_TARGET
- : RelocInfo::CODE_TARGET_CONTEXT;
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, mode);
+ ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
+ ? NOT_CONTEXTUAL
+ : CONTEXTUAL;
+ CallLoadIC(mode);
}
@@ -1422,7 +1415,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
- if (s->calls_non_strict_eval()) {
+ if (s->calls_sloppy_eval()) {
// Check that extension is NULL.
__ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
__ Branch(slow, ne, temp, Operand(zero_reg));
@@ -1458,19 +1451,18 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
} else if (var->mode() == DYNAMIC_LOCAL) {
Variable* local = var->local_if_not_shadowed();
__ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
- if (local->mode() == LET ||
- local->mode() == CONST ||
- local->mode() == CONST_HARMONY) {
+ if (local->mode() == LET || local->mode() == CONST ||
+ local->mode() == CONST_LEGACY) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
- if (local->mode() == CONST) {
+ if (local->mode() == CONST_LEGACY) {
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
__ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
- } else { // LET || CONST_HARMONY
+ } else { // LET || CONST
__ Branch(done, ne, at, Operand(zero_reg));
__ li(a0, Operand(var->name()));
__ push(a0);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
}
}
__ Branch(done);
@@ -1487,13 +1479,12 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
// variables.
switch (var->location()) {
case Variable::UNALLOCATED: {
- Comment cmnt(masm_, "Global variable");
+ Comment cmnt(masm_, "[ Global variable");
// Use inline caching. Variable name is passed in a2 and the global
// object (receiver) in a0.
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallLoadIC(CONTEXTUAL);
context()->Plug(v0);
break;
}
@@ -1501,9 +1492,8 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
case Variable::PARAMETER:
case Variable::LOCAL:
case Variable::CONTEXT: {
- Comment cmnt(masm_, var->IsContextSlot()
- ? "Context variable"
- : "Stack variable");
+ Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
+ : "[ Stack variable");
if (var->binding_needs_init()) {
// var->scope() may be NULL when the proxy is located in eval code and
// refers to a potential outside binding. Currently those bindings are
@@ -1535,7 +1525,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
// Check that we always have valid source position.
ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
ASSERT(proxy->position() != RelocInfo::kNoPosition);
- skip_init_check = var->mode() != CONST &&
+ skip_init_check = var->mode() != CONST_LEGACY &&
var->initializer_position() < proxy->position();
}
@@ -1544,18 +1534,18 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
GetVar(v0, var);
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
- if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+ if (var->mode() == LET || var->mode() == CONST) {
// Throw a reference error when using an uninitialized let/const
// binding in harmony mode.
Label done;
__ Branch(&done, ne, at, Operand(zero_reg));
__ li(a0, Operand(var->name()));
__ push(a0);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
__ bind(&done);
} else {
// Uninitalized const bindings outside of harmony mode are unholed.
- ASSERT(var->mode() == CONST);
+ ASSERT(var->mode() == CONST_LEGACY);
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
__ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
}
@@ -1568,15 +1558,15 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
}
case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ Lookup variable");
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
- Comment cmnt(masm_, "Lookup variable");
__ li(a1, Operand(var->name()));
__ Push(cp, a1); // Context and name.
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
__ bind(&done);
context()->Plug(v0);
}
@@ -1608,7 +1598,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ li(a2, Operand(expr->pattern()));
__ li(a1, Operand(expr->flags()));
__ Push(t0, a3, a2, a1);
- __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
+ __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
__ mov(t1, v0);
__ bind(&materialized);
@@ -1620,7 +1610,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ bind(&runtime_allocate);
__ li(a0, Operand(Smi::FromInt(size)));
__ Push(t1, a0);
- __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
__ pop(t1);
__ bind(&allocated);
@@ -1661,14 +1651,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
: ObjectLiteral::kNoFlags;
__ li(a0, Operand(Smi::FromInt(flags)));
int properties_count = constant_properties->length() / 2;
- if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
- expr->depth() > 1 || Serializer::enabled() ||
- flags != ObjectLiteral::kFastElements ||
+ if (expr->may_store_doubles() || expr->depth() > 1 ||
+ masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ Push(a3, a2, a1, a0);
- __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
+ __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
} else {
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
@@ -1705,10 +1694,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(a0, result_register());
__ li(a2, Operand(key->value()));
__ lw(a1, MemOperand(sp));
- Handle<Code> ic = is_classic_mode()
- ? isolate()->builtins()->StoreIC_Initialize()
- : isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
+ CallStoreIC(key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1798,8 +1784,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
- AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
- ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+ AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
// If the only customer of allocation sites is transitioning, then
// we can turn it off if we don't have anywhere else to transition to.
@@ -1811,31 +1796,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
__ li(a1, Operand(constant_elements));
- if (has_fast_elements && constant_elements_values->map() ==
- isolate()->heap()->fixed_cow_array_map()) {
- FastCloneShallowArrayStub stub(
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode,
- length);
- __ CallStub(&stub);
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
- 1, a1, a2);
- } else if (expr->depth() > 1 || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
- __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
+ __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
- ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
- FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
- if (has_fast_elements) {
- mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- }
-
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
@@ -1869,7 +1835,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
__ li(a3, Operand(Smi::FromInt(i)));
__ mov(a0, result_register());
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
@@ -1885,13 +1851,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
+ ASSERT(expr->target()->IsValidReferenceExpression());
+
Comment cmnt(masm_, "[ Assignment");
- // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
- // on the left-hand side.
- if (!expr->target()->IsValidLeftHandSide()) {
- VisitForEffect(expr->target());
- return;
- }
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
@@ -2030,7 +1992,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
__ Branch(&post_runtime, eq, sp, Operand(a1));
__ push(v0); // generator object
- __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
+ __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ bind(&post_runtime);
__ pop(result_register());
@@ -2070,9 +2032,9 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ bind(&l_catch);
__ mov(a0, v0);
handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
- __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
- __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
- __ Push(a3, a0); // iter, exception
+ __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
+ __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
+ __ Push(a2, a3, a0); // "throw", iter, except
__ jmp(&l_call);
// try { received = %yield result }
@@ -2098,33 +2060,41 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ mov(a1, cp);
__ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
kRAHasBeenSaved, kDontSaveFPRegs);
- __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
+ __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
- __ pop(v0); // result
+ __ pop(v0); // result
EmitReturnSequence();
__ mov(a0, v0);
- __ bind(&l_resume); // received in a0
+ __ bind(&l_resume); // received in a0
__ PopTryHandler();
// receiver = iter; f = 'next'; arg = received;
__ bind(&l_next);
- __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next"
- __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
- __ Push(a3, a0); // iter, received
+ __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next"
+ __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
+ __ Push(a2, a3, a0); // "next", iter, received
// result = receiver[f](arg);
__ bind(&l_call);
- Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
- CallIC(ic);
+ __ lw(a1, MemOperand(sp, kPointerSize));
+ __ lw(a0, MemOperand(sp, 2 * kPointerSize));
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ CallIC(ic, TypeFeedbackId::None());
+ __ mov(a0, v0);
+ __ mov(a1, a0);
+ __ sw(a1, MemOperand(sp, 2 * kPointerSize));
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
+ __ CallStub(&stub);
+
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ __ Drop(1); // The function is still on the stack; drop it.
// if (!result.done) goto l_try;
__ bind(&l_loop);
__ mov(a0, v0);
__ push(a0); // save result
__ LoadRoot(a2, Heap::kdone_stringRootIndex); // "done"
- Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(done_ic); // result.done in v0
+ CallLoadIC(NOT_CONTEXTUAL); // result.done in v0
__ mov(a0, v0);
Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
CallIC(bool_ic);
@@ -2133,8 +2103,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
// result.value
__ pop(a0); // result
__ LoadRoot(a2, Heap::kvalue_stringRootIndex); // "value"
- Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(value_ic); // result.value in v0
+ CallLoadIC(NOT_CONTEXTUAL); // result.value in v0
context()->DropAndPlug(2, v0); // drop iter and g
break;
}
@@ -2146,18 +2115,20 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
Expression *value,
JSGeneratorObject::ResumeMode resume_mode) {
// The value stays in a0, and is ultimately read by the resumed generator, as
- // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. a1
- // will hold the generator object until the activation has been resumed.
+ // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
+ // is read to throw the value when the resumed generator is already closed.
+ // a1 will hold the generator object until the activation has been resumed.
VisitForStackValue(generator);
VisitForAccumulatorValue(value);
__ pop(a1);
// Check generator state.
- Label wrong_state, done;
+ Label wrong_state, closed_state, done;
__ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
- STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
- STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
- __ Branch(&wrong_state, le, a3, Operand(zero_reg));
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
+ __ Branch(&closed_state, eq, a3, Operand(zero_reg));
+ __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
// Load suspended function and context.
__ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
@@ -2226,14 +2197,29 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
ASSERT(!result_register().is(a1));
__ Push(a1, result_register());
__ Push(Smi::FromInt(resume_mode));
- __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
__ stop("not-reached");
+ // Reach here when generator is closed.
+ __ bind(&closed_state);
+ if (resume_mode == JSGeneratorObject::NEXT) {
+ // Return completed iterator result when generator is closed.
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ push(a2);
+ // Pop value from top-of-stack slot; box result into result register.
+ EmitCreateIteratorResult(true);
+ } else {
+ // Throw the provided value.
+ __ push(a0);
+ __ CallRuntime(Runtime::kHiddenThrow, 1);
+ }
+ __ jmp(&done);
+
// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
__ push(a1);
- __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+ __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
__ bind(&done);
context()->Plug(result_register());
@@ -2244,14 +2230,14 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
Label gc_required;
Label allocated;
- Handle<Map> map(isolate()->native_context()->generator_result_map());
+ Handle<Map> map(isolate()->native_context()->iterator_result_map());
__ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&gc_required);
__ Push(Smi::FromInt(map->instance_size()));
- __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
__ lw(context_register(),
MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2282,8 +2268,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
__ li(a2, Operand(key->value()));
// Call load IC. It has arguments receiver and property name a0 and a2.
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
+ CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}
@@ -2292,7 +2277,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
+ CallIC(ic, prop->PropertyFeedbackId());
}
@@ -2319,9 +2304,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
- BinaryOpICStub stub(op, mode);
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->BinaryOperationFeedbackId());
+ BinaryOpICStub stub(isolate(), op, mode);
+ CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -2330,13 +2314,11 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
// recording binary operation stub, see
switch (op) {
case Token::SAR:
- __ Branch(&stub_call);
__ GetLeastBitsFromSmi(scratch1, right, 5);
__ srav(right, left, scratch1);
__ And(v0, right, Operand(~kSmiTagMask));
break;
case Token::SHL: {
- __ Branch(&stub_call);
__ SmiUntag(scratch1, left);
__ GetLeastBitsFromSmi(scratch2, right, 5);
__ sllv(scratch1, scratch1, scratch2);
@@ -2346,7 +2328,6 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
break;
}
case Token::SHR: {
- __ Branch(&stub_call);
__ SmiUntag(scratch1, left);
__ GetLeastBitsFromSmi(scratch2, right, 5);
__ srlv(scratch1, scratch1, scratch2);
@@ -2401,22 +2382,16 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
OverwriteMode mode) {
__ mov(a0, result_register());
__ pop(a1);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->BinaryOperationFeedbackId());
+ CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
void FullCodeGenerator::EmitAssignment(Expression* expr) {
- // Invalid left-hand sides are rewritten by the parser to have a 'throw
- // ReferenceError' on the left-hand side.
- if (!expr->IsValidLeftHandSide()) {
- VisitForEffect(expr);
- return;
- }
+ ASSERT(expr->IsValidReferenceExpression());
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
@@ -2442,10 +2417,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
__ mov(a1, result_register());
__ pop(a0); // Restore value.
__ li(a2, Operand(prop->key()->AsLiteral()->value()));
- Handle<Code> ic = is_classic_mode()
- ? isolate()->builtins()->StoreIC_Initialize()
- : isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic);
+ CallStoreIC();
break;
}
case KEYED_PROPERTY: {
@@ -2454,7 +2426,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
VisitForAccumulatorValue(prop->key());
__ mov(a1, result_register());
__ Pop(a0, a2); // a0 = restored value.
- Handle<Code> ic = is_classic_mode()
+ Handle<Code> ic = strict_mode() == SLOPPY
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
CallIC(ic);
@@ -2465,49 +2437,58 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
}
-void FullCodeGenerator::EmitVariableAssignment(Variable* var,
- Token::Value op) {
+void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
+ Variable* var, MemOperand location) {
+ __ sw(result_register(), location);
+ if (var->IsContextSlot()) {
+ // RecordWrite may destroy all its register arguments.
+ __ Move(a3, result_register());
+ int offset = Context::SlotOffset(var->index());
+ __ RecordWriteContextSlot(
+ a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
+ }
+}
+
+
+void FullCodeGenerator::EmitCallStoreContextSlot(
+ Handle<String> name, StrictMode strict_mode) {
+ __ li(a1, Operand(name));
+ __ li(a0, Operand(Smi::FromInt(strict_mode)));
+ __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
+ __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
+}
+
+
+void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
if (var->IsUnallocated()) {
// Global var, const, or let.
__ mov(a0, result_register());
__ li(a2, Operand(var->name()));
__ lw(a1, GlobalObjectOperand());
- Handle<Code> ic = is_classic_mode()
- ? isolate()->builtins()->StoreIC_Initialize()
- : isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallStoreIC();
- } else if (op == Token::INIT_CONST) {
+ } else if (op == Token::INIT_CONST_LEGACY) {
// Const initializers need a write barrier.
ASSERT(!var->IsParameter()); // No const parameters.
- if (var->IsStackLocal()) {
+ if (var->IsLookupSlot()) {
+ __ li(a0, Operand(var->name()));
+ __ Push(v0, cp, a0); // Context and name.
+ __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
+ } else {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
Label skip;
- __ lw(a1, StackOperand(var));
- __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ Branch(&skip, ne, a1, Operand(t0));
- __ sw(result_register(), StackOperand(var));
+ MemOperand location = VarOperand(var, a1);
+ __ lw(a2, location);
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Branch(&skip, ne, a2, Operand(at));
+ EmitStoreToStackLocalOrContextSlot(var, location);
__ bind(&skip);
- } else {
- ASSERT(var->IsContextSlot() || var->IsLookupSlot());
- // Like var declarations, const declarations are hoisted to function
- // scope. However, unlike var initializers, const initializers are
- // able to drill a hole to that function context, even from inside a
- // 'with' context. We thus bypass the normal static scope lookup for
- // var->IsContextSlot().
- __ push(v0);
- __ li(a0, Operand(var->name()));
- __ Push(cp, a0); // Context and name.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
if (var->IsLookupSlot()) {
- __ push(v0); // Value.
- __ li(a1, Operand(var->name()));
- __ li(a0, Operand(Smi::FromInt(language_mode())));
- __ Push(cp, a1, a0); // Context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ EmitCallStoreContextSlot(var->name(), strict_mode());
} else {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
Label assign;
@@ -2517,23 +2498,19 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ Branch(&assign, ne, a3, Operand(t0));
__ li(a3, Operand(var->name()));
__ push(a3);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
// Perform the assignment.
__ bind(&assign);
- __ sw(result_register(), location);
- if (var->IsContextSlot()) {
- // RecordWrite may destroy all its register arguments.
- __ mov(a3, result_register());
- int offset = Context::SlotOffset(var->index());
- __ RecordWriteContextSlot(
- a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
- }
+ EmitStoreToStackLocalOrContextSlot(var, location);
}
- } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+ } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
// Assignment to var or initializing assignment to let/const
// in harmony mode.
- if (var->IsStackAllocated() || var->IsContextSlot()) {
+ if (var->IsLookupSlot()) {
+ EmitCallStoreContextSlot(var->name(), strict_mode());
+ } else {
+ ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
MemOperand location = VarOperand(var, a1);
if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
@@ -2541,24 +2518,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
}
- // Perform the assignment.
- __ sw(v0, location);
- if (var->IsContextSlot()) {
- __ mov(a3, v0);
- int offset = Context::SlotOffset(var->index());
- __ RecordWriteContextSlot(
- a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
- }
- } else {
- ASSERT(var->IsLookupSlot());
- __ push(v0); // Value.
- __ li(a1, Operand(var->name()));
- __ li(a0, Operand(Smi::FromInt(language_mode())));
- __ Push(cp, a1, a0); // Context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ EmitStoreToStackLocalOrContextSlot(var, location);
}
}
- // Non-initializing assignments to consts are ignored.
+ // Non-initializing assignments to consts are ignored.
}
@@ -2566,7 +2529,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a named store IC.
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
- ASSERT(prop->key()->AsLiteral() != NULL);
+ ASSERT(prop->key()->IsLiteral());
// Record source code position before IC call.
SetSourcePosition(expr->position());
@@ -2574,10 +2537,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ li(a2, Operand(prop->key()->AsLiteral()->value()));
__ pop(a1);
- Handle<Code> ic = is_classic_mode()
- ? isolate()->builtins()->StoreIC_Initialize()
- : isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+ CallStoreIC(expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
@@ -2597,10 +2557,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ mov(a0, result_register());
__ Pop(a2, a1); // a1 = key.
- Handle<Code> ic = is_classic_mode()
+ Handle<Code> ic = strict_mode() == SLOPPY
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+ CallIC(ic, expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
@@ -2627,73 +2587,70 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
- RelocInfo::Mode rmode,
TypeFeedbackId id) {
ic_total_count_++;
- __ Call(code, rmode, id);
+ __ Call(code, RelocInfo::CODE_TARGET, id);
}
-void FullCodeGenerator::EmitCallWithIC(Call* expr,
- Handle<Object> name,
- RelocInfo::Mode mode) {
- // Code common for calls using the IC.
- ZoneList<Expression*>* args = expr->arguments();
- int arg_count = args->length();
- { PreservePositionScope scope(masm()->positions_recorder());
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
+// Code common for calls using the IC.
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
+ Expression* callee = expr->expression();
+
+ CallIC::CallType call_type = callee->IsVariableProxy()
+ ? CallIC::FUNCTION
+ : CallIC::METHOD;
+
+ // Get the target function.
+ if (call_type == CallIC::FUNCTION) {
+ { StackValueContext context(this);
+ EmitVariableLoad(callee->AsVariableProxy());
+ PrepareForBailout(callee, NO_REGISTERS);
}
- __ li(a2, Operand(name));
+ // Push undefined as receiver. This is patched in the method prologue if it
+ // is a sloppy mode method.
+ __ Push(isolate()->factory()->undefined_value());
+ } else {
+ // Load the function from the receiver.
+ ASSERT(callee->IsProperty());
+ __ lw(v0, MemOperand(sp, 0));
+ EmitNamedPropertyLoad(callee->AsProperty());
+ PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
+ // Push the target function under the receiver.
+ __ lw(at, MemOperand(sp, 0));
+ __ push(at);
+ __ sw(v0, MemOperand(sp, kPointerSize));
}
- // Record source position for debugger.
- SetSourcePosition(expr->position());
- // Call the IC initialization code.
- Handle<Code> ic =
- isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->CallFeedbackId());
- RecordJSReturnSite(expr);
- // Restore context register.
- __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
- context()->Plug(v0);
+
+ EmitCall(expr, call_type);
}
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
- Expression* key) {
+// Code common for calls using the IC.
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+ Expression* key) {
// Load the key.
VisitForAccumulatorValue(key);
- // Swap the name of the function and the receiver on the stack to follow
- // the calling convention for call ICs.
- __ pop(a1);
- __ push(v0);
- __ push(a1);
+ Expression* callee = expr->expression();
- // Code common for calls using the IC.
- ZoneList<Expression*>* args = expr->arguments();
- int arg_count = args->length();
- { PreservePositionScope scope(masm()->positions_recorder());
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
- }
- // Record source position for debugger.
- SetSourcePosition(expr->position());
- // Call the IC initialization code.
- Handle<Code> ic =
- isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
- __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
- RecordJSReturnSite(expr);
- // Restore context register.
- __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
- context()->DropAndPlug(1, v0); // Drop the key still on the stack.
+ // Load the function from the receiver.
+ ASSERT(callee->IsProperty());
+ __ lw(a1, MemOperand(sp, 0));
+ EmitKeyedPropertyLoad(callee->AsProperty());
+ PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
+
+ // Push the target function under the receiver.
+ __ lw(at, MemOperand(sp, 0));
+ __ push(at);
+ __ sw(v0, MemOperand(sp, kPointerSize));
+
+ EmitCall(expr, CallIC::METHOD);
}
-void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
- // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+ // Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -2701,20 +2658,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
VisitForStackValue(args->at(i));
}
}
- // Record source position for debugger.
- SetSourcePosition(expr->position());
- // Record call targets.
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
- RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
- __ li(a2, Operand(cell));
-
- CallFunctionStub stub(arg_count, flags);
+ // Record source position of the IC call.
+ SetSourcePosition(expr->position());
+ Handle<Code> ic = CallIC::initialize_stub(
+ isolate(), arg_count, call_type);
+ __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
- __ CallStub(&stub, expr->CallFeedbackId());
+ // Don't assign a type feedback id to the IC, since type feedback is provided
+ // by the vector above.
+ CallIC(ic);
+
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2734,15 +2688,15 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
int receiver_offset = 2 + info_->scope()->num_parameters();
__ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
- // t0: the language mode.
- __ li(t0, Operand(Smi::FromInt(language_mode())));
+ // t0: the strict mode.
+ __ li(t0, Operand(Smi::FromInt(strict_mode())));
// a1: the start position of the scope the calls resides in.
__ li(a1, Operand(Smi::FromInt(scope()->start_position())));
// Do the runtime call.
__ Push(t2, t1, t0, a1);
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
+ __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}
@@ -2755,12 +2709,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
Comment cmnt(masm_, "[ Call");
Expression* callee = expr->expression();
- VariableProxy* proxy = callee->AsVariableProxy();
- Property* property = callee->AsProperty();
+ Call::CallType call_type = expr->GetCallType(isolate());
- if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
- // In a call to eval, we first call %ResolvePossiblyDirectEval to
- // resolve the function we need to call and the receiver of the
+ if (call_type == Call::POSSIBLY_EVAL_CALL) {
+ // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
+ // to resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
// arguments.
ZoneList<Expression*>* args = expr->arguments();
@@ -2789,20 +2742,18 @@ void FullCodeGenerator::VisitCall(Call* expr) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, v0);
- } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
- // Push global object as receiver for the call IC.
- __ lw(a0, GlobalObjectOperand());
- __ push(a0);
- EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
+ } else if (call_type == Call::GLOBAL_CALL) {
+ EmitCallWithLoadIC(expr);
+ } else if (call_type == Call::LOOKUP_SLOT_CALL) {
// Call to a lookup slot (dynamically introduced variable).
+ VariableProxy* proxy = callee->AsVariableProxy();
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -2817,7 +2768,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
ASSERT(!context_register().is(a2));
__ li(a2, Operand(proxy->name()));
__ Push(context_register(), a2);
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
__ Push(v0, v1); // Function, receiver.
// If fast case code has been generated, emit code to push the
@@ -2831,37 +2782,34 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ push(v0);
// The receiver is implicitly the global receiver. Indicate this
// by passing the hole to the call function stub.
- __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ push(a1);
__ bind(&call);
}
// The receiver is either the global receiver or an object found
- // by LoadContextSlot. That object could be the hole if the
- // receiver is implicitly the global object.
- EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
- } else if (property != NULL) {
+ // by LoadContextSlot.
+ EmitCall(expr);
+ } else if (call_type == Call::PROPERTY_CALL) {
+ Property* property = callee->AsProperty();
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(property->obj());
}
if (property->key()->IsPropertyName()) {
- EmitCallWithIC(expr,
- property->key()->AsLiteral()->value(),
- RelocInfo::CODE_TARGET);
+ EmitCallWithLoadIC(expr);
} else {
- EmitKeyedCallWithIC(expr, property->key());
+ EmitKeyedCallWithLoadIC(expr, property->key());
}
} else {
+ ASSERT(call_type == Call::OTHER_CALL);
// Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(callee);
}
- // Load global receiver object.
- __ lw(a1, GlobalObjectOperand());
- __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
+ __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ push(a1);
// Emit function call.
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
+ EmitCall(expr);
}
#ifdef DEBUG
@@ -2898,14 +2846,17 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
- RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
- __ li(a2, Operand(cell));
-
- CallConstructStub stub(RECORD_CALL_TARGET);
- __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
+ if (FLAG_pretenuring_call_new) {
+ EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
+ __ li(a2, FeedbackVector());
+ __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
+
+ CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
+ __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
}
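The new feedback-slot wiring above (FeedbackVector() loaded into a2, the CallNew slot index passed as a Smi in a3, and the ASSERT tying the AllocationSite slot to the next index) can be pictured with a small standalone sketch. Everything below is a hypothetical illustration with made-up names, not V8's actual TypeFeedbackVector API:

// Not part of the diff: a minimal sketch of the slot layout VisitCallNew
// relies on. "FeedbackVectorSketch" is an illustrative stand-in only.
#include <cassert>
#include <string>
#include <vector>

struct FeedbackVectorSketch {
  std::vector<std::string> slots;  // one entry per feedback slot

  // Mirrors the ASSERT above: with --pretenuring-call-new, the AllocationSite
  // slot is expected to sit directly after the CallNew slot, so both can be
  // reached from the single slot index handed to the stub.
  std::string& call_new_slot(int slot) { return slots[slot]; }
  std::string& allocation_site_slot(int slot) { return slots[slot + 1]; }
};

int main() {
  FeedbackVectorSketch vector{{"call-new feedback", "allocation site"}};
  int call_new_feedback_slot = 0;
  assert(vector.call_new_slot(call_new_feedback_slot) == "call-new feedback");
  assert(vector.allocation_site_slot(call_new_feedback_slot) == "allocation site");
  return 0;
}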
@@ -3023,8 +2974,8 @@ void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
__ JumpIfSmi(v0, if_false);
__ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
- __ And(at, a1, Operand(1 << Map::kIsUndetectable));
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
+ __ And(at, a1, Operand(1 << Map::kIsUndetectable));
Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -3079,7 +3030,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
// Calculate the end of the descriptor array.
__ mov(a2, t0);
- __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
+ __ sll(t1, a3, kPointerSizeLog2);
__ Addu(a2, a2, t1);
// Loop through all the keys in the descriptor array. If one of these is the
@@ -3280,7 +3231,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0));
__ mov(a1, v0);
__ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(v0);
}
@@ -3367,31 +3318,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitLog(CallRuntime* expr) {
- // Conditionally generate a log call.
- // Args:
- // 0 (literal string): The type of logging (corresponds to the flags).
- // This is used to determine whether or not to generate the log call.
- // 1 (string): Format string. Access the string at argument index 2
- // with '%2s' (see Logger::LogRuntime for all the formats).
- // 2 (array): Arguments to the format string.
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT_EQ(args->length(), 3);
- if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- __ CallRuntime(Runtime::kLog, 2);
- }
-
- // Finally, we're expected to leave a value on the top of the stack.
- __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
@@ -3404,7 +3333,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
@@ -3477,7 +3406,7 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
}
__ bind(&not_date_object);
- __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
__ bind(&done);
context()->Plug(v0);
}
@@ -3498,9 +3427,9 @@ void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
if (FLAG_debug_code) {
__ SmiTst(value, at);
- __ ThrowIf(ne, kNonSmiValue, at, Operand(zero_reg));
+ __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
__ SmiTst(index, at);
- __ ThrowIf(ne, kNonSmiIndex, at, Operand(zero_reg));
+ __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
__ SmiUntag(index, index);
static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
Register scratch = t5;
@@ -3535,9 +3464,9 @@ void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
if (FLAG_debug_code) {
__ SmiTst(value, at);
- __ ThrowIf(ne, kNonSmiValue, at, Operand(zero_reg));
+ __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
__ SmiTst(index, at);
- __ ThrowIf(ne, kNonSmiIndex, at, Operand(zero_reg));
+ __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
__ SmiUntag(index, index);
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
Register scratch = t5;
@@ -3563,7 +3492,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(v0);
}
@@ -3606,7 +3535,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0));
__ mov(a0, result_register());
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
@@ -3730,21 +3659,13 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
- if (FLAG_new_string_add) {
- VisitForStackValue(args->at(0));
- VisitForAccumulatorValue(args->at(1));
-
- __ pop(a1);
- __ mov(a0, result_register()); // NewStringAddStub requires args in a0, a1.
- NewStringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
- __ CallStub(&stub);
- } else {
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
- StringAddStub stub(STRING_ADD_CHECK_BOTH);
- __ CallStub(&stub);
- }
+ __ pop(a1);
+ __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ __ CallStub(&stub);
context()->Plug(v0);
}
@@ -3756,35 +3677,12 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
-void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
- // Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::LOG,
- TranscendentalCacheStub::TAGGED);
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 1);
- VisitForStackValue(args->at(0));
- __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
- __ CallStub(&stub);
- context()->Plug(v0);
-}
-
-
-void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
- // Load the argument on the stack and call the runtime function.
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 1);
- VisitForStackValue(args->at(0));
- __ CallRuntime(Runtime::kMath_sqrt, 1);
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() >= 2);
@@ -3804,8 +3702,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
// InvokeFunction requires the function in a1. Move it in there.
__ mov(a1, result_register());
ParameterCount count(arg_count);
- __ InvokeFunction(a1, count, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
+ __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
@@ -3819,12 +3716,15 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
+ VisitForAccumulatorValue(args->at(2));
+ __ mov(a0, result_register());
+ __ pop(a1);
+ __ pop(a2);
__ CallStub(&stub);
context()->Plug(v0);
}
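Because the first two arguments are pushed left-to-right and the third is left in the accumulator, the two pops necessarily come off in reverse order (a1 gets the second argument, a2 the first). A tiny standalone sketch of that LIFO bookkeeping, with illustrative value names only:

// Not part of the diff: host-side illustration of the pop ordering above.
#include <cassert>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> stack;
  stack.push_back("arg0");   // VisitForStackValue(args->at(0))
  stack.push_back("arg1");   // VisitForStackValue(args->at(1))
  std::string a0 = "arg2";   // VisitForAccumulatorValue(args->at(2)), moved to a0

  std::string a1 = stack.back(); stack.pop_back();  // __ pop(a1) -> arg1
  std::string a2 = stack.back(); stack.pop_back();  // __ pop(a2) -> arg0

  assert(a0 == "arg2" && a1 == "arg1" && a2 == "arg0");
  return 0;
}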
@@ -3877,50 +3777,13 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
__ bind(&not_found);
// Call runtime to perform the lookup.
__ Push(cache, key);
- __ CallRuntime(Runtime::kGetFromCache, 2);
+ __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
__ bind(&done);
context()->Plug(v0);
}
-void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT_EQ(2, args->length());
-
- Register right = v0;
- Register left = a1;
- Register tmp = a2;
- Register tmp2 = a3;
-
- VisitForStackValue(args->at(0));
- VisitForAccumulatorValue(args->at(1)); // Result (right) in v0.
- __ pop(left);
-
- Label done, fail, ok;
- __ Branch(&ok, eq, left, Operand(right));
- // Fail if either is a non-HeapObject.
- __ And(tmp, left, Operand(right));
- __ JumpIfSmi(tmp, &fail);
- __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
- __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
- __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
- __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
- __ Branch(&fail, ne, tmp, Operand(tmp2));
- __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
- __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
- __ Branch(&ok, eq, tmp, Operand(tmp2));
- __ bind(&fail);
- __ LoadRoot(v0, Heap::kFalseValueRootIndex);
- __ jmp(&done);
- __ bind(&ok);
- __ LoadRoot(v0, Heap::kTrueValueRootIndex);
- __ bind(&done);
-
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
VisitForAccumulatorValue(args->at(0));
@@ -4195,8 +4058,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
- Handle<String> name = expr->name();
- if (name->length() > 0 && name->Get(0) == '_') {
+ if (expr->function() != NULL &&
+ expr->function()->intrinsic_type == Runtime::INLINE) {
Comment cmnt(masm_, "[ InlineRuntimeCall");
EmitInlineRuntimeCall(expr);
return;
@@ -4204,34 +4067,48 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Comment cmnt(masm_, "[ CallRuntime");
ZoneList<Expression*>* args = expr->arguments();
+ int arg_count = args->length();
if (expr->is_jsruntime()) {
- // Prepare for calling JS runtime function.
+ // Push the builtins object as the receiver.
__ lw(a0, GlobalObjectOperand());
__ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
__ push(a0);
- }
+ // Load the function from the receiver.
+ __ li(a2, Operand(expr->name()));
+ CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
- // Push the arguments ("left-to-right").
- int arg_count = args->length();
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
+ // Push the target function under the receiver.
+ __ lw(at, MemOperand(sp, 0));
+ __ push(at);
+ __ sw(v0, MemOperand(sp, kPointerSize));
+
+ // Push the arguments ("left-to-right").
+ int arg_count = args->length();
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+
+ // Record source position of the IC call.
+ SetSourcePosition(expr->position());
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
+ __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
+ __ CallStub(&stub);
- if (expr->is_jsruntime()) {
- // Call the JS runtime function.
- __ li(a2, Operand(expr->name()));
- RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
- Handle<Code> ic =
- isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+
+ context()->DropAndPlug(1, v0);
} else {
+ // Push the arguments ("left-to-right").
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
+
// Call the C runtime function.
__ CallRuntime(expr->function(), arg_count);
+ context()->Plug(v0);
}
- context()->Plug(v0);
}
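The "push the target function under the receiver" sequence above (lw at, [sp]; push at; sw v0, [sp + kPointerSize]) duplicates the receiver on top of the stack and then overwrites the old receiver slot with the function returned by the Load IC. The standalone sketch below replays that shuffle on an ordinary growable stack; the value names are made up and it is not V8 code:

// Not part of the diff: a host-side model of the stack shuffle in
// VisitCallRuntime's JS-runtime path. The "stack" grows with push_back
// instead of downwards in memory.
#include <cassert>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> stack;        // top of stack == back()
  stack.push_back("builtins-object");    // pushed as the receiver
  std::string function = "runtime-fn";   // what the Load IC leaves in v0

  // Duplicate the receiver on top, then store the function into the old
  // receiver slot: the function ends up directly under the receiver.
  stack.push_back(stack.back());         // lw at, [sp]; push at
  stack[stack.size() - 2] = function;    // sw v0, [sp + kPointerSize]

  assert(stack[stack.size() - 1] == "builtins-object");  // receiver on top
  assert(stack[stack.size() - 2] == "runtime-fn");       // function below it
  return 0;
}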
@@ -4245,9 +4122,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
- StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
- ? kNonStrictMode : kStrictMode;
- __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
+ __ li(a1, Operand(Smi::FromInt(strict_mode())));
__ push(a1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
@@ -4255,11 +4130,11 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is allowed.
- ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
+ ASSERT(strict_mode() == SLOPPY || var->is_this());
if (var->IsUnallocated()) {
__ lw(a2, GlobalObjectOperand());
__ li(a1, Operand(var->name()));
- __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
+ __ li(a0, Operand(Smi::FromInt(SLOPPY)));
__ Push(a2, a1, a0);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
@@ -4273,7 +4148,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
ASSERT(!context_register().is(a2));
__ li(a2, Operand(var->name()));
__ Push(context_register(), a2);
- __ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
context()->Plug(v0);
}
} else {
@@ -4348,16 +4223,11 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
+ ASSERT(expr->expression()->IsValidReferenceExpression());
+
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
- // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
- // as the left-hand side.
- if (!expr->expression()->IsValidLeftHandSide()) {
- VisitForEffect(expr->expression());
- return;
- }
-
// Expression can only be a property, a global or a (parameter or local)
// slot.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
@@ -4443,7 +4313,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ jmp(&stub_call);
__ bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
@@ -4473,10 +4343,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Record position before stub call.
SetSourcePosition(expr->position());
- BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
- CallIC(stub.GetCode(isolate()),
- RelocInfo::CODE_TARGET,
- expr->CountBinOpFeedbackId());
+ BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
+ CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4506,10 +4374,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(a0, result_register()); // Value.
__ li(a2, Operand(prop->key()->AsLiteral()->value())); // Name.
__ pop(a1); // Receiver.
- Handle<Code> ic = is_classic_mode()
- ? isolate()->builtins()->StoreIC_Initialize()
- : isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
+ CallStoreIC(expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4523,10 +4388,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ mov(a0, result_register()); // Value.
__ Pop(a2, a1); // a1 = key, a2 = receiver.
- Handle<Code> ic = is_classic_mode()
+ Handle<Code> ic = strict_mode() == SLOPPY
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
+ CallIC(ic, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4546,16 +4411,16 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
ASSERT(!context()->IsTest());
VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && proxy->var()->IsUnallocated()) {
- Comment cmnt(masm_, "Global variable");
+ Comment cmnt(masm_, "[ Global variable");
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(proxy->name()));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- CallIC(ic);
+ CallLoadIC(NOT_CONTEXTUAL);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
+ Comment cmnt(masm_, "[ Lookup slot");
Label done, slow;
// Generate code for loading from variables potentially shadowed
@@ -4565,7 +4430,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
__ bind(&slow);
__ li(a0, Operand(proxy->name()));
__ Push(cp, a0);
- __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+ __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
PrepareForBailout(expr, TOS_REG);
__ bind(&done);
@@ -4591,12 +4456,13 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
}
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- if (check->Equals(isolate()->heap()->number_string())) {
+ Factory* factory = isolate()->factory();
+ if (String::Equals(check, factory->number_string())) {
__ JumpIfSmi(v0, if_true);
__ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
Split(eq, v0, Operand(at), if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->string_string())) {
+ } else if (String::Equals(check, factory->string_string())) {
__ JumpIfSmi(v0, if_false);
// Check for undetectable objects => false.
__ GetObjectType(v0, v0, a1);
@@ -4605,20 +4471,20 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
__ And(a1, a1, Operand(1 << Map::kIsUndetectable));
Split(eq, a1, Operand(zero_reg),
if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->symbol_string())) {
+ } else if (String::Equals(check, factory->symbol_string())) {
__ JumpIfSmi(v0, if_false);
__ GetObjectType(v0, v0, a1);
Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->boolean_string())) {
+ } else if (String::Equals(check, factory->boolean_string())) {
__ LoadRoot(at, Heap::kTrueValueRootIndex);
__ Branch(if_true, eq, v0, Operand(at));
__ LoadRoot(at, Heap::kFalseValueRootIndex);
Split(eq, v0, Operand(at), if_true, if_false, fall_through);
} else if (FLAG_harmony_typeof &&
- check->Equals(isolate()->heap()->null_string())) {
+ String::Equals(check, factory->null_string())) {
__ LoadRoot(at, Heap::kNullValueRootIndex);
Split(eq, v0, Operand(at), if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->undefined_string())) {
+ } else if (String::Equals(check, factory->undefined_string())) {
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(if_true, eq, v0, Operand(at));
__ JumpIfSmi(v0, if_false);
@@ -4627,14 +4493,14 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
__ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
__ And(a1, a1, Operand(1 << Map::kIsUndetectable));
Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->function_string())) {
+ } else if (String::Equals(check, factory->function_string())) {
__ JumpIfSmi(v0, if_false);
STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
__ GetObjectType(v0, v0, a1);
__ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
if_true, if_false, fall_through);
- } else if (check->Equals(isolate()->heap()->object_string())) {
+ } else if (String::Equals(check, factory->object_string())) {
__ JumpIfSmi(v0, if_false);
if (!FLAG_harmony_typeof) {
__ LoadRoot(at, Heap::kNullValueRootIndex);
@@ -4686,7 +4552,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
@@ -4712,7 +4578,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ CallIC(ic, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
@@ -4746,7 +4612,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
} else {
Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ CallIC(ic, expr->CompareOperationFeedbackId());
Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);