diff options
author | Peter Varga <pvarga@inf.u-szeged.hu> | 2012-04-17 11:03:39 +0200 |
---|---|---|
committer | Qt by Nokia <qt-info@nokia.com> | 2012-04-26 08:55:55 +0200 |
commit | 227e72d1bba70d518639996aab4146b060507cd6 (patch) | |
tree | 723564e760e94a7d205a3d3300c44192e1d2f2e1 /src/3rdparty/v8/src/mips/builtins-mips.cc | |
parent | 5b1fcb16da41f711f27f2e8ff84de8b70a30b645 (diff) |
Updated V8 from git://github.com/v8/v8.git to 57f8959fb264354ba1a2e5118db512f588917061
Update V8 source to version 3.10.1.
* Added optimizations and stability improvements on all platforms.
* Various performance improvements.
* Cleaned up ScopeInfo and SerializedScopeInfo.
* Introduced extended mode.
* Implemented performance improvements to the incremental garbage
collector.
* Fixed handling of arrays in DefineOwnProperty. (issue 1756)
* Fixed GCC 4.7 warnings.
* Performance improvements for large Smi-only arrays.
* Reduced the space used by the stack for the profiling thread.
* Reduced memory use immediately after starting V8.
* Fixed VFP detection through compiler defines. (issue 1996)
* Removed static initializers in V8. (issue 1859)
* Optimized boot-up memory use.
* Optimized regular expressions.
Change-Id: I2dad3092612de279179950dae4dd43daf0463a9f
Reviewed-by: Kent Hansen <kent.hansen@nokia.com>
Diffstat (limited to 'src/3rdparty/v8/src/mips/builtins-mips.cc')
-rw-r--r-- | src/3rdparty/v8/src/mips/builtins-mips.cc | 254 |
1 file changed, 147 insertions, 107 deletions
diff --git a/src/3rdparty/v8/src/mips/builtins-mips.cc b/src/3rdparty/v8/src/mips/builtins-mips.cc index a46a786..eeb84c3 100644 --- a/src/3rdparty/v8/src/mips/builtins-mips.cc +++ b/src/3rdparty/v8/src/mips/builtins-mips.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,24 +67,42 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, ASSERT(extra_args == NO_EXTRA_ARGUMENTS); } - // JumpToExternalReference expects a0 to contain the number of arguments + // JumpToExternalReference expects s0 to contain the number of arguments // including the receiver and the extra arguments. - __ Addu(a0, a0, Operand(num_extra_args + 1)); + __ Addu(s0, a0, num_extra_args + 1); + __ sll(s1, s0, kPointerSizeLog2); + __ Subu(s1, s1, kPointerSize); __ JumpToExternalReference(ExternalReference(id, masm->isolate())); } +// Load the built-in InternalArray function from the current context. +static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, + Register result) { + // Load the global context. + + __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); + __ lw(result, + FieldMemOperand(result, GlobalObject::kGlobalContextOffset)); + // Load the InternalArray function from the global context. + __ lw(result, + MemOperand(result, + Context::SlotOffset( + Context::INTERNAL_ARRAY_FUNCTION_INDEX))); +} + + // Load the built-in Array function from the current context. static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { // Load the global context. 
__ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); __ lw(result, - FieldMemOperand(result, GlobalObject::kGlobalContextOffset)); + FieldMemOperand(result, GlobalObject::kGlobalContextOffset)); // Load the Array function from the global context. __ lw(result, - MemOperand(result, - Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); + MemOperand(result, + Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); } @@ -100,9 +118,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, Label* gc_required) { const int initial_capacity = JSArray::kPreallocatedArrayElements; STATIC_ASSERT(initial_capacity >= 0); - // Load the initial map from the array function. - __ lw(scratch1, FieldMemOperand(array_function, - JSFunction::kPrototypeOrInitialMapOffset)); + __ LoadInitialArrayMap(array_function, scratch2, scratch1); // Allocate the JSArray object together with space for a fixed array with the // requested elements. @@ -198,9 +214,7 @@ static void AllocateJSArray(MacroAssembler* masm, bool fill_with_hole, Label* gc_required) { // Load the initial map from the array function. - __ lw(elements_array_storage, - FieldMemOperand(array_function, - JSFunction::kPrototypeOrInitialMapOffset)); + __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage); if (FLAG_debug_code) { // Assert that array size is not zero. __ Assert( @@ -308,7 +322,8 @@ static void AllocateJSArray(MacroAssembler* masm, static void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) { Counters* counters = masm->isolate()->counters(); - Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array; + Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array, + has_non_smi_element, finish, cant_transition_map, not_double; // Check for array construction with zero arguments or one. 
__ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg)); @@ -322,7 +337,7 @@ static void ArrayNativeCode(MacroAssembler* masm, t1, call_generic_code); __ IncrementCounter(counters->array_function_native(), 1, a3, t0); - // Setup return value, remove receiver from stack and return. + // Set up return value, remove receiver from stack and return. __ mov(v0, a2); __ Addu(sp, sp, Operand(kPointerSize)); __ Ret(); @@ -365,7 +380,7 @@ static void ArrayNativeCode(MacroAssembler* masm, call_generic_code); __ IncrementCounter(counters->array_function_native(), 1, a2, t0); - // Setup return value, remove receiver and argument from stack and return. + // Set up return value, remove receiver and argument from stack and return. __ mov(v0, a3); __ Addu(sp, sp, Operand(2 * kPointerSize)); __ Ret(); @@ -400,14 +415,22 @@ static void ArrayNativeCode(MacroAssembler* masm, // sp[0]: last argument Label loop, entry; - __ Branch(&entry); + __ Branch(USE_DELAY_SLOT, &entry); + __ mov(t3, sp); __ bind(&loop); - __ pop(a2); + __ lw(a2, MemOperand(t3)); + if (FLAG_smi_only_arrays) { + __ JumpIfNotSmi(a2, &has_non_smi_element); + } + __ Addu(t3, t3, kPointerSize); __ Addu(t1, t1, -kPointerSize); __ sw(a2, MemOperand(t1)); __ bind(&entry); __ Branch(&loop, lt, t0, Operand(t1)); + __ bind(&finish); + __ mov(sp, t3); + // Remove caller arguments and receiver from the stack, setup return value and // return. // a0: argc @@ -416,6 +439,77 @@ static void ArrayNativeCode(MacroAssembler* masm, __ Addu(sp, sp, Operand(kPointerSize)); __ mov(v0, a3); __ Ret(); + + __ bind(&has_non_smi_element); + // Double values are handled by the runtime. + __ CheckMap( + a2, t5, Heap::kHeapNumberMapRootIndex, ¬_double, DONT_DO_SMI_CHECK); + __ bind(&cant_transition_map); + __ UndoAllocationInNewSpace(a3, t0); + __ Branch(call_generic_code); + + __ bind(¬_double); + // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS. 
+ // a3: JSArray + __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); + __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_ELEMENTS, + a2, + t5, + &cant_transition_map); + __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); + __ RecordWriteField(a3, + HeapObject::kMapOffset, + a2, + t5, + kRAHasNotBeenSaved, + kDontSaveFPRegs, + EMIT_REMEMBERED_SET, + OMIT_SMI_CHECK); + Label loop2; + __ bind(&loop2); + __ lw(a2, MemOperand(t3)); + __ Addu(t3, t3, kPointerSize); + __ Subu(t1, t1, kPointerSize); + __ sw(a2, MemOperand(t1)); + __ Branch(&loop2, lt, t0, Operand(t1)); + __ Branch(&finish); +} + + +void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- a0 : number of arguments + // -- ra : return address + // -- sp[...]: constructor arguments + // ----------------------------------- + Label generic_array_code, one_or_more_arguments, two_or_more_arguments; + + // Get the InternalArray function. + GenerateLoadInternalArrayFunction(masm, a1); + + if (FLAG_debug_code) { + // Initial map for the builtin InternalArray functions should be maps. + __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); + __ And(t0, a2, Operand(kSmiTagMask)); + __ Assert(ne, "Unexpected initial map for InternalArray function", + t0, Operand(zero_reg)); + __ GetObjectType(a2, a3, t0); + __ Assert(eq, "Unexpected initial map for InternalArray function", + t0, Operand(MAP_TYPE)); + } + + // Run the native code for the InternalArray function called as a normal + // function. + ArrayNativeCode(masm, &generic_array_code); + + // Jump to the generic array code if the specialized code cannot handle the + // construction. 
+ __ bind(&generic_array_code); + + Handle<Code> array_code = + masm->isolate()->builtins()->InternalArrayCodeGeneric(); + __ Jump(array_code, RelocInfo::CODE_TARGET); } @@ -618,7 +712,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { } -void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { +static void Generate_JSConstructStubHelper(MacroAssembler* masm, + bool is_api_function, + bool count_constructions) { // ----------- S t a t e ------------- // -- a0 : number of arguments // -- a1 : constructor function @@ -626,46 +722,6 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { // -- sp[...]: constructor arguments // ----------------------------------- - Label slow, non_function_call; - // Check that the function is not a smi. - __ And(t0, a1, Operand(kSmiTagMask)); - __ Branch(&non_function_call, eq, t0, Operand(zero_reg)); - // Check that the function is a JSFunction. - __ GetObjectType(a1, a2, a2); - __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); - - // Jump to the function-specific construct stub. - __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset)); - __ Addu(t9, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); - __ Jump(t9); - - // a0: number of arguments - // a1: called object - // a2: object type - Label do_call; - __ bind(&slow); - __ Branch(&non_function_call, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); - __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); - __ jmp(&do_call); - - __ bind(&non_function_call); - __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); - __ bind(&do_call); - // CALL_NON_FUNCTION expects the non-function constructor as receiver - // (instead of the original receiver from the call site). The receiver is - // stack element argc. - // Set expected number of arguments to zero (not changing a0). 
- __ mov(a2, zero_reg); - __ SetCallKind(t1, CALL_AS_METHOD); - __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), - RelocInfo::CODE_TARGET); -} - - -static void Generate_JSConstructStubHelper(MacroAssembler* masm, - bool is_api_function, - bool count_constructions) { // Should never count constructions for api objects. ASSERT(!is_api_function || !count_constructions); @@ -705,8 +761,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Load the initial map and verify that it is in fact a map. // a1: constructor function __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); - __ And(t0, a2, Operand(kSmiTagMask)); - __ Branch(&rt_call, eq, t0, Operand(zero_reg)); + __ JumpIfSmi(a2, &rt_call); __ GetObjectType(a2, a3, t4); __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE)); @@ -834,7 +889,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Initialize the FixedArray. // a1: constructor - // a3: number of elements in properties array (un-tagged) + // a3: number of elements in properties array (untagged) // t4: JSObject // t5: start of next object __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex); @@ -903,27 +958,20 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // t4: JSObject __ bind(&allocated); __ push(t4); - - // Push the function and the allocated receiver from the stack. - // sp[0]: receiver (newly allocated object) - // sp[1]: constructor function - // sp[2]: number of arguments (smi-tagged) - __ lw(a1, MemOperand(sp, kPointerSize)); - __ MultiPushReversed(a1.bit() | t4.bit()); + __ push(t4); // Reload the number of arguments from the stack. 
- // a1: constructor function // sp[0]: receiver - // sp[1]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) - __ lw(a3, MemOperand(sp, 4 * kPointerSize)); + // sp[1]: receiver + // sp[2]: constructor function + // sp[3]: number of arguments (smi-tagged) + __ lw(a1, MemOperand(sp, 2 * kPointerSize)); + __ lw(a3, MemOperand(sp, 3 * kPointerSize)); - // Setup pointer to last argument. + // Set up pointer to last argument. __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); - // Setup number of arguments for function call below. + // Set up number of arguments for function call below. __ srl(a0, a3, kSmiTagSize); // Copy arguments and receiver to the expression stack. @@ -932,10 +980,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // a2: address of last argument (caller sp) // a3: number of arguments (smi-tagged) // sp[0]: receiver - // sp[1]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) + // sp[1]: receiver + // sp[2]: constructor function + // sp[3]: number of arguments (smi-tagged) Label loop, entry; __ jmp(&entry); __ bind(&loop); @@ -963,13 +1010,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, NullCallWrapper(), CALL_AS_METHOD); } - // Pop the function from the stack. - // v0: result - // sp[0]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) - __ Pop(); + // Store offset of return address for deoptimizer. + if (!is_api_function && !count_constructions) { + masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); + } // Restore context from the frame. 
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -984,8 +1028,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // sp[0]: receiver (newly allocated object) // sp[1]: constructor function // sp[2]: number of arguments (smi-tagged) - __ And(t0, v0, Operand(kSmiTagMask)); - __ Branch(&use_receiver, eq, t0, Operand(zero_reg)); + __ JumpIfSmi(v0, &use_receiver); // If the type of the result (stored in its map) is less than // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. @@ -1039,7 +1082,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // ----------- S t a t e ------------- // -- a0: code entry // -- a1: function - // -- a2: reveiver_pointer + // -- a2: receiver_pointer // -- a3: argc // -- s0: argv // ----------------------------------- @@ -1054,17 +1097,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Set up the context from the function argument. __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); - // Set up the roots register. - ExternalReference roots_array_start = - ExternalReference::roots_array_start(masm->isolate()); - __ li(s6, Operand(roots_array_start)); - // Push the function and the receiver onto the stack. __ Push(a1, a2); // Copy arguments to the stack in a loop. // a3: argc - // s0: argv, ie points to first arg + // s0: argv, i.e. points to first arg Label loop, entry; __ sll(t0, a3, kPointerSizeLog2); __ addu(t2, s0, t0); @@ -1093,7 +1131,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Invoke the code and pass argc as a0. 
__ mov(a0, a3); if (is_construct) { - __ Call(masm->isolate()->builtins()->JSConstructCall()); + CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); + __ CallStub(&stub); } else { ParameterCount actual(a0); __ InvokeFunction(a1, actual, CALL_FUNCTION, @@ -1284,8 +1323,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ sll(at, a0, kPointerSizeLog2); __ addu(at, sp, at); __ lw(a1, MemOperand(at)); - __ And(at, a1, Operand(kSmiTagMask)); - __ Branch(&non_function, eq, at, Operand(zero_reg)); + __ JumpIfSmi(a1, &non_function); __ GetObjectType(a1, a2, a2); __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); @@ -1524,8 +1562,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); // Compute the receiver in non-strict mode. - __ And(t3, a0, Operand(kSmiTagMask)); - __ Branch(&call_to_object, eq, t3, Operand(zero_reg)); + __ JumpIfSmi(a0, &call_to_object); __ LoadRoot(a1, Heap::kNullValueRootIndex); __ Branch(&use_global_receiver, eq, a0, Operand(a1)); __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); @@ -1698,8 +1735,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { __ bind(&too_few); EnterArgumentsAdaptorFrame(masm); - // TODO(MIPS): Optimize these loops. - // Calculate copy start address into a0 and copy end address is fp. // a0: actual number of arguments as a smi // a1: function @@ -1721,9 +1756,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { Label copy; __ bind(©); __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver. - __ push(t0); + __ Subu(sp, sp, kPointerSize); __ Subu(a0, a0, kPointerSize); - __ Branch(©, ne, a0, Operand(t3)); + __ Branch(USE_DELAY_SLOT, ©, ne, a0, Operand(t3)); + __ sw(t0, MemOperand(sp)); // In the delay slot. // Fill the remaining expected arguments with undefined. 
// a1: function @@ -1736,8 +1772,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { Label fill; __ bind(&fill); - __ push(t0); - __ Branch(&fill, ne, sp, Operand(a2)); + __ Subu(sp, sp, kPointerSize); + __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2)); + __ sw(t0, MemOperand(sp)); } // Call the entry point. @@ -1745,6 +1782,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { __ Call(a3); + // Store offset of return address for deoptimizer. + masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); + // Exit frame and return. LeaveArgumentsAdaptorFrame(masm); __ Ret(); |