summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSergio Ahumada <sergio.ahumada@digia.com>2013-03-19 09:25:14 +0100
committerSergio Ahumada <sergio.ahumada@digia.com>2013-03-19 09:56:31 +0100
commit6313e1fe4c27755adde87e62db1c2f9fac534ae4 (patch)
treec57bb29f65e02fbfcc07895a8cc2903fff9300ba
parentb5a49a260d03249c386f1b63c249089383dd81fa (diff)
parentcac65e7a222b848a735a974b0aeb43209b0cfa18 (diff)
Merge branch 'dev' into stable
This starts Qt 5.1 release cycle Change-Id: I892bbc73c276842894a720f761ce31ad1b015672
-rw-r--r--src/3rdparty/v8/.gitignore26
-rw-r--r--src/3rdparty/v8/AUTHORS6
-rw-r--r--src/3rdparty/v8/ChangeLog583
-rw-r--r--src/3rdparty/v8/DEPS2
-rw-r--r--src/3rdparty/v8/Makefile112
-rw-r--r--src/3rdparty/v8/Makefile.android92
-rw-r--r--src/3rdparty/v8/OWNERS11
-rw-r--r--src/3rdparty/v8/PRESUBMIT.py71
-rw-r--r--src/3rdparty/v8/SConstruct49
-rw-r--r--src/3rdparty/v8/build/android.gypi93
-rw-r--r--src/3rdparty/v8/build/common.gypi104
-rw-r--r--src/3rdparty/v8/build/standalone.gypi11
-rw-r--r--src/3rdparty/v8/include/v8-debug.h10
-rw-r--r--src/3rdparty/v8/include/v8-preparser.h7
-rw-r--r--src/3rdparty/v8/include/v8-profiler.h33
-rw-r--r--src/3rdparty/v8/include/v8-testing.h7
-rw-r--r--src/3rdparty/v8/include/v8.h351
-rw-r--r--src/3rdparty/v8/preparser/preparser-process.cc6
-rw-r--r--src/3rdparty/v8/samples/lineprocessor.cc38
-rw-r--r--src/3rdparty/v8/samples/process.cc14
-rw-r--r--src/3rdparty/v8/samples/shell.cc2
-rwxr-xr-xsrc/3rdparty/v8/src/SConscript28
-rw-r--r--src/3rdparty/v8/src/accessors.cc195
-rw-r--r--src/3rdparty/v8/src/accessors.h4
-rw-r--r--src/3rdparty/v8/src/allocation-inl.h4
-rw-r--r--src/3rdparty/v8/src/allocation.h14
-rw-r--r--src/3rdparty/v8/src/api.cc515
-rw-r--r--src/3rdparty/v8/src/api.h143
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm-inl.h158
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm.cc363
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm.h125
-rw-r--r--src/3rdparty/v8/src/arm/builtins-arm.cc107
-rw-r--r--src/3rdparty/v8/src/arm/code-stubs-arm.cc625
-rw-r--r--src/3rdparty/v8/src/arm/code-stubs-arm.h20
-rw-r--r--src/3rdparty/v8/src/arm/codegen-arm.cc127
-rw-r--r--src/3rdparty/v8/src/arm/codegen-arm.h3
-rw-r--r--src/3rdparty/v8/src/arm/constants-arm.h37
-rw-r--r--src/3rdparty/v8/src/arm/cpu-arm.cc7
-rw-r--r--src/3rdparty/v8/src/arm/debug-arm.cc4
-rw-r--r--src/3rdparty/v8/src/arm/deoptimizer-arm.cc162
-rw-r--r--src/3rdparty/v8/src/arm/disasm-arm.cc36
-rw-r--r--src/3rdparty/v8/src/arm/full-codegen-arm.cc425
-rw-r--r--src/3rdparty/v8/src/arm/ic-arm.cc273
-rw-r--r--src/3rdparty/v8/src/arm/lithium-arm.cc501
-rw-r--r--src/3rdparty/v8/src/arm/lithium-arm.h651
-rw-r--r--src/3rdparty/v8/src/arm/lithium-codegen-arm.cc1580
-rw-r--r--src/3rdparty/v8/src/arm/lithium-codegen-arm.h87
-rw-r--r--src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc4
-rw-r--r--src/3rdparty/v8/src/arm/macro-assembler-arm.cc490
-rw-r--r--src/3rdparty/v8/src/arm/macro-assembler-arm.h128
-rw-r--r--src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc201
-rw-r--r--src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h26
-rw-r--r--src/3rdparty/v8/src/arm/simulator-arm.cc284
-rw-r--r--src/3rdparty/v8/src/arm/simulator-arm.h48
-rw-r--r--src/3rdparty/v8/src/arm/stub-cache-arm.cc555
-rw-r--r--src/3rdparty/v8/src/array.js168
-rw-r--r--src/3rdparty/v8/src/assembler.cc136
-rw-r--r--src/3rdparty/v8/src/assembler.h85
-rw-r--r--src/3rdparty/v8/src/ast.cc110
-rw-r--r--src/3rdparty/v8/src/ast.h471
-rw-r--r--src/3rdparty/v8/src/atomicops.h6
-rw-r--r--src/3rdparty/v8/src/atomicops_internals_x86_msvc.h6
-rw-r--r--src/3rdparty/v8/src/bootstrapper.cc837
-rw-r--r--src/3rdparty/v8/src/bootstrapper.h4
-rw-r--r--src/3rdparty/v8/src/builtins.cc154
-rw-r--r--src/3rdparty/v8/src/builtins.h41
-rw-r--r--src/3rdparty/v8/src/checks.h8
-rw-r--r--src/3rdparty/v8/src/code-stubs.cc70
-rw-r--r--src/3rdparty/v8/src/code-stubs.h63
-rw-r--r--src/3rdparty/v8/src/codegen.h6
-rw-r--r--src/3rdparty/v8/src/collection.js71
-rw-r--r--src/3rdparty/v8/src/compilation-cache.cc40
-rw-r--r--src/3rdparty/v8/src/compilation-cache.h21
-rw-r--r--src/3rdparty/v8/src/compiler-intrinsics.h24
-rw-r--r--src/3rdparty/v8/src/compiler.cc562
-rw-r--r--src/3rdparty/v8/src/compiler.h198
-rw-r--r--src/3rdparty/v8/src/contexts.cc83
-rw-r--r--src/3rdparty/v8/src/contexts.h105
-rw-r--r--src/3rdparty/v8/src/conversions-inl.h36
-rw-r--r--src/3rdparty/v8/src/conversions.h11
-rw-r--r--src/3rdparty/v8/src/counters.cc26
-rw-r--r--src/3rdparty/v8/src/counters.h60
-rw-r--r--src/3rdparty/v8/src/cpu-profiler.h2
-rw-r--r--src/3rdparty/v8/src/d8.cc710
-rw-r--r--src/3rdparty/v8/src/d8.h25
-rw-r--r--src/3rdparty/v8/src/date.js41
-rw-r--r--src/3rdparty/v8/src/dateparser-inl.h6
-rw-r--r--src/3rdparty/v8/src/debug-agent.cc6
-rw-r--r--src/3rdparty/v8/src/debug-debugger.js38
-rw-r--r--src/3rdparty/v8/src/debug.cc429
-rw-r--r--src/3rdparty/v8/src/debug.h30
-rw-r--r--src/3rdparty/v8/src/deoptimizer.cc615
-rw-r--r--src/3rdparty/v8/src/deoptimizer.h109
-rw-r--r--src/3rdparty/v8/src/disassembler.cc4
-rw-r--r--src/3rdparty/v8/src/elements-kind.cc139
-rw-r--r--src/3rdparty/v8/src/elements-kind.h229
-rw-r--r--src/3rdparty/v8/src/elements.cc636
-rw-r--r--src/3rdparty/v8/src/elements.h16
-rw-r--r--src/3rdparty/v8/src/execution.cc76
-rw-r--r--src/3rdparty/v8/src/execution.h5
-rw-r--r--src/3rdparty/v8/src/extensions/gc-extension.cc6
-rw-r--r--src/3rdparty/v8/src/extensions/statistics-extension.cc153
-rw-r--r--src/3rdparty/v8/src/extensions/statistics-extension.h49
-rw-r--r--src/3rdparty/v8/src/factory.cc226
-rw-r--r--src/3rdparty/v8/src/factory.h56
-rw-r--r--src/3rdparty/v8/src/flag-definitions.h78
-rw-r--r--src/3rdparty/v8/src/flags.cc14
-rw-r--r--src/3rdparty/v8/src/frames.cc66
-rw-r--r--src/3rdparty/v8/src/frames.h9
-rw-r--r--src/3rdparty/v8/src/full-codegen.cc138
-rw-r--r--src/3rdparty/v8/src/full-codegen.h59
-rw-r--r--src/3rdparty/v8/src/func-name-inferrer.cc15
-rw-r--r--src/3rdparty/v8/src/func-name-inferrer.h10
-rw-r--r--src/3rdparty/v8/src/gdb-jit.cc128
-rw-r--r--src/3rdparty/v8/src/global-handles.cc45
-rw-r--r--src/3rdparty/v8/src/global-handles.h15
-rw-r--r--src/3rdparty/v8/src/globals.h34
-rw-r--r--src/3rdparty/v8/src/handles-inl.h30
-rw-r--r--src/3rdparty/v8/src/handles.cc203
-rw-r--r--src/3rdparty/v8/src/handles.h45
-rw-r--r--src/3rdparty/v8/src/hashmap.h100
-rw-r--r--src/3rdparty/v8/src/heap-inl.h94
-rw-r--r--src/3rdparty/v8/src/heap-profiler.cc14
-rw-r--r--src/3rdparty/v8/src/heap-profiler.h6
-rw-r--r--src/3rdparty/v8/src/heap.cc1163
-rw-r--r--src/3rdparty/v8/src/heap.h325
-rw-r--r--src/3rdparty/v8/src/hydrogen-instructions.cc483
-rw-r--r--src/3rdparty/v8/src/hydrogen-instructions.h994
-rw-r--r--src/3rdparty/v8/src/hydrogen.cc2942
-rw-r--r--src/3rdparty/v8/src/hydrogen.h326
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32-inl.h48
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32.cc61
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32.h24
-rw-r--r--src/3rdparty/v8/src/ia32/builtins-ia32.cc90
-rw-r--r--src/3rdparty/v8/src/ia32/code-stubs-ia32.cc272
-rw-r--r--src/3rdparty/v8/src/ia32/codegen-ia32.cc103
-rw-r--r--src/3rdparty/v8/src/ia32/codegen-ia32.h4
-rw-r--r--src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc240
-rw-r--r--src/3rdparty/v8/src/ia32/disasm-ia32.cc22
-rw-r--r--src/3rdparty/v8/src/ia32/frames-ia32.h6
-rw-r--r--src/3rdparty/v8/src/ia32/full-codegen-ia32.cc373
-rw-r--r--src/3rdparty/v8/src/ia32/ic-ia32.cc234
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc1240
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h54
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc4
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-ia32.cc554
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-ia32.h672
-rw-r--r--src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc427
-rw-r--r--src/3rdparty/v8/src/ia32/macro-assembler-ia32.h65
-rw-r--r--src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc179
-rw-r--r--src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h25
-rw-r--r--src/3rdparty/v8/src/ia32/simulator-ia32.h8
-rw-r--r--src/3rdparty/v8/src/ia32/stub-cache-ia32.cc436
-rw-r--r--src/3rdparty/v8/src/ic-inl.h10
-rw-r--r--src/3rdparty/v8/src/ic.cc330
-rw-r--r--src/3rdparty/v8/src/ic.h32
-rw-r--r--src/3rdparty/v8/src/incremental-marking-inl.h29
-rw-r--r--src/3rdparty/v8/src/incremental-marking.cc393
-rw-r--r--src/3rdparty/v8/src/incremental-marking.h51
-rw-r--r--src/3rdparty/v8/src/interface.cc46
-rw-r--r--src/3rdparty/v8/src/interface.h59
-rw-r--r--src/3rdparty/v8/src/isolate.cc180
-rw-r--r--src/3rdparty/v8/src/isolate.h91
-rw-r--r--src/3rdparty/v8/src/json-parser.h266
-rw-r--r--src/3rdparty/v8/src/json-stringifier.h800
-rw-r--r--src/3rdparty/v8/src/json.js142
-rw-r--r--src/3rdparty/v8/src/jsregexp.cc741
-rw-r--r--src/3rdparty/v8/src/jsregexp.h317
-rw-r--r--src/3rdparty/v8/src/list-inl.h46
-rw-r--r--src/3rdparty/v8/src/list.h62
-rw-r--r--src/3rdparty/v8/src/lithium-allocator.cc89
-rw-r--r--src/3rdparty/v8/src/lithium-allocator.h7
-rw-r--r--src/3rdparty/v8/src/lithium.cc213
-rw-r--r--src/3rdparty/v8/src/lithium.h152
-rw-r--r--src/3rdparty/v8/src/liveedit-debugger.js18
-rw-r--r--src/3rdparty/v8/src/liveedit.cc290
-rw-r--r--src/3rdparty/v8/src/liveedit.h10
-rw-r--r--src/3rdparty/v8/src/liveobjectlist.cc4
-rw-r--r--src/3rdparty/v8/src/log.cc113
-rw-r--r--src/3rdparty/v8/src/log.h25
-rw-r--r--src/3rdparty/v8/src/mark-compact-inl.h26
-rw-r--r--src/3rdparty/v8/src/mark-compact.cc1234
-rw-r--r--src/3rdparty/v8/src/mark-compact.h149
-rw-r--r--src/3rdparty/v8/src/messages.cc19
-rw-r--r--src/3rdparty/v8/src/messages.js440
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips-inl.h10
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips.cc26
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips.h46
-rw-r--r--src/3rdparty/v8/src/mips/builtins-mips.cc72
-rw-r--r--src/3rdparty/v8/src/mips/code-stubs-mips.cc337
-rw-r--r--src/3rdparty/v8/src/mips/codegen-mips.cc4
-rw-r--r--src/3rdparty/v8/src/mips/deoptimizer-mips.cc155
-rw-r--r--src/3rdparty/v8/src/mips/full-codegen-mips.cc372
-rw-r--r--src/3rdparty/v8/src/mips/ic-mips.cc275
-rw-r--r--src/3rdparty/v8/src/mips/lithium-codegen-mips.cc1200
-rw-r--r--src/3rdparty/v8/src/mips/lithium-codegen-mips.h64
-rw-r--r--src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc4
-rw-r--r--src/3rdparty/v8/src/mips/lithium-mips.cc482
-rw-r--r--src/3rdparty/v8/src/mips/lithium-mips.h626
-rw-r--r--src/3rdparty/v8/src/mips/macro-assembler-mips.cc280
-rw-r--r--src/3rdparty/v8/src/mips/macro-assembler-mips.h61
-rw-r--r--src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc183
-rw-r--r--src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h23
-rw-r--r--src/3rdparty/v8/src/mips/simulator-mips.cc18
-rw-r--r--src/3rdparty/v8/src/mips/simulator-mips.h10
-rw-r--r--src/3rdparty/v8/src/mips/stub-cache-mips.cc451
-rw-r--r--src/3rdparty/v8/src/mirror-debugger.js162
-rw-r--r--src/3rdparty/v8/src/misc-intrinsics.h2
-rw-r--r--src/3rdparty/v8/src/mksnapshot.cc106
-rw-r--r--src/3rdparty/v8/src/object-observe.js240
-rw-r--r--src/3rdparty/v8/src/objects-debug.cc346
-rw-r--r--src/3rdparty/v8/src/objects-inl.h1408
-rw-r--r--src/3rdparty/v8/src/objects-printer.cc538
-rw-r--r--src/3rdparty/v8/src/objects-visiting-inl.h535
-rw-r--r--src/3rdparty/v8/src/objects-visiting.cc2
-rw-r--r--src/3rdparty/v8/src/objects-visiting.h219
-rw-r--r--src/3rdparty/v8/src/objects.cc5007
-rw-r--r--src/3rdparty/v8/src/objects.h1747
-rw-r--r--src/3rdparty/v8/src/optimizing-compiler-thread.cc132
-rw-r--r--src/3rdparty/v8/src/optimizing-compiler-thread.h101
-rw-r--r--src/3rdparty/v8/src/parser.cc809
-rw-r--r--src/3rdparty/v8/src/parser.h77
-rw-r--r--src/3rdparty/v8/src/platform-cygwin.cc6
-rw-r--r--src/3rdparty/v8/src/platform-freebsd.cc6
-rw-r--r--src/3rdparty/v8/src/platform-linux.cc106
-rw-r--r--src/3rdparty/v8/src/platform-macos.cc27
-rw-r--r--src/3rdparty/v8/src/platform-nullos.cc13
-rw-r--r--src/3rdparty/v8/src/platform-openbsd.cc11
-rw-r--r--src/3rdparty/v8/src/platform-posix.cc5
-rw-r--r--src/3rdparty/v8/src/platform-qnx.cc19
-rw-r--r--src/3rdparty/v8/src/platform-solaris.cc14
-rw-r--r--src/3rdparty/v8/src/platform-tls-win32.h2
-rw-r--r--src/3rdparty/v8/src/platform-win32.cc125
-rw-r--r--src/3rdparty/v8/src/platform.h32
-rw-r--r--src/3rdparty/v8/src/preparser.cc13
-rw-r--r--src/3rdparty/v8/src/profile-generator-inl.h21
-rw-r--r--src/3rdparty/v8/src/profile-generator.cc507
-rw-r--r--src/3rdparty/v8/src/profile-generator.h63
-rw-r--r--src/3rdparty/v8/src/property-details.h73
-rw-r--r--src/3rdparty/v8/src/property.cc107
-rw-r--r--src/3rdparty/v8/src/property.h179
-rw-r--r--src/3rdparty/v8/src/proxy.js9
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc20
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h7
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc23
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-tracer.h3
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler.cc21
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler.h27
-rw-r--r--src/3rdparty/v8/src/regexp-stack.cc1
-rw-r--r--src/3rdparty/v8/src/regexp.js15
-rw-r--r--src/3rdparty/v8/src/rewriter.cc14
-rw-r--r--src/3rdparty/v8/src/runtime-profiler.cc62
-rw-r--r--src/3rdparty/v8/src/runtime.cc2403
-rw-r--r--src/3rdparty/v8/src/runtime.h50
-rw-r--r--src/3rdparty/v8/src/safepoint-table.cc19
-rw-r--r--src/3rdparty/v8/src/safepoint-table.h19
-rwxr-xr-xsrc/3rdparty/v8/src/scanner.cc6
-rw-r--r--src/3rdparty/v8/src/scopeinfo.cc16
-rw-r--r--src/3rdparty/v8/src/scopes.cc399
-rw-r--r--src/3rdparty/v8/src/scopes.h66
-rw-r--r--src/3rdparty/v8/src/serialize.cc716
-rw-r--r--src/3rdparty/v8/src/serialize.h233
-rw-r--r--src/3rdparty/v8/src/small-pointer-list.h22
-rw-r--r--src/3rdparty/v8/src/smart-pointers.h (renamed from src/3rdparty/v8/src/smart-array-pointer.h)67
-rw-r--r--src/3rdparty/v8/src/snapshot-common.cc79
-rw-r--r--src/3rdparty/v8/src/snapshot-empty.cc8
-rw-r--r--src/3rdparty/v8/src/snapshot.h11
-rw-r--r--src/3rdparty/v8/src/spaces-inl.h17
-rw-r--r--src/3rdparty/v8/src/spaces.cc194
-rw-r--r--src/3rdparty/v8/src/spaces.h111
-rw-r--r--src/3rdparty/v8/src/splay-tree-inl.h15
-rw-r--r--src/3rdparty/v8/src/splay-tree.h34
-rw-r--r--src/3rdparty/v8/src/store-buffer.cc6
-rw-r--r--src/3rdparty/v8/src/store-buffer.h2
-rw-r--r--src/3rdparty/v8/src/string-stream.cc17
-rw-r--r--src/3rdparty/v8/src/stub-cache.cc175
-rw-r--r--src/3rdparty/v8/src/stub-cache.h81
-rw-r--r--src/3rdparty/v8/src/token.h1
-rw-r--r--src/3rdparty/v8/src/transitions-inl.h220
-rw-r--r--src/3rdparty/v8/src/transitions.cc160
-rw-r--r--src/3rdparty/v8/src/transitions.h207
-rw-r--r--src/3rdparty/v8/src/type-info.cc173
-rw-r--r--src/3rdparty/v8/src/type-info.h32
-rw-r--r--src/3rdparty/v8/src/unicode-inl.h2
-rw-r--r--src/3rdparty/v8/src/unicode.h6
-rw-r--r--src/3rdparty/v8/src/utils.h62
-rw-r--r--src/3rdparty/v8/src/v8-counters.cc58
-rw-r--r--src/3rdparty/v8/src/v8-counters.h144
-rw-r--r--src/3rdparty/v8/src/v8.cc26
-rw-r--r--src/3rdparty/v8/src/v8.h6
-rw-r--r--src/3rdparty/v8/src/v8globals.h70
-rw-r--r--src/3rdparty/v8/src/v8natives.js20
-rw-r--r--src/3rdparty/v8/src/v8threads.cc23
-rw-r--r--src/3rdparty/v8/src/v8threads.h5
-rw-r--r--src/3rdparty/v8/src/v8utils.cc17
-rw-r--r--src/3rdparty/v8/src/v8utils.h5
-rw-r--r--src/3rdparty/v8/src/variables.cc7
-rw-r--r--src/3rdparty/v8/src/variables.h15
-rw-r--r--src/3rdparty/v8/src/version.cc4
-rw-r--r--src/3rdparty/v8/src/vm-state-inl.h2
-rw-r--r--src/3rdparty/v8/src/win32-headers.h24
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64-inl.h45
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64.cc25
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64.h27
-rw-r--r--src/3rdparty/v8/src/x64/builtins-x64.cc96
-rw-r--r--src/3rdparty/v8/src/x64/code-stubs-x64.cc335
-rw-r--r--src/3rdparty/v8/src/x64/codegen-x64.cc91
-rw-r--r--src/3rdparty/v8/src/x64/codegen-x64.h2
-rw-r--r--src/3rdparty/v8/src/x64/deoptimizer-x64.cc158
-rw-r--r--src/3rdparty/v8/src/x64/disasm-x64.cc3
-rw-r--r--src/3rdparty/v8/src/x64/full-codegen-x64.cc381
-rw-r--r--src/3rdparty/v8/src/x64/ic-x64.cc240
-rw-r--r--src/3rdparty/v8/src/x64/lithium-codegen-x64.cc1263
-rw-r--r--src/3rdparty/v8/src/x64/lithium-codegen-x64.h52
-rw-r--r--src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc4
-rw-r--r--src/3rdparty/v8/src/x64/lithium-x64.cc511
-rw-r--r--src/3rdparty/v8/src/x64/lithium-x64.h605
-rw-r--r--src/3rdparty/v8/src/x64/macro-assembler-x64.cc494
-rw-r--r--src/3rdparty/v8/src/x64/macro-assembler-x64.h76
-rw-r--r--src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc201
-rw-r--r--src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h30
-rw-r--r--src/3rdparty/v8/src/x64/simulator-x64.h8
-rw-r--r--src/3rdparty/v8/src/x64/stub-cache-x64.cc437
-rw-r--r--src/3rdparty/v8/src/zone-inl.h36
-rw-r--r--src/3rdparty/v8/src/zone.cc10
-rw-r--r--src/3rdparty/v8/src/zone.h80
-rw-r--r--src/3rdparty/v8/test/benchmarks/testcfg.py5
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.gyp3
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.h19
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.status28
-rw-r--r--src/3rdparty/v8/test/cctest/test-alloc.cc26
-rw-r--r--src/3rdparty/v8/test/cctest/test-api.cc1409
-rw-r--r--src/3rdparty/v8/test/cctest/test-assembler-arm.cc14
-rw-r--r--src/3rdparty/v8/test/cctest/test-ast.cc6
-rw-r--r--src/3rdparty/v8/test/cctest/test-compiler.cc76
-rw-r--r--src/3rdparty/v8/test/cctest/test-dataflow.cc4
-rw-r--r--src/3rdparty/v8/test/cctest/test-debug.cc210
-rw-r--r--src/3rdparty/v8/test/cctest/test-decls.cc327
-rw-r--r--src/3rdparty/v8/test/cctest/test-dictionary.cc28
-rw-r--r--src/3rdparty/v8/test/cctest/test-disasm-arm.cc8
-rw-r--r--src/3rdparty/v8/test/cctest/test-flags.cc15
-rw-r--r--src/3rdparty/v8/test/cctest/test-func-name-inference.cc49
-rw-r--r--src/3rdparty/v8/test/cctest/test-heap-profiler.cc207
-rw-r--r--src/3rdparty/v8/test/cctest/test-heap.cc876
-rw-r--r--src/3rdparty/v8/test/cctest/test-list.cc2
-rw-r--r--src/3rdparty/v8/test/cctest/test-liveedit.cc5
-rw-r--r--src/3rdparty/v8/test/cctest/test-mark-compact.cc31
-rw-r--r--src/3rdparty/v8/test/cctest/test-object-observe.cc196
-rwxr-xr-xsrc/3rdparty/v8/test/cctest/test-parsing.cc39
-rw-r--r--src/3rdparty/v8/test/cctest/test-platform-linux.cc6
-rw-r--r--src/3rdparty/v8/test/cctest/test-platform-win32.cc7
-rw-r--r--src/3rdparty/v8/test/cctest/test-random.cc6
-rw-r--r--src/3rdparty/v8/test/cctest/test-regexp.cc234
-rw-r--r--src/3rdparty/v8/test/cctest/test-serialize.cc363
-rw-r--r--src/3rdparty/v8/test/cctest/test-sockets.cc2
-rw-r--r--src/3rdparty/v8/test/cctest/test-strings.cc63
-rw-r--r--src/3rdparty/v8/test/cctest/test-utils.cc16
-rw-r--r--src/3rdparty/v8/test/cctest/test-weakmaps.cc3
-rw-r--r--src/3rdparty/v8/test/cctest/testcfg.py70
-rw-r--r--src/3rdparty/v8/test/es5conform/testcfg.py5
-rw-r--r--src/3rdparty/v8/test/message/message.status2
-rw-r--r--src/3rdparty/v8/test/message/testcfg.py97
-rw-r--r--src/3rdparty/v8/test/message/try-catch-finally-no-message.out52
-rw-r--r--src/3rdparty/v8/test/mjsunit/accessor-map-sharing.js192
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-bounds-check-removal.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-construct-transition.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-iteration.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-literal-transitions.js22
-rw-r--r--src/3rdparty/v8/test/mjsunit/assert-opt-and-deopt.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/bugs/bug-2337.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/compare-known-objects-slow.js69
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-accessors.js368
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js88
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js98
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/optimized-closures.js57
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/proto-chain-load.js44
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-gvn.js5
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-or.js8
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/rotate.js224
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/uint32.js173
-rw-r--r--src/3rdparty/v8/test/mjsunit/count-based-osr.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/d8-os.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/date-parse.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/date.js22
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-break-inline.js1
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js31
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-liveedit-double-call.js142
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-liveedit-restart-frame.js153
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-multiple-breakpoints.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-closure.js67
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-nested.js82
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-script.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part1.js190
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part2.js83
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part3.js80
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part4.js80
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part5.js77
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part6.js79
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part7.js79
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part8.js (renamed from src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js)189
-rw-r--r--src/3rdparty/v8/test/mjsunit/deopt-minus-zero.js56
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-kind.js14
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-length-no-holey.js33
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js69
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-transition.js10
-rw-r--r--src/3rdparty/v8/test/mjsunit/error-accessors.js54
-rw-r--r--src/3rdparty/v8/test/mjsunit/eval-stack-trace.js203
-rw-r--r--src/3rdparty/v8/test/mjsunit/external-array.js365
-rw-r--r--src/3rdparty/v8/test/mjsunit/fast-array-length.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/fast-non-keyed.js113
-rw-r--r--src/3rdparty/v8/test/mjsunit/fast-prototype.js113
-rw-r--r--src/3rdparty/v8/test/mjsunit/fuzz-natives-part1.js (renamed from src/3rdparty/v8/test/mjsunit/fuzz-natives.js)5
-rw-r--r--src/3rdparty/v8/test/mjsunit/fuzz-natives-part2.js222
-rw-r--r--src/3rdparty/v8/test/mjsunit/fuzz-natives-part3.js222
-rw-r--r--src/3rdparty/v8/test/mjsunit/fuzz-natives-part4.js222
-rw-r--r--src/3rdparty/v8/test/mjsunit/greedy.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/collections.js80
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-linking.js183
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js31
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-recompile.js87
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js17
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/object-observe.js591
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/proxies-json.js178
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/proxies.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/json-recursive.js61
-rw-r--r--src/3rdparty/v8/test/mjsunit/json.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/json2.js153
-rw-r--r--src/3rdparty/v8/test/mjsunit/limit-locals.js9
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-negative.js59
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-of-div-minus-zero.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-part1.js (renamed from src/3rdparty/v8/test/mjsunit/math-floor.js)73
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-part2.js76
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-part3.js78
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-floor-part4.js76
-rw-r--r--src/3rdparty/v8/test/mjsunit/mirror-object.js52
-rw-r--r--src/3rdparty/v8/test/mjsunit/mjsunit.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/mjsunit.status67
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part1.js491
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part10.js470
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part2.js525
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part3.js532
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part4.js509
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part5.js505
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part6.js554
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part7.js497
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part8.js526
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive-part9.js533
-rw-r--r--src/3rdparty/v8/test/mjsunit/mul-exhaustive.js4629
-rw-r--r--src/3rdparty/v8/test/mjsunit/new-function.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/numops-fuzz-part1.js1172
-rw-r--r--src/3rdparty/v8/test/mjsunit/numops-fuzz-part2.js1178
-rw-r--r--src/3rdparty/v8/test/mjsunit/numops-fuzz-part3.js1178
-rw-r--r--src/3rdparty/v8/test/mjsunit/numops-fuzz-part4.js1177
-rw-r--r--src/3rdparty/v8/test/mjsunit/numops-fuzz.js4609
-rw-r--r--src/3rdparty/v8/test/mjsunit/object-define-property.js109
-rw-r--r--src/3rdparty/v8/test/mjsunit/override-read-only-property.js10
-rw-r--r--src/3rdparty/v8/test/mjsunit/packed-elements.js112
-rw-r--r--src/3rdparty/v8/test/mjsunit/parse-int-float.js8
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/readonly.js228
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/test/mjsunit/regexp-capture-3.js3
-rwxr-xr-xsrc/3rdparty/v8/test/mjsunit/regexp-capture.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regexp-global.js254
-rw-r--r--src/3rdparty/v8/test/mjsunit/regexp-results-cache.js78
-rw-r--r--src/3rdparty/v8/test/mjsunit/regexp.js11
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1117.js15
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1118.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-115100.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-117409.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1199637.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-128146.js11
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-131923.js30
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-131994.js70
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-133211.js35
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-133211b.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-136048.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-137768.js73
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-143967.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-145201.js107
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-148378.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1563.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1591.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1849.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1878.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1969.js5045
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2119.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2153.js32
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2156.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2163.js70
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2170.js58
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2172.js35
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2185-2.js145
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2185.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2186.js49
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2193.js58
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2219.js32
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2225.js65
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2226.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2234.js41
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2249.js33
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2250.js68
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2261.js113
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2284.js32
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2285.js32
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2286.js32
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2289.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2291.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2294.js70
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2296.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2318.js66
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2322.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2326.js54
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2336.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2339.js59
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2346.js123
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2373.js29
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2374.js33
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2398.js41
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-334.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-builtin-array-op.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-elements.js43
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js45
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js46
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum.js60
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum2.js46
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-convert-transition.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-122271.js8
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-125148.js90
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134055.js63
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134609.js59
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135008.js45
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135066.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-137689.js47
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-138887.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-140083.js44
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142087.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142218.js44
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-145961.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-146910.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-147475.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-148376.js35
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150545.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150729.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157019.js54
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157520.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-158185.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-160010.js33
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-deep-proto.js45
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-delete-empty-double.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-iteration-order.js42
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-json-stringify-gc.js41
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-load-elements.js49
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/stack-traces.js14
-rw-r--r--src/3rdparty/v8/test/mjsunit/str-to-num.js9
-rw-r--r--src/3rdparty/v8/test/mjsunit/string-charcodeat.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/testcfg.py82
-rw-r--r--src/3rdparty/v8/test/mjsunit/try-finally-continue.js72
-rw-r--r--src/3rdparty/v8/test/mjsunit/typed-array-slice.js61
-rw-r--r--src/3rdparty/v8/test/mjsunit/unbox-double-arrays.js9
-rw-r--r--src/3rdparty/v8/test/mjsunit/with-readonly.js6
-rw-r--r--src/3rdparty/v8/test/mozilla/mozilla.status99
-rw-r--r--src/3rdparty/v8/test/mozilla/testcfg.py125
-rw-r--r--src/3rdparty/v8/test/preparser/preparser.status5
-rw-r--r--src/3rdparty/v8/test/preparser/strict-identifiers.pyt2
-rw-r--r--src/3rdparty/v8/test/preparser/testcfg.py106
-rw-r--r--src/3rdparty/v8/test/sputnik/sputnik.status2
-rw-r--r--src/3rdparty/v8/test/sputnik/testcfg.py5
-rw-r--r--src/3rdparty/v8/test/test262/test262.status39
-rw-r--r--src/3rdparty/v8/test/test262/testcfg.py105
-rwxr-xr-xsrc/3rdparty/v8/tools/android-build.sh0
-rwxr-xr-xsrc/3rdparty/v8/tools/android-ll-prof.sh69
-rwxr-xr-xsrc/3rdparty/v8/tools/android-run.py109
-rwxr-xr-xsrc/3rdparty/v8/tools/android-sync.sh105
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/tools/bash-completion.sh0
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/tools/check-static-initializers.sh0
-rw-r--r--src/3rdparty/v8/tools/common-includes.sh37
-rwxr-xr-xsrc/3rdparty/v8/tools/fuzz-harness.sh92
-rw-r--r--src/3rdparty/v8/tools/gen-postmortem-metadata.py6
-rwxr-xr-xsrc/3rdparty/v8/tools/grokdump.py911
-rw-r--r--src/3rdparty/v8/tools/gyp/v8.gyp81
-rwxr-xr-xsrc/3rdparty/v8/tools/linux-tick-processor10
-rwxr-xr-xsrc/3rdparty/v8/tools/ll_prof.py27
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/tools/merge-to-branch.sh4
-rwxr-xr-xsrc/3rdparty/v8/tools/presubmit.py1
-rwxr-xr-xsrc/3rdparty/v8/tools/push-to-trunk.sh18
-rwxr-xr-xsrc/3rdparty/v8/tools/run-tests.py369
-rwxr-xr-xsrc/3rdparty/v8/tools/status-file-converter.py39
-rwxr-xr-xsrc/3rdparty/v8/tools/test-server.py224
-rwxr-xr-xsrc/3rdparty/v8/tools/test-wrapper-gypbuild.py36
-rwxr-xr-xsrc/3rdparty/v8/tools/test.py29
-rw-r--r--src/3rdparty/v8/tools/testrunner/README174
-rw-r--r--src/3rdparty/v8/tools/testrunner/__init__.py26
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/__init__.py26
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/commands.py153
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/execution.py182
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/old_statusfile.py460
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/progress.py238
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/statusfile.py145
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/testsuite.py184
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/utils.py108
-rw-r--r--src/3rdparty/v8/tools/testrunner/local/verbose.py99
-rw-r--r--src/3rdparty/v8/tools/testrunner/network/__init__.py26
-rw-r--r--src/3rdparty/v8/tools/testrunner/network/distro.py90
-rw-r--r--src/3rdparty/v8/tools/testrunner/network/endpoint.py124
-rw-r--r--src/3rdparty/v8/tools/testrunner/network/network_execution.py253
-rw-r--r--src/3rdparty/v8/tools/testrunner/network/perfdata.py120
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/__init__.py26
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/context.py50
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/output.py60
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/peer.py80
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/testcase.py83
-rw-r--r--src/3rdparty/v8/tools/testrunner/objects/workpacket.py90
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/__init__.py26
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/compression.py112
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/constants.py51
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/daemon.py147
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/local_handler.py119
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/main.py245
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/presence_handler.py120
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/signatures.py63
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/status_handler.py112
-rw-r--r--src/3rdparty/v8/tools/testrunner/server/work_handler.py150
-rw-r--r--src/3rdparty/v8/tools/tickprocessor-driver.js4
-rw-r--r--src/3rdparty/v8/tools/tickprocessor.js23
-rw-r--r--src/v8/v8.pri15
-rw-r--r--tests/auto/v8/tst_v8.cpp6
-rw-r--r--tests/auto/v8/v8main.cpp1
-rw-r--r--tests/auto/v8/v8test.cpp63
-rw-r--r--tests/auto/v8/v8test.h1
639 files changed, 72827 insertions, 38485 deletions
diff --git a/src/3rdparty/v8/.gitignore b/src/3rdparty/v8/.gitignore
index 088daea..ec0660f 100644
--- a/src/3rdparty/v8/.gitignore
+++ b/src/3rdparty/v8/.gitignore
@@ -9,9 +9,11 @@
*.pdb
*.pyc
*.scons*
+*.sln
*.so
*.suo
*.user
+*.vcproj
*.xcodeproj
#*#
*~
@@ -20,17 +22,35 @@ d8
d8_g
shell
shell_g
+/build/Debug
/build/gyp
-/obj/
-/out/
+/build/Release
+/obj
+/out
+/test/cctest/cctest.status2
/test/es5conform/data
+/test/message/message.status2
+/test/mjsunit/mjsunit.status2
+/test/mozilla/CHECKED_OUT_VERSION
/test/mozilla/data
+/test/mozilla/downloaded_*
+/test/mozilla/mozilla.status2
+/test/preparser/preparser.status2
/test/sputnik/sputniktests
/test/test262/data
+/test/test262/test262-*
+/test/test262/test262.status2
+/third_party
+/tools/jsfunfuzz
+/tools/jsfunfuzz.zip
/tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o
/tools/visual_studio/Debug
/tools/visual_studio/Release
-/xcodebuild/
+/xcodebuild
TAGS
*.Makefile
+GTAGS
+GRTAGS
+GSYMS
+GPATH
diff --git a/src/3rdparty/v8/AUTHORS b/src/3rdparty/v8/AUTHORS
index 6e46b3d..c279e7c 100644
--- a/src/3rdparty/v8/AUTHORS
+++ b/src/3rdparty/v8/AUTHORS
@@ -20,13 +20,16 @@ Burcu Dogan <burcujdogan@gmail.com>
Craig Schlenter <craig.schlenter@gmail.com>
Daniel Andersson <kodandersson@gmail.com>
Daniel James <dnljms@gmail.com>
+Derek J Conrod <dconrod@codeaurora.org>
Dineel D Sule <dsule@codeaurora.org>
Erich Ocean <erich.ocean@me.com>
Fedor Indutny <fedor@indutny.com>
Filipe David Manana <fdmanana@gmail.com>
+Haitao Feng <haitao.feng@intel.com>
Ioseb Dzmanashvili <ioseb.dzmanashvili@gmail.com>
Jan de Mooij <jandemooij@gmail.com>
Jay Freeman <saurik@saurik.com>
+James Pike <g00gle@chilon.net>
Joel Stanley <joel.stan@gmail.com>
John Jozwiak <jjozwiak@codeaurora.org>
Jonathan Liu <net147@gmail.com>
@@ -42,14 +45,17 @@ Paolo Giarrusso <p.giarrusso@gmail.com>
Patrick Gansterer <paroga@paroga.com>
Peter Varga <pvarga@inf.u-szeged.hu>
Rafal Krypa <rafal@krypa.net>
+Rajeev R Krithivasan <rkrithiv@codeaurora.org>
Rene Rebe <rene@exactcode.de>
Robert Mustacchi <rm@fingolfin.org>
Rodolph Perfetta <rodolph.perfetta@arm.com>
Ryan Dahl <coldredlemur@gmail.com>
+Sandro Santilli <strk@keybit.net>
Sanjoy Das <sanjoy@playingwithpointers.com>
Subrato K De <subratokde@codeaurora.org>
Tobias Burnus <burnus@net-b.de>
Vlad Burlik <vladbph@gmail.com>
+Xi Qian <xi.qian@intel.com>
Yuqiang Xian <yuqiang.xian@intel.com>
Zaheer Ahmad <zahmad@codeaurora.org>
Zhongping Wang <kewpie.w.zp@gmail.com>
diff --git a/src/3rdparty/v8/ChangeLog b/src/3rdparty/v8/ChangeLog
index 97dac40..bb9ed30 100644
--- a/src/3rdparty/v8/ChangeLog
+++ b/src/3rdparty/v8/ChangeLog
@@ -1,3 +1,586 @@
+2012-11-12: Version 3.15.2
+
+ Function::GetScriptOrigin supplies sourceURL when script name is
+ not available. (Chromium issue 159413)
+
+ Made formatting error message side-effect-free. (issue 2398)
+
+ Fixed length check in JSON.stringify. (Chromium issue 160010)
+
+ ES6: Added support for Set and Map clear method (issue 2400)
+
+ Fixed slack tracking when instance prototype changes.
+ (Chromium issue 157019)
+
+ Fixed disabling of code flusher while marking. (Chromium issue 159140)
+
+ Added a test case for object grouping in a scavenger GC (issue 2077)
+
+ Support shared library build of Android for v8.
+ (Chromium issue 158821)
+
+ ES6: Added support for size to Set and Map (issue 2395)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-11-06: Version 3.15.1
+
+ Put incremental code flushing behind a flag. (Chromium issue 159140)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-31: Version 3.15.0
+
+ Loosened aligned code target requirement on ARM (issue 2380)
+
+ Fixed JSON.parse to treat leading zeros correctly.
+ (Chromium issue 158185)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-22: Version 3.14.5
+
+ Killed off the SCons based build.
+
+ Added a faster API for creating v8::Integer objects.
+
+ Speeded up function deoptimization by avoiding quadratic pass over
+ optimized function list. (Chromium issue 155270)
+
+ Always invoke the default Array.sort functions from builtin functions.
+ (issue 2372)
+
+ Reverted recent CPU profiler changes because they broke --prof.
+ (issue 2364)
+
+ Switched code flushing to use different JSFunction field.
+ (issue 1609)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-15: Version 3.14.4
+
+ Allow evals for debugger even if they are prohibited in the debugee
+ context. (Chromium issue 154733)
+
+ Enabled --verify-heap in release mode (issue 2120)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-11: Version 3.14.3
+
+ Use native context to retrieve ErrorMessageForCodeGenerationFromStrings
+ (Chromium issue 155076).
+
+ Bumped variable limit further to 2^17 (Chromium issue 151625).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-10: Version 3.14.2
+
+ ARM: allowed VFP3 instructions when hardfloat is enabled.
+ (Chromium issue 152506)
+
+ Fixed instance_descriptors() and PushStackTraceAndDie regressions.
+ (Chromium issue 151749)
+
+ Made GDBJIT interface compile again. (issue 1804)
+
+ Fixed Accessors::FunctionGetPrototype's proto chain traversal.
+ (Chromium issue 143967)
+
+ Made sure that names of temporaries do not clash with real variables.
+ (issue 2322)
+
+ Rejected local module declarations. (Chromium issue 150628)
+
+ Rejected uses of lexical for-loop variable on the RHS. (issue 2322)
+
+ Fixed slot recording of code target patches.
+ (Chromium issue 152615,chromium:144230)
+
+ Changed the Android makefile to use GCC 4.6 instead of GCC 4.4.3.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-10-01: Version 3.14.1
+
+ Don't set -m32 flag when compiling with Android ARM compiler.
+ (Chromium issue 143889)
+
+ Restore the descriptor array before returning allocation failure.
+ (Chromium issue 151750)
+
+ Lowered kMaxVirtualRegisters (v8 issue 2139, Chromium issues 123822 and
+ 128252).
+
+ Pull more recent gyp in 'make dependencies'.
+
+ Made sure that the generic KeyedStoreIC changes length and element_kind
+ atomically (issue 2346).
+
+ Bumped number of allowed variables per scope to 65535, to address GWT.
+ (Chromium issue 151625)
+
+ Support sourceURL for dynamically inserted scripts (issue 2342).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-09-20: Version 3.14.0
+
+ Fixed missing slot recording during clearing of CallICs.
+ (Chromium issue 144230)
+
+ Fixed LBoundsCheck on x64 to handle (stack slot + constant) correctly.
+ (Chromium issue 150729)
+
+ Fixed minus zero test. (Issue 2133)
+
+ Fixed setting array length to zero for slow elements.
+ (Chromium issue 146910)
+
+ Fixed lost arguments dropping in HLeaveInlined.
+ (Chromium issue 150545)
+
+ Fixed casting error for receiver of interceptors.
+ (Chromium issue 149912)
+
+ Throw a more descriptive exception when blocking 'eval' via CSP.
+ (Chromium issue 140191)
+
+ Fixed debugger's eval when close to stack overflow. (issue 2318)
+
+ Added checks to live edit. (issue 2297)
+
+ Switched on code compaction on incremental GCs.
+
+ Fixed caching of optimized code for OSR. (issue 2326)
+
+ Not mask exception thrown by toString in String::UtfValue etc.
+ (issue 2317)
+
+ Fixed API check for length of external arrays. (Chromium issue 148896)
+
+ Ensure correct enumeration indices in the dict (Chromium issue 148376)
+
+ Correctly initialize regexp global cache. (Chromium issue 148378)
+
+ Fixed arguments object materialization during deopt. (issue 2261)
+
+ Introduced new API to expose external string resource regardless of
+ encoding.
+
+ Fixed CHECK failure in LCodeGen::DoWrapReceiver when
+ --deopt-every-n-times flag is present
+ (Chromium issue 148389)
+
+ Fixed edge case of extension with NULL as source string.
+ (Chromium issue 144649)
+
+ Fixed array index dehoisting. (Chromium issue 141395)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-09-11: Version 3.13.7
+
+ Enable/disable LiveEdit using the (C++) debug API.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-09-06: Version 3.13.6
+
+ Added validity checking to API functions and calls.
+
+ Disabled accessor inlining (Chromium issue 134609).
+
+ Fixed bug in Math.min/max in optimized code (Chromium issue 145961).
+
+ Directly use %ObjectKeys in json stringify (Chromium issue 2312).
+
+ Fixed VS2005 build (issue 2313).
+
+ Activated fixed ES5 readonly semantics by default.
+
+ Added hardfp flag to the Makefile.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-29: Version 3.13.5
+
+ Release stack trace data after firing Error.stack accessor.
+ (issue 2308)
+
+ Added a new API V8::SetJitCodeEventHandler to push code name and
+ location to users such as profilers.
+
+ Allocate block-scoped global bindings to global context.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-28: Version 3.13.4
+
+ Print reason for disabling optimization. Kill --trace-bailout flag.
+
+ Provided option to disable full DEBUG build on Android.
+
+ Introduced global contexts to represent lexical global scope(s).
+
+ Fixed rounding in Uint8ClampedArray setter. (issue 2294)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-21: Version 3.13.3
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-20: Version 3.13.2
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-16: Version 3.13.1
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-10: Version 3.13.0
+
+ Added histograms for total allocated/live heap size, as well as
+ allocated size and percentage of total for map and cell space.
+
+ Fixed parseInt's octal parsing behavior (ECMA-262 Annex E 15.1.2.2).
+ (issue 1645)
+
+ Added checks for interceptors to negative lookup code in Crankshaft.
+ (Chromium issue 140473)
+
+ Made incremental marking clear ICs and type feedback cells.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-08-01: Version 3.12.19
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-30: Version 3.12.18
+
+ Forced using bit-pattern for signed zero double. (issue 2239)
+
+ Made sure double to int conversion is correct. (issue 2260)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-27: Version 3.12.17
+
+ Always set the callee's context when calling a function from optimized
+ code.
+ (Chromium issue 138887)
+
+ Fixed building with GCC 3.x
+ (issue 2016, 2017)
+
+ Improved API calls that return empty handles.
+ (issue 2245)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-25: Version 3.12.16
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-24: Version 3.12.15
+
+ Added PRESERVE_ASCII_NULL option to String::WriteAscii.
+ (issue 2252)
+
+ Added dependency to HLoadKeyed* instructions to prevent invalid
+ hoisting. (Chromium issue 137768)
+
+ Enabled building d8 for Android on Mac.
+
+ Interpret negative hexadecimal literals as NaN.
+ (issue 2240)
+
+ Expose counters in javascript when using --track-gc-object-stats.
+
+ Enabled building and testing V8 on Android IA.
+
+ Added --trace-parse flag to parser.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-18: Version 3.12.14
+
+ Deactivated optimization of packed arrays.
+ (Chromium issue 137768)
+
+ Fixed broken accessor transition.
+ (Chromium issue 137689)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-17: Version 3.12.13
+
+ Fixed missing tagging of stack value in finally block.
+ (Chromium issue 137496)
+
+ Added more support for heap analysis.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-16: Version 3.12.12
+
+ Added an option to the tickprocessor to specify the directory for lib
+ lookup.
+
+ Fixed ICs for slow objects with native accessor (Chromium issue 137002).
+
+ Fixed transcendental cache on ARM in optimized code (issue 2234).
+
+ New heap inspection tools: counters for object sizes and counts,
+ histograms for external fragmentation.
+
+ Incorporated constness into inferred interfaces (in preparation for
+ handling imports) (issue 1569).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-12: Version 3.12.11
+
+ Renamed "mips" arch to "mipsel" in the GYP build.
+
+ Fixed computation of call targets on prototypes in Crankshaft.
+ (Chromium issue 125148)
+
+ Removed use of __lookupGetter__ when generating stack trace.
+ (issue 1591)
+
+ Turned on ES 5.2 globals semantics by default.
+ (issue 1991, Chromium issue 80591)
+
+ Synced preparser and parser wrt syntax error in switch..case.
+ (issue 2210)
+
+ Fixed reporting of octal literals in strict mode when preparsing.
+ (issue 2220)
+
+ Fixed inline constructors for Harmony Proxy prototypes.
+ (issue 2225)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-10: Version 3.12.10
+
+ Re-enabled and fixed issue with array bounds check elimination
+ (Chromium issue 132114).
+
+ Fixed Debug::Break crash. (Chromium issue 131642)
+
+ Added optimizing compiler support for JavaScript getters.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-06: Version 3.12.9
+
+ Correctly advance the scanner when scanning unicode regexp flag.
+ (Chromium issue 136084)
+
+ Fixed unhandlified code calling Harmony Proxy traps.
+ (issue 2219)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-05: Version 3.12.8
+
+ Implemented TypedArray.set and ArrayBuffer.slice in d8.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-07-03: Version 3.12.7
+
+ Fixed lazy compilation for strict eval scopes.
+ (Chromium issue 135066)
+
+ Made MACOSX_DEPLOYMENT_TARGET configurable in GYP.
+ (issue 2151)
+
+ Report "hidden properties" in heap profiler for properties case.
+ (issue 2212)
+
+ Activated optimization of packed arrays by default.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-29: Version 3.12.6
+
+ Cleaned up hardfp ABI detection for ARM (V8 issue 2140).
+
+ Extended TypedArray support in d8.
+
+
+2012-06-28: Version 3.12.5
+
+ Fixed lazy parsing heuristics to respect outer scope.
+ (Chromium issue 135008)
+
+ Allow using test-wrapper-gypbuild.py on Windows when no python
+ interpreter is registered.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-27: Version 3.12.4
+
+ Removed -fomit-frame-pointer flag from Release builds to make
+ the stack walkable by TCMalloc (Chromium issue 133723).
+
+ Ported r7868 (constant masking) to x64 (issue 1374).
+
+ Expose more detailed memory statistics (issue 2201).
+
+ Fixed Harmony Maps and WeakMaps for undefined values
+ (Chromium issue 132744).
+
+ Correctly throw reference error in strict mode with ICs disabled
+ (issue 2119).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-25: Version 3.12.3
+
+ Reverted r11835 'Unify promotion and allocation limit computation' due
+ to V8 Splay performance regression on Mac. (Chromium issue 134183)
+
+ Fixed sharing of literal boilerplates for optimized code. (issue 2193)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-22: Version 3.12.2
+
+ Made near-jump check more strict in LoadNamedFieldPolymorphic on
+ ia32/x64. (Chromium issue 134055)
+
+ Fixed lazy sweeping heuristics to prevent old-space expansion.
+ (issue 2194)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-21: Version 3.12.1
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-20: Version 3.12.0
+
+ Fixed Chromium issues:
+ 115100, 129628, 131994, 132727, 132741, 132742, 133211
+
+ Fixed V8 issues:
+ 915, 1914, 2034, 2087, 2094, 2134, 2156, 2166, 2172, 2177, 2179, 2185
+
+ Added --extra-code flag to mksnapshot to load JS code into the VM
+ before creating the snapshot.
+
+ Support 'restart call frame' command in the debugger.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-13: Version 3.11.10
+
+ Implemented heap profiler memory usage reporting.
+
+ Preserved error message during finally block in try..finally.
+ (Chromium issue 129171)
+
+ Fixed EnsureCanContainElements to properly handle double values.
+ (issue 2170)
+
+ Improved heuristics to keep objects in fast mode with inherited
+ constructors.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-06: Version 3.11.9
+
+ Implemented ES5-conformant semantics for inherited setters and read-only
+ properties. Currently behind --es5_readonly flag, because it breaks
+ WebKit bindings.
+
+ Exposed last seen heap object id via v8 public api.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-31: Version 3.11.8
+
+ Avoid overdeep recursion in regexp where a guarded expression with a
+ minimum repetition count is inside another quantifier.
+ (Chromium issue 129926)
+
+ Fixed missing write barrier in store field stub.
+ (issues 2143, 1465, Chromium issue 129355)
+
+ Proxies: Fixed receiver for setters inherited from proxies.
+ Proxies: Fixed ToStringArray function so that it does not reject some
+ keys.
+ (issue 1543)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-29: Version 3.11.7
+
+ Get better function names in stack traces.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-24: Version 3.11.6
+
+ Fixed RegExp.prototype.toString for incompatible receivers
+ (issue 1981).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-23: Version 3.11.5
+
+ Performance and stability improvements on all platforms.
+
+
2012-05-22: Version 3.11.4
Some cleanup to common.gypi. This fixes some host/target combinations
diff --git a/src/3rdparty/v8/DEPS b/src/3rdparty/v8/DEPS
index e50d1d2..8d66960 100644
--- a/src/3rdparty/v8/DEPS
+++ b/src/3rdparty/v8/DEPS
@@ -5,7 +5,7 @@
deps = {
# Remember to keep the revision in sync with the Makefile.
"v8/build/gyp":
- "http://gyp.googlecode.com/svn/trunk@1282",
+ "http://gyp.googlecode.com/svn/trunk@1501",
}
deps_os = {
diff --git a/src/3rdparty/v8/Makefile b/src/3rdparty/v8/Makefile
index 0d825c0..b65ea4c 100644
--- a/src/3rdparty/v8/Makefile
+++ b/src/3rdparty/v8/Makefile
@@ -30,11 +30,12 @@
CXX ?= g++
LINK ?= g++
OUTDIR ?= out
-TESTJOBS ?= -j16
+TESTJOBS ?=
GYPFLAGS ?=
TESTFLAGS ?=
ANDROID_NDK_ROOT ?=
-ANDROID_TOOL_PREFIX = $(ANDROID_NDK_ROOT)/toolchain/bin/arm-linux-androideabi
+ANDROID_TOOLCHAIN ?=
+ANDROID_V8 ?= /data/local/v8
# Special build flags. Use them like this: "make library=shared"
@@ -57,10 +58,21 @@ endif
ifeq ($(objectprint), on)
GYPFLAGS += -Dv8_object_print=1
endif
+# verifyheap=on
+ifeq ($(verifyheap), on)
+ GYPFLAGS += -Dv8_enable_verify_heap=1
+endif
# snapshot=off
ifeq ($(snapshot), off)
GYPFLAGS += -Dv8_use_snapshot='false'
endif
+# extrachecks=on/off
+ifeq ($(extrachecks), on)
+ GYPFLAGS += -Dv8_enable_extra_checks=1
+endif
+ifeq ($(extrachecks), off)
+ GYPFLAGS += -Dv8_enable_extra_checks=0
+endif
# gdbjit=on
ifeq ($(gdbjit), on)
GYPFLAGS += -Dv8_enable_gdbjit=1
@@ -71,9 +83,9 @@ ifeq ($(liveobjectlist), on)
endif
# vfp3=off
ifeq ($(vfp3), off)
- GYPFLAGS += -Dv8_can_use_vfp_instructions=false
+ GYPFLAGS += -Dv8_can_use_vfp3_instructions=false
else
- GYPFLAGS += -Dv8_can_use_vfp_instructions=true
+ GYPFLAGS += -Dv8_can_use_vfp3_instructions=true
endif
# debuggersupport=off
ifeq ($(debuggersupport), off)
@@ -95,6 +107,14 @@ endif
ifeq ($(strictaliasing), off)
GYPFLAGS += -Dv8_no_strict_aliasing=1
endif
+# regexp=interpreted
+ifeq ($(regexp), interpreted)
+ GYPFLAGS += -Dv8_interpreted_regexp=1
+endif
+# hardfp=on
+ifeq ($(hardfp), on)
+ GYPFLAGS += -Dv8_use_arm_eabi_hardfloat=true
+endif
# ----------------- available targets: --------------------
# - "dependencies": pulls in external dependencies (currently: GYP)
@@ -103,7 +123,7 @@ endif
# - every combination <arch>.<mode>, e.g. "ia32.release"
# - "native": current host's architecture, release mode
# - any of the above with .check appended, e.g. "ia32.release.check"
-# - "android": cross-compile for Android/ARM (release mode)
+# - "android": cross-compile for Android/ARM
# - default (no target specified): build all DEFAULT_ARCHES and MODES
# - "check": build all targets and run all tests
# - "<arch>.clean" for any <arch> in ARCHES
@@ -113,9 +133,10 @@ endif
# Architectures and modes to be compiled. Consider these to be internal
# variables, don't override them (use the targets instead).
-ARCHES = ia32 x64 arm mips
+ARCHES = ia32 x64 arm mipsel
DEFAULT_ARCHES = ia32 x64 arm
MODES = release debug
+ANDROID_ARCHES = android_ia32 android_arm
# List of files that trigger Makefile regeneration:
GYPFILES = build/all.gyp build/common.gypi build/standalone.gypi \
@@ -124,15 +145,19 @@ GYPFILES = build/all.gyp build/common.gypi build/standalone.gypi \
# Generates all combinations of ARCHES and MODES, e.g. "ia32.release".
BUILDS = $(foreach mode,$(MODES),$(addsuffix .$(mode),$(ARCHES)))
+ANDROID_BUILDS = $(foreach mode,$(MODES), \
+ $(addsuffix .$(mode),$(ANDROID_ARCHES)))
# Generates corresponding test targets, e.g. "ia32.release.check".
CHECKS = $(addsuffix .check,$(BUILDS))
+ANDROID_CHECKS = $(addsuffix .check,$(ANDROID_BUILDS))
# File where previously used GYPFLAGS are stored.
ENVFILE = $(OUTDIR)/environment
.PHONY: all check clean dependencies $(ENVFILE).new native \
$(ARCHES) $(MODES) $(BUILDS) $(CHECKS) $(addsuffix .clean,$(ARCHES)) \
$(addsuffix .check,$(MODES)) $(addsuffix .check,$(ARCHES)) \
- must-set-ANDROID_NDK_ROOT
+ $(ANDROID_ARCHES) $(ANDROID_BUILDS) $(ANDROID_CHECKS) \
+ must-set-ANDROID_NDK_ROOT_OR_TOOLCHAIN
# Target definitions. "all" is the default.
all: $(MODES)
@@ -143,6 +168,10 @@ buildbot:
$(MAKE) -C "$(OUTDIR)" BUILDTYPE=$(BUILDTYPE) \
builddir="$(abspath $(OUTDIR))/$(BUILDTYPE)"
+mips mips.release mips.debug:
+ @echo "V8 does not support big-endian MIPS builds at the moment," \
+ "please use little-endian builds (mipsel)."
+
# Compile targets. MODES and ARCHES are convenience targets.
.SECONDEXPANSION:
$(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES))
@@ -162,42 +191,53 @@ native: $(OUTDIR)/Makefile.native
CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \
builddir="$(shell pwd)/$(OUTDIR)/$@"
-# TODO(jkummerow): add "android.debug" when we need it.
-android android.release: $(OUTDIR)/Makefile.android
- @$(MAKE) -C "$(OUTDIR)" -f Makefile.android \
- CXX="$(ANDROID_TOOL_PREFIX)-g++" \
- AR="$(ANDROID_TOOL_PREFIX)-ar" \
- RANLIB="$(ANDROID_TOOL_PREFIX)-ranlib" \
- CC="$(ANDROID_TOOL_PREFIX)-gcc" \
- LD="$(ANDROID_TOOL_PREFIX)-ld" \
- LINK="$(ANDROID_TOOL_PREFIX)-g++" \
- BUILDTYPE=Release \
- builddir="$(shell pwd)/$(OUTDIR)/android.release"
+$(ANDROID_ARCHES): $(addprefix $$@.,$(MODES))
+
+$(ANDROID_BUILDS): $(GYPFILES) $(ENVFILE) build/android.gypi \
+ must-set-ANDROID_NDK_ROOT_OR_TOOLCHAIN Makefile.android
+ @$(MAKE) -f Makefile.android $@ \
+ ARCH="$(basename $@)" \
+ MODE="$(subst .,,$(suffix $@))" \
+ OUTDIR="$(OUTDIR)" \
+ GYPFLAGS="$(GYPFLAGS)"
# Test targets.
check: all
- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
--arch=$(shell echo $(DEFAULT_ARCHES) | sed -e 's/ /,/g') \
$(TESTFLAGS)
$(addsuffix .check,$(MODES)): $$(basename $$@)
- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
--mode=$(basename $@) $(TESTFLAGS)
$(addsuffix .check,$(ARCHES)): $$(basename $$@)
- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
--arch=$(basename $@) $(TESTFLAGS)
$(CHECKS): $$(basename $$@)
- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
--arch-and-mode=$(basename $@) $(TESTFLAGS)
+$(addsuffix .sync, $(ANDROID_BUILDS)): $$(basename $$@)
+ @tools/android-sync.sh $(basename $@) $(OUTDIR) \
+ $(shell pwd) $(ANDROID_V8)
+
+$(addsuffix .check, $(ANDROID_BUILDS)): $$(basename $$@).sync
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ --arch-and-mode=$(basename $@) \
+ --timeout=600 \
+ --command-prefix="tools/android-run.py"
+
+$(addsuffix .check, $(ANDROID_ARCHES)): \
+ $(addprefix $$(basename $$@).,$(MODES)).check
+
native.check: native
- @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR)/native \
+ @tools/run-tests.py $(TESTJOBS) --outdir=$(OUTDIR)/native \
--arch-and-mode=. $(TESTFLAGS)
# Clean targets. You can clean each architecture individually, or everything.
-$(addsuffix .clean,$(ARCHES)) android.clean:
+$(addsuffix .clean, $(ARCHES) $(ANDROID_ARCHES)):
rm -f $(OUTDIR)/Makefile.$(basename $@)
rm -rf $(OUTDIR)/$(basename $@).release
rm -rf $(OUTDIR)/$(basename $@).debug
@@ -208,11 +248,11 @@ native.clean:
rm -rf $(OUTDIR)/native
find $(OUTDIR) -regex '.*\(host\|target\).native\.mk' -delete
-clean: $(addsuffix .clean,$(ARCHES)) native.clean android.clean
+clean: $(addsuffix .clean, $(ARCHES) $(ANDROID_ARCHES)) native.clean
# GYP file generation targets.
-MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ARCHES))
-$(MAKEFILES): $(GYPFILES) $(ENVFILE)
+OUT_MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ARCHES))
+$(OUT_MAKEFILES): $(GYPFILES) $(ENVFILE)
GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. \
@@ -224,18 +264,11 @@ $(OUTDIR)/Makefile.native: $(GYPFILES) $(ENVFILE)
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -S.native $(GYPFLAGS)
-$(OUTDIR)/Makefile.android: $(GYPFILES) $(ENVFILE) build/android.gypi \
- must-set-ANDROID_NDK_ROOT
- GYP_GENERATORS=make \
- CC="${ANDROID_TOOL_PREFIX}-gcc" \
- CXX="${ANDROID_TOOL_PREFIX}-g++" \
- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \
- -S.android $(GYPFLAGS)
-
-must-set-ANDROID_NDK_ROOT:
+must-set-ANDROID_NDK_ROOT_OR_TOOLCHAIN:
ifndef ANDROID_NDK_ROOT
- $(error ANDROID_NDK_ROOT is not set)
+ifndef ANDROID_TOOLCHAIN
+ $(error ANDROID_NDK_ROOT or ANDROID_TOOLCHAIN must be set))
+endif
endif
# Replaces the old with the new environment file if they're different, which
@@ -251,6 +284,7 @@ $(ENVFILE).new:
echo "CXX=$(CXX)" >> $(ENVFILE).new
# Dependencies.
+# Remember to keep these in sync with the DEPS file.
dependencies:
svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \
- --revision 1282
+ --revision 1501
diff --git a/src/3rdparty/v8/Makefile.android b/src/3rdparty/v8/Makefile.android
new file mode 100644
index 0000000..8e4ce08
--- /dev/null
+++ b/src/3rdparty/v8/Makefile.android
@@ -0,0 +1,92 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Those definitions should be consistent with the main Makefile
+ANDROID_ARCHES = android_ia32 android_arm
+MODES = release debug
+
+# Generates all combinations of ANDROID ARCHES and MODES,
+# e.g. "android_ia32.release" or "android_arm.release"
+ANDROID_BUILDS = $(foreach mode,$(MODES), \
+ $(addsuffix .$(mode),$(ANDROID_ARCHES)))
+
+HOST_OS = $(shell uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')
+ifeq ($(HOST_OS), linux)
+ TOOLCHAIN_DIR = linux-x86
+else
+ ifeq ($(HOST_OS), mac)
+ TOOLCHAIN_DIR = darwin-x86
+ else
+ $(error Host platform "${HOST_OS}" is not supported)
+ endif
+endif
+
+ifeq ($(ARCH), android_arm)
+ DEFINES = target_arch=arm v8_target_arch=arm android_target_arch=arm
+ DEFINES += arm_neon=0 armv7=1
+ TOOLCHAIN_ARCH = arm-linux-androideabi-4.6
+else
+ ifeq ($(ARCH), android_ia32)
+ DEFINES = target_arch=ia32 v8_target_arch=ia32 android_target_arch=x86
+ TOOLCHAIN_ARCH = x86-4.6
+ else
+ $(error Target architecture "${ARCH}" is not supported)
+ endif
+endif
+
+TOOLCHAIN_PATH = ${ANDROID_NDK_ROOT}/toolchains/${TOOLCHAIN_ARCH}/prebuilt
+ANDROID_TOOLCHAIN ?= ${TOOLCHAIN_PATH}/${TOOLCHAIN_DIR}
+ifeq ($(wildcard $(ANDROID_TOOLCHAIN)),)
+ $(error Cannot find Android toolchain in "${ANDROID_TOOLCHAIN}")
+endif
+
+# For mksnapshot host generation.
+DEFINES += host_os=${HOST_OS}
+
+.SECONDEXPANSION:
+$(ANDROID_BUILDS): $(OUTDIR)/Makefile.$$(basename $$@)
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile.$(basename $@) \
+ CXX="$(ANDROID_TOOLCHAIN)/bin/*-g++" \
+ AR="$(ANDROID_TOOLCHAIN)/bin/*-ar" \
+ RANLIB="$(ANDROID_TOOLCHAIN)/bin/*-ranlib" \
+ CC="$(ANDROID_TOOLCHAIN)/bin/*-gcc" \
+ LD="$(ANDROID_TOOLCHAIN)/bin/*-ld" \
+ LINK="$(ANDROID_TOOLCHAIN)/bin/*-g++" \
+ BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \
+ python -c "print raw_input().capitalize()") \
+ builddir="$(shell pwd)/$(OUTDIR)/$@"
+
+# Android GYP file generation targets.
+ANDROID_MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ANDROID_ARCHES))
+$(ANDROID_MAKEFILES):
+ @GYP_GENERATORS=make-android \
+ GYP_DEFINES="${DEFINES}" \
+ CC="${ANDROID_TOOLCHAIN}/bin/*-gcc" \
+ CXX="${ANDROID_TOOLCHAIN}/bin/*-g++" \
+ build/gyp/gyp --generator-output="${OUTDIR}" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \
+ -S.${ARCH} ${GYPFLAGS}
diff --git a/src/3rdparty/v8/OWNERS b/src/3rdparty/v8/OWNERS
new file mode 100644
index 0000000..941e5fe
--- /dev/null
+++ b/src/3rdparty/v8/OWNERS
@@ -0,0 +1,11 @@
+danno@chromium.org
+jkummerow@chromium.org
+mmassi@chromium.org
+mstarzinger@chromium.org
+mvstanton@chromium.org
+rossberg@chromium.org
+svenpanne@chromium.org
+ulan@chromium.org
+vegorov@chromium.org
+verwaest@chromium.org
+yangguo@chromium.org
diff --git a/src/3rdparty/v8/PRESUBMIT.py b/src/3rdparty/v8/PRESUBMIT.py
new file mode 100644
index 0000000..0077be9
--- /dev/null
+++ b/src/3rdparty/v8/PRESUBMIT.py
@@ -0,0 +1,71 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Top-level presubmit script for V8.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into gcl.
+"""
+
+def _V8PresubmitChecks(input_api, output_api):
+ """Runs the V8 presubmit checks."""
+ import sys
+ sys.path.append(input_api.os_path.join(
+ input_api.PresubmitLocalPath(), 'tools'))
+ from presubmit import CppLintProcessor
+ from presubmit import SourceProcessor
+
+ results = []
+ if not CppLintProcessor().Run(input_api.PresubmitLocalPath()):
+ results.append(output_api.PresubmitError("C++ lint check failed"))
+ if not SourceProcessor().Run(input_api.PresubmitLocalPath()):
+ results.append(output_api.PresubmitError(
+ "Copyright header and trailing whitespaces check failed"))
+ return results
+
+
+def _CommonChecks(input_api, output_api):
+ """Checks common to both upload and commit."""
+ results = []
+ results.extend(input_api.canned_checks.CheckOwners(
+ input_api, output_api, source_file_filter=None))
+ return results
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ results = []
+ results.extend(_CommonChecks(input_api, output_api))
+ return results
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ results = []
+ results.extend(_CommonChecks(input_api, output_api))
+ results.extend(input_api.canned_checks.CheckChangeHasDescription(
+ input_api, output_api))
+ results.extend(_V8PresubmitChecks(input_api, output_api))
+ return results
diff --git a/src/3rdparty/v8/SConstruct b/src/3rdparty/v8/SConstruct
index ebce7ff..5f8616a 100644
--- a/src/3rdparty/v8/SConstruct
+++ b/src/3rdparty/v8/SConstruct
@@ -59,7 +59,7 @@ LIBRARY_FLAGS = {
'CPPDEFINES': ['V8_INTERPRETED_REGEXP']
},
'mode:debug': {
- 'CPPDEFINES': ['V8_ENABLE_CHECKS', 'OBJECT_PRINT']
+ 'CPPDEFINES': ['V8_ENABLE_CHECKS', 'OBJECT_PRINT', 'VERIFY_HEAP']
},
'objectprint:on': {
'CPPDEFINES': ['OBJECT_PRINT'],
@@ -1157,6 +1157,11 @@ SIMPLE_OPTIONS = {
'default': 'on',
'help': 'use fpu instructions when building the snapshot [MIPS only]'
},
+ 'I_know_I_should_build_with_GYP': {
+ 'values': ['yes', 'no'],
+ 'default': 'no',
+ 'help': 'grace period: temporarily override SCons deprecation'
+ }
}
@@ -1257,7 +1262,35 @@ def IsLegal(env, option, values):
return True
+def WarnAboutDeprecation():
+ print """
+ #####################################################################
+ # #
+ # LAST WARNING: Building V8 with SCons is deprecated. #
+ # #
+ # This only works because you have overridden the kill switch. #
+ # #
+ # MIGRATE TO THE GYP-BASED BUILD NOW! #
+ # #
+ # Instructions: http://code.google.com/p/v8/wiki/BuildingWithGYP. #
+ # #
+ #####################################################################
+ """
+
+
def VerifyOptions(env):
+ if env['I_know_I_should_build_with_GYP'] != 'yes':
+ Abort("Building V8 with SCons is no longer supported. Please use GYP "
"instead; you can find instructions at "
+ "http://code.google.com/p/v8/wiki/BuildingWithGYP.\n\n"
+ "Quitting.\n\n"
+ "For a limited grace period, you can specify "
+ "\"I_know_I_should_build_with_GYP=yes\" to override.")
+ else:
+ WarnAboutDeprecation()
+ import atexit
+ atexit.register(WarnAboutDeprecation)
+
if not IsLegal(env, 'mode', ['debug', 'release']):
return False
if not IsLegal(env, 'sample', ["shell", "process", "lineprocessor"]):
@@ -1600,18 +1633,4 @@ try:
except:
pass
-
-def WarnAboutDeprecation():
- print """
-#######################################################
-# WARNING: Building V8 with SCons is deprecated and #
-# will not work much longer. Please switch to using #
-# the GYP-based build now. Instructions are at #
-# http://code.google.com/p/v8/wiki/BuildingWithGYP. #
-#######################################################
- """
-
-WarnAboutDeprecation()
-import atexit
-atexit.register(WarnAboutDeprecation)
Build()
diff --git a/src/3rdparty/v8/build/android.gypi b/src/3rdparty/v8/build/android.gypi
index ffd0648..67a9d35 100644
--- a/src/3rdparty/v8/build/android.gypi
+++ b/src/3rdparty/v8/build/android.gypi
@@ -33,35 +33,40 @@
'variables': {
# Location of Android NDK.
'variables': {
- 'variables': {
- 'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
- 'android_target_arch%': 'arm', # target_arch in android terms.
-
- # Switch between different build types, currently only '0' is
- # supported.
- 'android_build_type%': 0,
- },
- 'android_ndk_root%': '<(android_ndk_root)',
- 'android_ndk_sysroot': '<(android_ndk_root)/platforms/android-9/arch-<(android_target_arch)',
- 'android_build_type%': '<(android_build_type)',
+ 'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
+ 'android_toolchain%': '<!(/bin/echo -n $ANDROID_TOOLCHAIN)',
+ # Switch between different build types, currently only '0' is
+ # supported.
+ 'android_build_type%': 0,
},
- 'android_ndk_root%': '<(android_ndk_root)',
- 'android_ndk_sysroot': '<(android_ndk_sysroot)',
- 'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
- 'android_ndk_lib': '<(android_ndk_sysroot)/usr/lib',
+ 'conditions': [
+ ['android_ndk_root==""', {
+ 'variables': {
+ 'android_sysroot': '<(android_toolchain)/sysroot/',
+ 'android_stlport': '<(android_toolchain)/sources/cxx-stl/stlport/',
+ },
+ 'android_include': '<(android_sysroot)/usr/include',
+ 'android_lib': '<(android_sysroot)/usr/lib',
+ 'android_stlport_include': '<(android_stlport)/stlport',
+ 'android_stlport_libs': '<(android_stlport)/libs',
+ }, {
+ 'variables': {
+ 'android_sysroot': '<(android_ndk_root)/platforms/android-9/arch-<(android_target_arch)',
+ 'android_stlport': '<(android_ndk_root)/sources/cxx-stl/stlport/',
+ },
+ 'android_include': '<(android_sysroot)/usr/include',
+ 'android_lib': '<(android_sysroot)/usr/lib',
+ 'android_stlport_include': '<(android_stlport)/stlport',
+ 'android_stlport_libs': '<(android_stlport)/libs',
+ }],
+ ],
# Enable to use the system stlport, otherwise statically
# link the NDK one?
'use_system_stlport%': '<(android_build_type)',
'android_stlport_library': 'stlport_static',
# Copy it out one scope.
'android_build_type%': '<(android_build_type)',
-
'OS': 'android',
- 'target_arch': 'arm',
- 'v8_target_arch': 'arm',
- 'armv7': 1,
- 'arm_neon': 0,
- 'arm_fpu': 'vfpv3',
}, # variables
'target_defaults': {
'defines': [
@@ -100,10 +105,7 @@
'-Wno-error=non-virtual-dtor', # TODO(michaelbai): Fix warnings.
# Note: This include is in cflags to ensure that it comes after
# all of the includes.
- '-I<(android_ndk_include)',
- '-march=armv7-a',
- '-mtune=cortex-a8',
- '-mfpu=vfp3',
+ '-I<(android_include)',
],
'defines': [
'ANDROID',
@@ -120,9 +122,6 @@
'ldflags': [
'-nostdlib',
'-Wl,--no-undefined',
- '-Wl,--icf=safe', # Enable identical code folding to reduce size
- # Don't export symbols from statically linked libraries.
- '-Wl,--exclude-libs=ALL',
],
'libraries!': [
'-lrt', # librt is built into Bionic.
@@ -144,8 +143,21 @@
'conditions': [
['android_build_type==0', {
'ldflags': [
- '-Wl,-rpath-link=<(android_ndk_lib)',
- '-L<(android_ndk_lib)',
+ '-Wl,-rpath-link=<(android_lib)',
+ '-L<(android_lib)',
+ ],
+ }],
+ ['target_arch == "arm"', {
+ 'ldflags': [
+ # Enable identical code folding to reduce size.
+ '-Wl,--icf=safe',
+ ],
+ }],
+ ['target_arch=="arm" and armv7==1', {
+ 'cflags': [
+ '-march=armv7-a',
+ '-mtune=cortex-a8',
+ '-mfpu=vfp3',
],
}],
# NOTE: The stlport header include paths below are specified in
@@ -156,22 +168,22 @@
# The include ordering here is important; change with caution.
['use_system_stlport==0', {
'cflags': [
- '-I<(android_ndk_root)/sources/cxx-stl/stlport/stlport',
+ '-I<(android_stlport_include)',
],
'conditions': [
['target_arch=="arm" and armv7==1', {
'ldflags': [
- '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi-v7a',
+ '-L<(android_stlport_libs)/armeabi-v7a',
],
}],
['target_arch=="arm" and armv7==0', {
'ldflags': [
- '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi',
+ '-L<(android_stlport_libs)/armeabi',
],
}],
['target_arch=="ia32"', {
'ldflags': [
- '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/x86',
+ '-L<(android_stlport_libs)/x86',
],
}],
],
@@ -194,17 +206,24 @@
'-Wl,--gc-sections',
'-Wl,-z,nocopyreloc',
# crtbegin_dynamic.o should be the last item in ldflags.
- '<(android_ndk_lib)/crtbegin_dynamic.o',
+ '<(android_lib)/crtbegin_dynamic.o',
],
'libraries': [
# crtend_android.o needs to be the last item in libraries.
# Do not add any libraries after this!
- '<(android_ndk_lib)/crtend_android.o',
+ '<(android_lib)/crtend_android.o',
],
}],
['_type=="shared_library"', {
'ldflags': [
'-Wl,-shared,-Bsymbolic',
+ '<(android_lib)/crtbegin_so.o',
+ ],
+ }],
+ ['_type=="static_library"', {
+ 'ldflags': [
+ # Don't export symbols from statically linked libraries.
+ '-Wl,--exclude-libs=ALL',
],
}],
],
@@ -222,4 +241,4 @@
}],
], # target_conditions
}, # target_defaults
-} \ No newline at end of file
+}
diff --git a/src/3rdparty/v8/build/common.gypi b/src/3rdparty/v8/build/common.gypi
index 1726d2a..9559d98 100644
--- a/src/3rdparty/v8/build/common.gypi
+++ b/src/3rdparty/v8/build/common.gypi
@@ -43,12 +43,13 @@
# access is allowed for all CPUs.
'v8_can_use_unaligned_accesses%': 'default',
- # Setting 'v8_can_use_vfp_instructions' to 'true' will enable use of ARM VFP
+ # Setting 'v8_can_use_vfp2_instructions' to 'true' will enable use of ARM VFP
# instructions in the V8 generated code. VFP instructions will be enabled
# both for the snapshot and for the ARM target. Leaving the default value
# of 'false' will avoid VFP instructions in the snapshot and use CPU feature
# probing when running on the target.
- 'v8_can_use_vfp_instructions%': 'false',
+ 'v8_can_use_vfp2_instructions%': 'false',
+ 'v8_can_use_vfp3_instructions%': 'false',
# Similar to vfp but on MIPS.
'v8_can_use_fpu_instructions%': 'true',
@@ -69,13 +70,18 @@
'v8_enable_disassembler%': 0,
- 'v8_object_print%': 0,
+ # Enable extra checks in API functions and other strategic places.
+ 'v8_enable_extra_checks%': 1,
'v8_enable_gdbjit%': 0,
+ 'v8_object_print%': 0,
+
# Enable profiling support. Only required on Windows.
'v8_enable_prof%': 0,
+ 'v8_enable_verify_heap%': 0,
+
# Some versions of GCC 4.5 seem to need -fno-strict-aliasing.
'v8_no_strict_aliasing%': 0,
@@ -95,6 +101,10 @@
# For a shared library build, results in "libv8-<(soname_version).so".
'soname_version%': '',
+
+  # Interpreted regexp engine exists as a platform-independent
+  # alternative where the regular expression is compiled to a bytecode.
+ 'v8_interpreted_regexp%': 0,
},
'target_defaults': {
'conditions': [
@@ -104,17 +114,31 @@
['v8_enable_disassembler==1', {
'defines': ['ENABLE_DISASSEMBLER',],
}],
- ['v8_object_print==1', {
- 'defines': ['OBJECT_PRINT',],
+ ['v8_enable_extra_checks==1', {
+ 'defines': ['ENABLE_EXTRA_CHECKS',],
}],
['v8_enable_gdbjit==1', {
'defines': ['ENABLE_GDB_JIT_INTERFACE',],
}],
+ ['v8_object_print==1', {
+ 'defines': ['OBJECT_PRINT',],
+ }],
+ ['v8_enable_verify_heap==1', {
+ 'defines': ['VERIFY_HEAP',],
+ }],
+ ['v8_interpreted_regexp==1', {
+ 'defines': ['V8_INTERPRETED_REGEXP',],
+ }],
['v8_target_arch=="arm"', {
'defines': [
'V8_TARGET_ARCH_ARM',
],
'conditions': [
+ ['armv7==1', {
+ 'defines': [
+ 'CAN_USE_ARMV7_INSTRUCTIONS=1',
+ ],
+ }],
[ 'v8_can_use_unaligned_accesses=="true"', {
'defines': [
'CAN_USE_UNALIGNED_ACCESSES=1',
@@ -125,15 +149,20 @@
'CAN_USE_UNALIGNED_ACCESSES=0',
],
}],
- [ 'v8_can_use_vfp_instructions=="true"', {
+ [ 'v8_can_use_vfp2_instructions=="true"', {
'defines': [
- 'CAN_USE_VFP_INSTRUCTIONS',
+ 'CAN_USE_VFP2_INSTRUCTIONS',
+ ],
+ }],
+ [ 'v8_can_use_vfp3_instructions=="true"', {
+ 'defines': [
+ 'CAN_USE_VFP3_INSTRUCTIONS',
],
}],
[ 'v8_use_arm_eabi_hardfloat=="true"', {
'defines': [
'USE_EABI_HARDFLOAT=1',
- 'CAN_USE_VFP_INSTRUCTIONS',
+ 'CAN_USE_VFP3_INSTRUCTIONS',
],
'target_conditions': [
['_toolset=="target"', {
@@ -152,12 +181,12 @@
'V8_TARGET_ARCH_IA32',
],
}], # v8_target_arch=="ia32"
- ['v8_target_arch=="mips"', {
+ ['v8_target_arch=="mipsel"', {
'defines': [
'V8_TARGET_ARCH_MIPS',
],
'variables': {
- 'mipscompiler': '<!($(echo ${CXX:-$(which g++)}) -v 2>&1 | grep -q "^Target: mips-" && echo "yes" || echo "no")',
+ 'mipscompiler': '<!($(echo ${CXX:-$(which g++)}) -v 2>&1 | grep -q "^Target: mips" && echo "yes" || echo "no")',
},
'conditions': [
['mipscompiler=="yes"', {
@@ -176,10 +205,11 @@
['mips_arch_variant=="mips32r2"', {
'cflags': ['-mips32r2', '-Wa,-mips32r2'],
}],
+ ['mips_arch_variant=="mips32r1"', {
+ 'cflags': ['-mips32', '-Wa,-mips32'],
+ }],
['mips_arch_variant=="loongson"', {
'cflags': ['-mips3', '-Wa,-mips3'],
- }, {
- 'cflags': ['-mips32', '-Wa,-mips32'],
}],
],
}],
@@ -207,7 +237,7 @@
'defines': ['_MIPS_ARCH_LOONGSON',],
}],
],
- }], # v8_target_arch=="mips"
+ }], # v8_target_arch=="mipsel"
['v8_target_arch=="x64"', {
'defines': [
'V8_TARGET_ARCH_X64',
@@ -220,6 +250,7 @@
'StackReserveSize': '2097152',
},
},
+ 'msvs_configuration_platform': 'x64',
}], # v8_target_arch=="x64"
['v8_use_liveobjectlist=="true"', {
'defines': [
@@ -238,6 +269,11 @@
'defines': [
'WIN32',
],
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
+ 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+ 'CharacterSet': '1',
+ },
}],
['OS=="win" and v8_enable_prof==1', {
'msvs_settings': {
@@ -260,13 +296,13 @@
['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
or OS=="netbsd" or OS=="mac" or OS=="android") and \
(v8_target_arch=="arm" or v8_target_arch=="ia32" or \
- v8_target_arch=="mips")', {
+ v8_target_arch=="mipsel")', {
# Check whether the host compiler and target compiler support the
# '-m32' option and set it if so.
'target_conditions': [
['_toolset=="host"', {
'variables': {
- 'm32flag': '<!((echo | $(echo ${CXX_host:-${CXX:-$(which g++)}}) -m32 -E - > /dev/null 2>&1) && echo -n "-m32" || true)',
+ 'm32flag': '<!((echo | $(echo ${CXX_host:-$(which g++)}) -m32 -E - > /dev/null 2>&1) && echo "-m32" || true)',
},
'cflags': [ '<(m32flag)' ],
'ldflags': [ '<(m32flag)' ],
@@ -276,10 +312,15 @@
}],
['_toolset=="target"', {
'variables': {
- 'm32flag': '<!((echo | $(echo ${CXX_target:-${CXX:-$(which g++)}}) -m32 -E - > /dev/null 2>&1) && echo -n "-m32" || true)',
+ 'm32flag': '<!((echo | $(echo ${CXX_target:-${CXX:-$(which g++)}}) -m32 -E - > /dev/null 2>&1) && echo "-m32" || true)',
+ 'clang%': 0,
},
- 'cflags': [ '<(m32flag)' ],
- 'ldflags': [ '<(m32flag)' ],
+ 'conditions': [
+ ['OS!="android" or clang==1', {
+ 'cflags': [ '<(m32flag)' ],
+ 'ldflags': [ '<(m32flag)' ],
+ }],
+ ],
'xcode_settings': {
'ARCHS': [ 'i386' ],
},
@@ -300,6 +341,7 @@
'ENABLE_DISASSEMBLER',
'V8_ENABLE_CHECKS',
'OBJECT_PRINT',
+ 'VERIFY_HEAP',
],
'msvs_settings': {
'VCCLCompilerTool': {
@@ -322,6 +364,20 @@
'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor', '-Woverloaded-virtual' ],
}],
+ ['OS=="android"', {
+ 'variables': {
+ 'android_full_debug%': 1,
+ },
+ 'conditions': [
+ ['android_full_debug==0', {
+ # Disable full debug if we want a faster v8 in a debug build.
+ # TODO(2304): pass DISABLE_DEBUG_ASSERT instead of hiding DEBUG.
+ 'defines!': [
+ 'DEBUG',
+ ],
+ }],
+ ],
+ }],
],
}, # Debug
'Release': {
@@ -335,7 +391,6 @@
'cflags': [
'-fdata-sections',
'-ffunction-sections',
- '-fomit-frame-pointer',
'-O3',
],
'conditions': [
@@ -359,26 +414,23 @@
},
}], # OS=="mac"
['OS=="win"', {
- 'msvs_configuration_attributes': {
- 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
- 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
- 'CharacterSet': '1',
- },
'msvs_settings': {
'VCCLCompilerTool': {
'Optimization': '2',
'InlineFunctionExpansion': '2',
'EnableIntrinsicFunctions': 'true',
'FavorSizeOrSpeed': '0',
- 'OmitFramePointers': 'true',
'StringPooling': 'true',
-
'conditions': [
['OS=="win" and component=="shared_library"', {
'RuntimeLibrary': '2', #/MD
}, {
'RuntimeLibrary': '0', #/MT
}],
+ ['v8_target_arch=="x64"', {
+ # TODO(2207): remove this option once the bug is fixed.
+ 'WholeProgramOptimization': 'true',
+ }],
],
},
'VCLinkerTool': {
diff --git a/src/3rdparty/v8/build/standalone.gypi b/src/3rdparty/v8/build/standalone.gypi
index ebdf557..7145a16 100644
--- a/src/3rdparty/v8/build/standalone.gypi
+++ b/src/3rdparty/v8/build/standalone.gypi
@@ -33,6 +33,7 @@
'component%': 'static_library',
'visibility%': 'hidden',
'msvs_multi_core_compile%': '1',
+ 'mac_deployment_target%': '10.5',
'variables': {
'variables': {
'variables': {
@@ -45,7 +46,7 @@
# to gyp.
'host_arch%':
'<!(uname -m | sed -e "s/i.86/ia32/;\
- s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/mips.*/mips/")',
+ s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/mips.*/mipsel/")',
}, {
# OS!="linux" and OS!="freebsd" and OS!="openbsd" and
# OS!="netbsd" and OS!="mac"
@@ -66,8 +67,9 @@
'werror%': '-Werror',
'conditions': [
['(v8_target_arch=="arm" and host_arch!="arm") or \
- (v8_target_arch=="mips" and host_arch!="mips") or \
- (v8_target_arch=="x64" and host_arch!="x64")', {
+ (v8_target_arch=="mipsel" and host_arch!="mipsel") or \
+ (v8_target_arch=="x64" and host_arch!="x64") or \
+ (OS=="android")', {
'want_separate_host_toolset': 1,
}, {
'want_separate_host_toolset': 0,
@@ -191,7 +193,8 @@
'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES', # -Werror
'GCC_VERSION': '4.2',
'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof
- 'MACOSX_DEPLOYMENT_TARGET': '10.4', # -mmacosx-version-min=10.4
+ # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+ 'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
'PREBINDING': 'NO', # No -Wl,-prebind
'SYMROOT': '<(DEPTH)/xcodebuild',
'USE_HEADERMAP': 'NO',
diff --git a/src/3rdparty/v8/include/v8-debug.h b/src/3rdparty/v8/include/v8-debug.h
index 9e85dc4..f432de0 100644
--- a/src/3rdparty/v8/include/v8-debug.h
+++ b/src/3rdparty/v8/include/v8-debug.h
@@ -321,7 +321,7 @@ class EXPORT Debug {
* \endcode
*/
static Local<Value> Call(v8::Handle<v8::Function> fun,
- Handle<Value> data = Handle<Value>());
+ Handle<Value> data = Handle<Value>());
/**
* Returns a mirror object for the given object.
@@ -388,6 +388,14 @@ class EXPORT Debug {
* to change.
*/
static Local<Context> GetDebugContext();
+
+
+ /**
+ * Enable/disable LiveEdit functionality for the given Isolate
+ * (default Isolate if not provided). V8 will abort if LiveEdit is
+ * unexpectedly used. LiveEdit is enabled by default.
+ */
+ static void SetLiveEditEnabled(bool enable, Isolate* isolate = NULL);
};
diff --git a/src/3rdparty/v8/include/v8-preparser.h b/src/3rdparty/v8/include/v8-preparser.h
index f11d05e..389949d 100644
--- a/src/3rdparty/v8/include/v8-preparser.h
+++ b/src/3rdparty/v8/include/v8-preparser.h
@@ -55,11 +55,12 @@
// Setup for Linux shared library export. There is no need to distinguish
// between building or using the V8 shared library, but we should not
// export symbols when we are building a static library.
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
+ (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
#define V8EXPORT __attribute__ ((visibility("default")))
-#else // defined(__GNUC__) && (__GNUC__ >= 4)
+#else
#define V8EXPORT
-#endif // defined(__GNUC__) && (__GNUC__ >= 4)
+#endif
#endif // _WIN32
diff --git a/src/3rdparty/v8/include/v8-profiler.h b/src/3rdparty/v8/include/v8-profiler.h
index 8f380f2..c1e9a9e 100644
--- a/src/3rdparty/v8/include/v8-profiler.h
+++ b/src/3rdparty/v8/include/v8-profiler.h
@@ -50,11 +50,12 @@
// Setup for Linux shared library export. See v8.h in this directory for
// information on how to build/use V8 as shared library.
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
+ (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
#define V8EXPORT __attribute__ ((visibility("default")))
-#else // defined(__GNUC__) && (__GNUC__ >= 4)
+#else
#define V8EXPORT
-#endif // defined(__GNUC__) && (__GNUC__ >= 4)
+#endif
#endif // _WIN32
@@ -280,32 +281,12 @@ class V8EXPORT HeapGraphNode {
/** Returns node's own size, in bytes. */
int GetSelfSize() const;
- /**
- * Returns node's retained size, in bytes. That is, self + sizes of
- * the objects that are reachable only from this object. In other
- * words, the size of memory that will be reclaimed having this node
- * collected.
- */
- int GetRetainedSize() const;
-
/** Returns child nodes count of the node. */
int GetChildrenCount() const;
/** Retrieves a child by index. */
const HeapGraphEdge* GetChild(int index) const;
- /** Returns retainer nodes count of the node. */
- int GetRetainersCount() const;
-
- /** Returns a retainer by index. */
- const HeapGraphEdge* GetRetainer(int index) const;
-
- /**
- * Returns a dominator node. This is the node that participates in every
- * path from the snapshot root to the current node.
- */
- const HeapGraphNode* GetDominatorNode() const;
-
/**
* Finds and returns a value from the heap corresponding to this node,
* if the value is still reachable.
@@ -448,11 +429,12 @@ class V8EXPORT HeapProfiler {
* reports updates for all previous time intervals via the OutputStream
* object. Updates on each time interval are provided as a stream of the
* HeapStatsUpdate structure instances.
+ * The return value of the function is the last seen heap object Id.
*
* StartHeapObjectsTracking must be called before the first call to this
* method.
*/
- static void PushHeapObjectsStats(OutputStream* stream);
+ static SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
/**
* Stops tracking of heap objects population statistics, cleans up all
@@ -481,6 +463,9 @@ class V8EXPORT HeapProfiler {
/** Returns the number of currently existing persistent handles. */
static int GetPersistentHandleCount();
+
+ /** Returns memory used for profiler internal data and snapshots. */
+ static size_t GetMemorySizeUsedByProfiler();
};
diff --git a/src/3rdparty/v8/include/v8-testing.h b/src/3rdparty/v8/include/v8-testing.h
index 245f74d..59eebf9 100644
--- a/src/3rdparty/v8/include/v8-testing.h
+++ b/src/3rdparty/v8/include/v8-testing.h
@@ -50,11 +50,12 @@
// Setup for Linux shared library export. See v8.h in this directory for
// information on how to build/use V8 as shared library.
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
+ (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
#define V8EXPORT __attribute__ ((visibility("default")))
-#else // defined(__GNUC__) && (__GNUC__ >= 4)
+#else
#define V8EXPORT
-#endif // defined(__GNUC__) && (__GNUC__ >= 4)
+#endif
#endif // _WIN32
diff --git a/src/3rdparty/v8/include/v8.h b/src/3rdparty/v8/include/v8.h
index 3416cd4..27c5855 100644
--- a/src/3rdparty/v8/include/v8.h
+++ b/src/3rdparty/v8/include/v8.h
@@ -63,15 +63,16 @@
#else // _WIN32
// Setup for Linux shared library export.
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
+ (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
#ifdef BUILDING_V8_SHARED
#define V8EXPORT __attribute__ ((visibility("default")))
#else
#define V8EXPORT
#endif
-#else // defined(__GNUC__) && (__GNUC__ >= 4)
+#else
#define V8EXPORT
-#endif // defined(__GNUC__) && (__GNUC__ >= 4)
+#endif
#endif // _WIN32
@@ -100,6 +101,7 @@ class Function;
class Date;
class ImplementationUtilities;
class Signature;
+class AccessorSignature;
template <class T> class Handle;
template <class T> class Local;
template <class T> class Persistent;
@@ -379,6 +381,7 @@ template <class T> class Persistent : public Handle<T> {
* cell remain and IsEmpty will still return false.
*/
inline void Dispose();
+ inline void Dispose(Isolate* isolate);
/**
* Make the reference to this object weak. When only weak handles
@@ -388,7 +391,7 @@ template <class T> class Persistent : public Handle<T> {
*/
inline void MakeWeak(void* parameters, WeakReferenceCallback callback);
- /** Clears the weak reference to this object.*/
+ /** Clears the weak reference to this object. */
inline void ClearWeak();
/**
@@ -399,15 +402,27 @@ template <class T> class Persistent : public Handle<T> {
* or followed by a global GC epilogue callback.
*/
inline void MarkIndependent();
+ inline void MarkIndependent(Isolate* isolate);
/**
- *Checks if the handle holds the only reference to an object.
+ * Marks the reference to this object partially dependent. Partially
+ * dependent handles only depend on other partially dependent handles and
+ * these dependencies are provided through object groups. It provides a way
+ * to build smaller object groups for young objects that represent only a
+ * subset of all external dependencies. This mark is automatically cleared
+ * after each garbage collection.
*/
+ inline void MarkPartiallyDependent();
+ inline void MarkPartiallyDependent(Isolate* isolate);
+
+ /** Returns true if this handle was previously marked as independent. */
+ inline bool IsIndependent() const;
+ inline bool IsIndependent(Isolate* isolate) const;
+
+ /** Checks if the handle holds the only reference to an object. */
inline bool IsNearDeath() const;
- /**
- * Returns true if the handle's reference is weak.
- */
+ /** Returns true if the handle's reference is weak. */
inline bool IsWeak() const;
/**
@@ -416,6 +431,12 @@ template <class T> class Persistent : public Handle<T> {
*/
inline void SetWrapperClassId(uint16_t class_id);
+ /**
+ * Returns the class ID previously assigned to this handle or 0 if no class
+ * ID was previously assigned.
+ */
+ inline uint16_t WrapperClassId() const;
+
private:
friend class ImplementationUtilities;
friend class ObjectTemplate;
@@ -1032,6 +1053,11 @@ class Boolean : public Primitive {
*/
class String : public Primitive {
public:
+ enum Encoding {
+ UNKNOWN_ENCODING = 0x1,
+ TWO_BYTE_ENCODING = 0x0,
+ ASCII_ENCODING = 0x4
+ };
/**
* Returns the number of characters in this string.
*/
@@ -1087,16 +1113,6 @@ class String : public Primitive {
V8EXPORT static uint32_t ComputeHash(char *string, int length);
/**
- * Returns true if this string is equal to the external
- * string data provided.
- */
- V8EXPORT bool Equals(uint16_t *string, int length);
- V8EXPORT bool Equals(char *string, int length);
- inline bool Equals(Handle<Value> that) const {
- return v8::Value::Equals(that);
- }
-
- /**
* Write the contents of the string to an external buffer.
* If no arguments are given, expects the buffer to be large
* enough to hold the entire string and NULL terminator. Copies
@@ -1124,11 +1140,10 @@ class String : public Primitive {
enum WriteOptions {
NO_OPTIONS = 0,
HINT_MANY_WRITES_EXPECTED = 1,
- NO_NULL_TERMINATION = 2
+ NO_NULL_TERMINATION = 2,
+ PRESERVE_ASCII_NULL = 4
};
- V8EXPORT uint16_t GetCharacter(int index);
-
// 16-bit character codes.
V8EXPORT int Write(uint16_t* buffer,
int start = 0,
@@ -1241,6 +1256,14 @@ class String : public Primitive {
};
/**
+ * If the string is an external string, return the ExternalStringResourceBase
+ * regardless of the encoding, otherwise return NULL. The encoding of the
+ * string is returned in encoding_out.
+ */
+ inline ExternalStringResourceBase* GetExternalStringResourceBase(
+ Encoding* encoding_out) const;
+
+ /**
* Get the ExternalStringResource for an external string. Returns
* NULL if IsExternal() doesn't return true.
*/
@@ -1403,6 +1426,8 @@ class String : public Primitive {
};
private:
+ V8EXPORT void VerifyExternalStringResourceBase(ExternalStringResourceBase* v,
+ Encoding encoding) const;
V8EXPORT void VerifyExternalStringResource(ExternalStringResource* val) const;
V8EXPORT static void CheckCast(v8::Value* obj);
};
@@ -1429,6 +1454,8 @@ class Integer : public Number {
public:
V8EXPORT static Local<Integer> New(int32_t value);
V8EXPORT static Local<Integer> NewFromUnsigned(uint32_t value);
+ V8EXPORT static Local<Integer> New(int32_t value, Isolate*);
+ V8EXPORT static Local<Integer> NewFromUnsigned(uint32_t value, Isolate*);
V8EXPORT int64_t Value() const;
static inline Integer* Cast(v8::Value* obj);
private:
@@ -1613,6 +1640,12 @@ class Object : public Value {
V8EXPORT Local<String> ObjectProtoToString();
/**
+ * Returns the function invoked as a constructor for this object.
+ * May be the null value.
+ */
+ V8EXPORT Local<Value> GetConstructor();
+
+ /**
* Returns the name of the function invoked as a constructor for this object.
*/
V8EXPORT Local<String> GetConstructorName();
@@ -2371,7 +2404,8 @@ class V8EXPORT FunctionTemplate : public Template {
AccessorSetter setter,
Handle<Value> data,
AccessControl settings,
- PropertyAttribute attributes);
+ PropertyAttribute attributes,
+ Handle<AccessorSignature> signature);
void SetNamedInstancePropertyHandler(NamedPropertyGetter getter,
NamedPropertySetter setter,
NamedPropertyQuery query,
@@ -2430,13 +2464,20 @@ class V8EXPORT ObjectTemplate : public Template {
* cross-context access.
* \param attribute The attributes of the property for which an accessor
* is added.
+ * \param signature The signature describes valid receivers for the accessor
+ * and is used to perform implicit instance checks against them. If the
+ * receiver is incompatible (i.e. is not an instance of the constructor as
+ * defined by FunctionTemplate::HasInstance()), an implicit TypeError is
+ * thrown and no callback is invoked.
*/
void SetAccessor(Handle<String> name,
AccessorGetter getter,
AccessorSetter setter = 0,
Handle<Value> data = Handle<Value>(),
AccessControl settings = DEFAULT,
- PropertyAttribute attribute = None);
+ PropertyAttribute attribute = None,
+ Handle<AccessorSignature> signature =
+ Handle<AccessorSignature>());
/**
* Sets a named property handler on the object template.
@@ -2545,7 +2586,7 @@ class V8EXPORT ObjectTemplate : public Template {
void SetHasExternalResource(bool value);
/**
- * Mark object instances of the template as using the user object
+ * Mark object instances of the template as using the user object
* comparison callback.
*/
void MarkAsUseUserObjectComparison();
@@ -2558,8 +2599,8 @@ class V8EXPORT ObjectTemplate : public Template {
/**
- * A Signature specifies which receivers and arguments a function can
- * legally be called with.
+ * A Signature specifies which receivers and arguments are valid
+ * parameters to a function.
*/
class V8EXPORT Signature : public Data {
public:
@@ -2573,6 +2614,19 @@ class V8EXPORT Signature : public Data {
/**
+ * An AccessorSignature specifies which receivers are valid parameters
+ * to an accessor callback.
+ */
+class V8EXPORT AccessorSignature : public Data {
+ public:
+ static Local<AccessorSignature> New(Handle<FunctionTemplate> receiver =
+ Handle<FunctionTemplate>());
+ private:
+ AccessorSignature();
+};
+
+
+/**
* A utility for determining the type of objects based on the template
* they were constructed from.
*/
@@ -2709,7 +2763,7 @@ bool V8EXPORT SetResourceConstraints(ResourceConstraints* constraints);
typedef void (*FatalErrorCallback)(const char* location, const char* message);
-typedef void (*MessageCallback)(Handle<Message> message, Handle<Value> data);
+typedef void (*MessageCallback)(Handle<Message> message, Handle<Value> error);
/**
@@ -2777,8 +2831,8 @@ typedef void (*FailedAccessCheckCallback)(Local<Object> target,
AccessType type,
Local<Value> data);
-// --- User Object Comparisoa nCallback ---
-typedef bool (*UserObjectComparisonCallback)(Local<Object> lhs,
+// --- User Object Comparison Callback ---
+typedef bool (*UserObjectComparisonCallback)(Local<Object> lhs,
Local<Object> rhs);
// --- AllowCodeGenerationFromStrings callbacks ---
@@ -2826,6 +2880,7 @@ class V8EXPORT HeapStatistics {
HeapStatistics();
size_t total_heap_size() { return total_heap_size_; }
size_t total_heap_size_executable() { return total_heap_size_executable_; }
+ size_t total_physical_size() { return total_physical_size_; }
size_t used_heap_size() { return used_heap_size_; }
size_t heap_size_limit() { return heap_size_limit_; }
@@ -2834,11 +2889,15 @@ class V8EXPORT HeapStatistics {
void set_total_heap_size_executable(size_t size) {
total_heap_size_executable_ = size;
}
+ void set_total_physical_size(size_t size) {
+ total_physical_size_ = size;
+ }
void set_used_heap_size(size_t size) { used_heap_size_ = size; }
void set_heap_size_limit(size_t size) { heap_size_limit_ = size; }
size_t total_heap_size_;
size_t total_heap_size_executable_;
+ size_t total_physical_size_;
size_t used_heap_size_;
size_t heap_size_limit_;
@@ -2992,17 +3051,86 @@ typedef bool (*EntropySource)(unsigned char* buffer, size_t length);
* resolving the location of a return address on the stack. Profilers that
* change the return address on the stack can use this to resolve the stack
* location to whereever the profiler stashed the original return address.
- * When invoked, return_addr_location will point to a location on stack where
- * a machine return address resides, this function should return either the
- * same pointer, or a pointer to the profiler's copy of the original return
- * address.
+ *
+ * \param return_addr_location points to a location on stack where a machine
+ * return address resides.
+ * \returns either return_addr_location, or else a pointer to the profiler's
+ * copy of the original return address.
+ *
+ * \note the resolver function must not cause garbage collection.
*/
typedef uintptr_t (*ReturnAddressLocationResolver)(
uintptr_t return_addr_location);
/**
- * Interface for iterating though all external resources in the heap.
+ * FunctionEntryHook is the type of the profile entry hook called at entry to
+ * any generated function when function-level profiling is enabled.
+ *
+ * \param function the address of the function that's being entered.
+ * \param return_addr_location points to a location on stack where the machine
+ * return address resides. This can be used to identify the caller of
+ * \p function, and/or modified to divert execution when \p function exits.
+ *
+ * \note the entry hook must not cause garbage collection.
+ */
+typedef void (*FunctionEntryHook)(uintptr_t function,
+ uintptr_t return_addr_location);
+
+
+/**
+ * A JIT code event is issued each time code is added, moved or removed.
+ *
+ * \note removal events are not currently issued.
+ */
+struct JitCodeEvent {
+ enum EventType {
+ CODE_ADDED,
+ CODE_MOVED,
+ CODE_REMOVED
+ };
+
+ // Type of event.
+ EventType type;
+ // Start of the instructions.
+ void* code_start;
+ // Size of the instructions.
+ size_t code_len;
+
+ union {
+ // Only valid for CODE_ADDED.
+ struct {
+ // Name of the object associated with the code, note that the string is
+ // not zero-terminated.
+ const char* str;
+ // Number of chars in str.
+ size_t len;
+ } name;
+ // New location of instructions. Only valid for CODE_MOVED.
+ void* new_code_start;
+ };
+};
+
+/**
+ * Option flags passed to the SetJitCodeEventHandler function.
+ */
+enum JitCodeEventOptions {
+ kJitCodeEventDefault = 0,
+ // Generate callbacks for already existent code.
+ kJitCodeEventEnumExisting = 1
+};
+
+
+/**
+ * Callback function passed to SetJitCodeEventHandler.
+ *
+ * \param event code add, move or removal event.
+ */
+typedef void (*JitCodeEventHandler)(const JitCodeEvent* event);
+
+
+/**
+ * Interface for iterating through all external resources in the heap.
*/
class V8EXPORT ExternalResourceVisitor { // NOLINT
public:
@@ -3012,6 +3140,17 @@ class V8EXPORT ExternalResourceVisitor { // NOLINT
/**
+ * Interface for iterating through all the persistent handles in the heap.
+ */
+class V8EXPORT PersistentHandleVisitor { // NOLINT
+ public:
+ virtual ~PersistentHandleVisitor() {}
+ virtual void VisitPersistentHandle(Persistent<Value>,
+ uint16_t) {}
+};
+
+
+/**
* Container class for static utility functions.
*/
class V8EXPORT V8 {
@@ -3076,8 +3215,7 @@ class V8EXPORT V8 {
* The same message listener can be added more than once and in that
* case it will be called more than once for each message.
*/
- static bool AddMessageListener(MessageCallback that,
- Handle<Value> data = Handle<Value>());
+ static bool AddMessageListener(MessageCallback that);
/**
* Remove all message listeners from the specified callback function.
@@ -3226,7 +3364,10 @@ class V8EXPORT V8 {
* After each garbage collection, object groups are removed. It is
* intended to be used in the before-garbage-collection callback
* function, for instance to simulate DOM tree connections among JS
- * wrapper objects.
+ * wrapper objects. Object groups for all dependent handles need to
+ * be provided for kGCTypeMarkSweepCompact collections, for all other
+ * garbage collection types it is sufficient to provide object groups
+ * for partially dependent handles only.
* See v8-profiler.h for RetainedObjectInfo interface description.
*/
static void AddObjectGroup(Persistent<Value>* objects,
@@ -3265,6 +3406,43 @@ class V8EXPORT V8 {
ReturnAddressLocationResolver return_address_resolver);
/**
+ * Allows the host application to provide the address of a function that's
+ * invoked on entry to every V8-generated function.
+ * Note that \p entry_hook is invoked at the very start of each
+ * generated function.
+ *
+ * \param entry_hook a function that will be invoked on entry to every
+ * V8-generated function.
+ * \returns true on success on supported platforms, false on failure.
+ * \note Setting a new entry hook function when one is already active will
+ * fail.
+ */
+ static bool SetFunctionEntryHook(FunctionEntryHook entry_hook);
+
+ /**
+ * Allows the host application to provide the address of a function that is
+ * notified each time code is added, moved or removed.
+ *
+ * \param options options for the JIT code event handler.
+ * \param event_handler the JIT code event handler, which will be invoked
+ * each time code is added, moved or removed.
+ * \note \p event_handler won't get notified of existent code.
+ * \note since code removal notifications are not currently issued, the
+ * \p event_handler may get notifications of code that overlaps earlier
+ * code notifications. This happens when code areas are reused, and the
+ * earlier overlapping code areas should therefore be discarded.
+ * \note the events passed to \p event_handler and the strings they point to
+ * are not guaranteed to live past each call. The \p event_handler must
+ * copy strings and other parameters it needs to keep around.
+ * \note the set of events declared in JitCodeEvent::EventType is expected to
+ * grow over time, and the JitCodeEvent structure is expected to accrue
+ * new members. The \p event_handler function must ignore event codes
+ * it does not recognize to maintain future compatibility.
+ */
+ static void SetJitCodeEventHandler(JitCodeEventOptions options,
+ JitCodeEventHandler event_handler);
+
+ /**
* Adjusts the amount of registered external memory. Used to give
* V8 an indication of the amount of externally allocated memory
* that is kept alive by JavaScript objects. V8 uses this to decide
@@ -3379,12 +3557,18 @@ class V8EXPORT V8 {
/**
* Iterates through all external resources referenced from current isolate
- * heap. This method is not expected to be used except for debugging purposes
- * and may be quite slow.
+ * heap. GC is not invoked prior to iterating, therefore there is no
+ * guarantee that visited objects are still alive.
*/
static void VisitExternalResources(ExternalResourceVisitor* visitor);
/**
+ * Iterates through all the persistent handles in the current isolate's heap
+ * that have class_ids.
+ */
+ static void VisitHandlesWithClassIds(PersistentHandleVisitor* visitor);
+
+ /**
* Optional notification that the embedder is idle.
* V8 uses the notification to reduce memory footprint.
* This call can be used repeatedly if the embedder remains idle.
@@ -3417,15 +3601,26 @@ class V8EXPORT V8 {
static internal::Object** GlobalizeReference(internal::Object** handle);
static void DisposeGlobal(internal::Object** global_handle);
+ static void DisposeGlobal(internal::Isolate* isolate,
+ internal::Object** global_handle);
static void MakeWeak(internal::Object** global_handle,
void* data,
WeakReferenceCallback);
static void ClearWeak(internal::Object** global_handle);
static void MarkIndependent(internal::Object** global_handle);
+ static void MarkIndependent(internal::Isolate* isolate,
+ internal::Object** global_handle);
+ static void MarkPartiallyDependent(internal::Object** global_handle);
+ static void MarkPartiallyDependent(internal::Isolate* isolate,
+ internal::Object** global_handle);
+ static bool IsGlobalIndependent(internal::Object** global_handle);
+ static bool IsGlobalIndependent(internal::Isolate* isolate,
+ internal::Object** global_handle);
static bool IsGlobalNearDeath(internal::Object** global_handle);
static bool IsGlobalWeak(internal::Object** global_handle);
static void SetWrapperClassId(internal::Object** global_handle,
uint16_t class_id);
+ static uint16_t GetWrapperClassId(internal::Object** global_handle);
template <class T> friend class Handle;
template <class T> friend class Local;
@@ -3677,7 +3872,7 @@ class V8EXPORT Context {
* with the debugger to provide additional information on the context through
* the debugger API.
*/
- void SetData(Handle<String> data);
+ void SetData(Handle<Value> data);
Local<Value> GetData();
/**
@@ -3702,6 +3897,13 @@ class V8EXPORT Context {
bool IsCodeGenerationFromStringsAllowed();
/**
+ * Sets the error description for the exception that is thrown when
+ * code generation from strings is not allowed and 'eval' or the 'Function'
+ * constructor are called.
+ */
+ void SetErrorMessageForCodeGenerationFromStrings(Handle<String> message);
+
+ /**
* Stack-allocated class which sets the execution context for all
* operations executed within a local scope.
*/
@@ -4010,7 +4212,9 @@ class Internals {
static const int kForeignAddressOffset = kApiPointerSize;
static const int kJSObjectHeaderSize = 3 * kApiPointerSize;
static const int kFullStringRepresentationMask = 0x07;
+ static const int kStringEncodingMask = 0x4;
static const int kExternalTwoByteRepresentationTag = 0x02;
+ static const int kExternalAsciiRepresentationTag = 0x06;
static const int kIsolateStateOffset = 0;
static const int kIsolateEmbedderDataOffset = 1 * kApiPointerSize;
@@ -4019,7 +4223,7 @@ class Internals {
static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptySymbolRootIndex = 128;
+ static const int kEmptySymbolRootIndex = 118;
static const int kJSObjectType = 0xaa;
static const int kFirstNonstringType = 0x80;
@@ -4139,6 +4343,21 @@ Persistent<T> Persistent<T>::New(Handle<T> that) {
template <class T>
+bool Persistent<T>::IsIndependent() const {
+ if (this->IsEmpty()) return false;
+ return V8::IsGlobalIndependent(reinterpret_cast<internal::Object**>(**this));
+}
+
+
+template <class T>
+bool Persistent<T>::IsIndependent(Isolate* isolate) const {
+ if (this->IsEmpty()) return false;
+ return V8::IsGlobalIndependent(reinterpret_cast<internal::Isolate*>(isolate),
+ reinterpret_cast<internal::Object**>(**this));
+}
+
+
+template <class T>
bool Persistent<T>::IsNearDeath() const {
if (this->IsEmpty()) return false;
return V8::IsGlobalNearDeath(reinterpret_cast<internal::Object**>(**this));
@@ -4160,6 +4379,14 @@ void Persistent<T>::Dispose() {
template <class T>
+void Persistent<T>::Dispose(Isolate* isolate) {
+ if (this->IsEmpty()) return;
+ V8::DisposeGlobal(reinterpret_cast<internal::Isolate*>(isolate),
+ reinterpret_cast<internal::Object**>(**this));
+}
+
+
+template <class T>
Persistent<T>::Persistent() : Handle<T>() { }
template <class T>
@@ -4180,10 +4407,32 @@ void Persistent<T>::MarkIndependent() {
}
template <class T>
+void Persistent<T>::MarkIndependent(Isolate* isolate) {
+ V8::MarkIndependent(reinterpret_cast<internal::Isolate*>(isolate),
+ reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
+void Persistent<T>::MarkPartiallyDependent() {
+ V8::MarkPartiallyDependent(reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
+void Persistent<T>::MarkPartiallyDependent(Isolate* isolate) {
+ V8::MarkPartiallyDependent(reinterpret_cast<internal::Isolate*>(isolate),
+ reinterpret_cast<internal::Object**>(**this));
+}
+
+template <class T>
void Persistent<T>::SetWrapperClassId(uint16_t class_id) {
V8::SetWrapperClassId(reinterpret_cast<internal::Object**>(**this), class_id);
}
+template <class T>
+uint16_t Persistent<T>::WrapperClassId() const {
+ return V8::GetWrapperClassId(reinterpret_cast<internal::Object**>(**this));
+}
+
Arguments::Arguments(internal::Object** implicit_args,
internal::Object** values, int length,
bool is_construct_call)
@@ -4365,6 +4614,26 @@ String::ExternalStringResource* String::GetExternalStringResource() const {
}
+String::ExternalStringResourceBase* String::GetExternalStringResourceBase(
+ String::Encoding* encoding_out) const {
+ typedef internal::Object O;
+ typedef internal::Internals I;
+ O* obj = *reinterpret_cast<O**>(const_cast<String*>(this));
+ int type = I::GetInstanceType(obj) & I::kFullStringRepresentationMask;
+ *encoding_out = static_cast<Encoding>(type & I::kStringEncodingMask);
+ ExternalStringResourceBase* resource = NULL;
+ if (type == I::kExternalAsciiRepresentationTag ||
+ type == I::kExternalTwoByteRepresentationTag) {
+ void* value = I::ReadField<void*>(obj, I::kStringResourceOffset);
+ resource = static_cast<ExternalStringResourceBase*>(value);
+ }
+#ifdef V8_ENABLE_CHECKS
+ VerifyExternalStringResourceBase(resource, *encoding_out);
+#endif
+ return resource;
+}
+
+
bool Value::IsUndefined() const {
#ifdef V8_ENABLE_CHECKS
return FullIsUndefined();
diff --git a/src/3rdparty/v8/preparser/preparser-process.cc b/src/3rdparty/v8/preparser/preparser-process.cc
index 368f63f..1bcc804 100644
--- a/src/3rdparty/v8/preparser/preparser-process.cc
+++ b/src/3rdparty/v8/preparser/preparser-process.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -202,7 +202,7 @@ void fail(v8::PreParserData* data, const char* message, ...) {
fflush(stderr);
if (data != NULL) {
// Print preparser data to stdout.
- uint32_t size = data->size();
+ uint32_t size = static_cast<uint32_t>(data->size());
fprintf(stderr, "LOG: data size: %u\n", size);
if (!WriteBuffer(stdout, data->data(), size)) {
perror("ERROR: Writing data");
@@ -232,7 +232,7 @@ struct ExceptionExpectation {
void CheckException(v8::PreParserData* data,
ExceptionExpectation* expects) {
- PreparseDataInterpreter reader(data->data(), data->size());
+ PreparseDataInterpreter reader(data->data(), static_cast<int>(data->size()));
if (expects->throws) {
if (!reader.throws()) {
if (expects->type == NULL) {
diff --git a/src/3rdparty/v8/samples/lineprocessor.cc b/src/3rdparty/v8/samples/lineprocessor.cc
index 7a84a2a..26e787f 100644
--- a/src/3rdparty/v8/samples/lineprocessor.cc
+++ b/src/3rdparty/v8/samples/lineprocessor.cc
@@ -25,19 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// This controls whether this sample is compiled with debugger support.
-// You may trace its usages in source text to see what parts of program
-// are responsible for debugging support.
-// Note that V8 itself should be compiled with enabled debugger support
-// to have it all working.
-#define SUPPORT_DEBUGGING
-
#include <v8.h>
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
#include <v8-debug.h>
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
#include <fcntl.h>
#include <string.h>
@@ -116,7 +108,7 @@ bool RunCppCycle(v8::Handle<v8::Script> script, v8::Local<v8::Context> context,
bool report_exceptions);
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
v8::Persistent<v8::Context> debug_message_context;
void DispatchDebugMessages() {
@@ -135,7 +127,7 @@ void DispatchDebugMessages() {
v8::Debug::ProcessDebugMessages();
}
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
int RunMain(int argc, char* argv[]) {
@@ -146,11 +138,11 @@ int RunMain(int argc, char* argv[]) {
v8::Handle<v8::Value> script_name(NULL);
int script_param_counter = 0;
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
int port_number = -1;
bool wait_for_connection = false;
bool support_callback = false;
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
MainCycleType cycle_type = CycleInCpp;
@@ -164,7 +156,7 @@ int RunMain(int argc, char* argv[]) {
cycle_type = CycleInCpp;
} else if (strcmp(str, "--main-cycle-in-js") == 0) {
cycle_type = CycleInJs;
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (strcmp(str, "--callback") == 0) {
support_callback = true;
} else if (strcmp(str, "--wait-for-connection") == 0) {
@@ -172,7 +164,7 @@ int RunMain(int argc, char* argv[]) {
} else if (strcmp(str, "-p") == 0 && i + 1 < argc) {
port_number = atoi(argv[i + 1]); // NOLINT
i++;
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
} else if (strncmp(str, "--", 2) == 0) {
printf("Warning: unknown flag %s.\nTry --help for options\n", str);
} else if (strcmp(str, "-e") == 0 && i + 1 < argc) {
@@ -219,7 +211,7 @@ int RunMain(int argc, char* argv[]) {
// Enter the newly created execution environment.
v8::Context::Scope context_scope(context);
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
debug_message_context = v8::Persistent<v8::Context>::New(context);
v8::Locker locker;
@@ -231,7 +223,7 @@ int RunMain(int argc, char* argv[]) {
if (port_number != -1) {
v8::Debug::EnableAgent("lineprocessor", port_number, wait_for_connection);
}
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
bool report_exceptions = true;
@@ -272,9 +264,9 @@ int RunMain(int argc, char* argv[]) {
bool RunCppCycle(v8::Handle<v8::Script> script, v8::Local<v8::Context> context,
bool report_exceptions) {
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
v8::Locker lock;
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
v8::Handle<v8::String> fun_name = v8::String::New("ProcessLine");
v8::Handle<v8::Value> process_val =
@@ -347,7 +339,7 @@ v8::Handle<v8::String> ReadFile(const char* name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
@@ -427,9 +419,9 @@ v8::Handle<v8::String> ReadLine() {
char* res;
{
-#ifdef SUPPORT_DEBUGGING
+#ifdef ENABLE_DEBUGGER_SUPPORT
v8::Unlocker unlocker;
-#endif
+#endif // ENABLE_DEBUGGER_SUPPORT
res = fgets(buffer, kBufferSize, stdin);
}
if (res == NULL) {
diff --git a/src/3rdparty/v8/samples/process.cc b/src/3rdparty/v8/samples/process.cc
index c0cee4c..ae6a550 100644
--- a/src/3rdparty/v8/samples/process.cc
+++ b/src/3rdparty/v8/samples/process.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -351,7 +351,7 @@ Handle<Value> JsHttpRequestProcessor::MapGet(Local<String> name,
// Otherwise fetch the value and wrap it in a JavaScript string
const string& value = (*iter).second;
- return String::New(value.c_str(), value.length());
+ return String::New(value.c_str(), static_cast<int>(value.length()));
}
@@ -443,7 +443,7 @@ Handle<Value> JsHttpRequestProcessor::GetPath(Local<String> name,
const string& path = request->Path();
// Wrap the result in a JavaScript string and return it.
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
@@ -451,7 +451,7 @@ Handle<Value> JsHttpRequestProcessor::GetReferrer(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->Referrer();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
@@ -459,7 +459,7 @@ Handle<Value> JsHttpRequestProcessor::GetHost(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->Host();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
@@ -467,7 +467,7 @@ Handle<Value> JsHttpRequestProcessor::GetUserAgent(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->UserAgent();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
@@ -557,7 +557,7 @@ Handle<String> ReadFile(const string& name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
diff --git a/src/3rdparty/v8/samples/shell.cc b/src/3rdparty/v8/samples/shell.cc
index db0cc1a..821ef75 100644
--- a/src/3rdparty/v8/samples/shell.cc
+++ b/src/3rdparty/v8/samples/shell.cc
@@ -205,7 +205,7 @@ v8::Handle<v8::String> ReadFile(const char* name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
diff --git a/src/3rdparty/v8/src/SConscript b/src/3rdparty/v8/src/SConscript
index 0d0b535..16bfb55 100755
--- a/src/3rdparty/v8/src/SConscript
+++ b/src/3rdparty/v8/src/SConscript
@@ -43,8 +43,8 @@ SOURCES = {
assembler.cc
ast.cc
atomicops_internals_x86_gcc.cc
- bignum.cc
bignum-dtoa.cc
+ bignum.cc
bootstrapper.cc
builtins.cc
cached-powers.cc
@@ -67,26 +67,30 @@ SOURCES = {
disassembler.cc
diy-fp.cc
dtoa.cc
+ elements-kind.cc
elements.cc
execution.cc
+ extensions/externalize-string-extension.cc
+ extensions/gc-extension.cc
+ extensions/statistics-extension.cc
factory.cc
+ fast-dtoa.cc
+ fixed-dtoa.cc
flags.cc
frames.cc
full-codegen.cc
func-name-inferrer.cc
gdb-jit.cc
global-handles.cc
- fast-dtoa.cc
- fixed-dtoa.cc
handles.cc
heap-profiler.cc
heap.cc
- hydrogen.cc
hydrogen-instructions.cc
+ hydrogen.cc
ic.cc
incremental-marking.cc
- interface.cc
inspector.cc
+ interface.cc
interpreter-irregexp.cc
isolate.cc
jsregexp.cc
@@ -98,34 +102,37 @@ SOURCES = {
log.cc
mark-compact.cc
messages.cc
- objects.cc
objects-printer.cc
objects-visiting.cc
+ objects.cc
once.cc
+ optimizing-compiler-thread.cc
parser.cc
- preparser.cc
preparse-data.cc
+ preparser.cc
profile-generator.cc
property.cc
regexp-macro-assembler-irregexp.cc
regexp-macro-assembler.cc
regexp-stack.cc
rewriter.cc
- runtime.cc
runtime-profiler.cc
+ runtime.cc
safepoint-table.cc
- scanner.cc
scanner-character-streams.cc
+ scanner.cc
scopeinfo.cc
scopes.cc
serialize.cc
snapshot-common.cc
spaces.cc
+ store-buffer.cc
string-search.cc
string-stream.cc
strtod.cc
stub-cache.cc
token.cc
+ transitions.cc
type-info.cc
unicode.cc
utils.cc
@@ -136,10 +143,7 @@ SOURCES = {
v8utils.cc
variables.cc
version.cc
- store-buffer.cc
zone.cc
- extensions/gc-extension.cc
- extensions/externalize-string-extension.cc
"""),
'arch:arm': Split("""
arm/builtins-arm.cc
diff --git a/src/3rdparty/v8/src/accessors.cc b/src/3rdparty/v8/src/accessors.cc
index 8048738..c2f245c 100644
--- a/src/3rdparty/v8/src/accessors.cc
+++ b/src/3rdparty/v8/src/accessors.cc
@@ -42,15 +42,11 @@ namespace internal {
template <class C>
-static C* FindInPrototypeChain(Object* obj, bool* found_it) {
- ASSERT(!*found_it);
- Heap* heap = HEAP;
- while (!Is<C>(obj)) {
- if (obj == heap->null_value()) return NULL;
- obj = obj->GetPrototype();
+static C* FindInstanceOf(Object* obj) {
+ for (Object* cur = obj; !cur->IsNull(); cur = cur->GetPrototype()) {
+ if (Is<C>(cur)) return C::cast(cur);
}
- *found_it = true;
- return C::cast(obj);
+ return NULL;
}
@@ -81,10 +77,8 @@ MaybeObject* Accessors::ReadOnlySetAccessor(JSObject*, Object* value, void*) {
MaybeObject* Accessors::ArrayGetLength(Object* object, void*) {
// Traverse the prototype chain until we reach an array.
- bool found_it = false;
- JSArray* holder = FindInPrototypeChain<JSArray>(object, &found_it);
- if (!found_it) return Smi::FromInt(0);
- return holder->length();
+ JSArray* holder = FindInstanceOf<JSArray>(object);
+ return holder == NULL ? Smi::FromInt(0) : holder->length();
}
@@ -92,15 +86,56 @@ MaybeObject* Accessors::ArrayGetLength(Object* object, void*) {
Object* Accessors::FlattenNumber(Object* value) {
if (value->IsNumber() || !value->IsJSValue()) return value;
JSValue* wrapper = JSValue::cast(value);
- ASSERT(Isolate::Current()->context()->global_context()->number_function()->
+ ASSERT(Isolate::Current()->context()->native_context()->number_function()->
has_initial_map());
- Map* number_map = Isolate::Current()->context()->global_context()->
+ Map* number_map = Isolate::Current()->context()->native_context()->
number_function()->initial_map();
if (wrapper->map() == number_map) return wrapper->value();
return value;
}
+static MaybeObject* ArraySetLengthObserved(Isolate* isolate,
+ Handle<JSArray> array,
+ Handle<Object> new_length_handle) {
+ List<Handle<String> > indices;
+ List<Handle<Object> > old_values;
+ Handle<Object> old_length_handle(array->length(), isolate);
+ uint32_t old_length = 0;
+ CHECK(old_length_handle->ToArrayIndex(&old_length));
+ uint32_t new_length = 0;
+ CHECK(new_length_handle->ToArrayIndex(&new_length));
+ // TODO(adamk): This loop can be very slow for arrays in dictionary mode.
+ // Find another way to iterate over arrays with dictionary elements.
+ for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
+ PropertyAttributes attributes = array->GetLocalElementAttribute(i);
+ if (attributes == ABSENT) continue;
+ // A non-configurable property will cause the truncation operation to
+ // stop at this index.
+ if (attributes == DONT_DELETE) break;
+ // TODO(adamk): Don't fetch the old value if it's an accessor.
+ old_values.Add(Object::GetElement(array, i));
+ indices.Add(isolate->factory()->Uint32ToString(i));
+ }
+
+ MaybeObject* result = array->SetElementsLength(*new_length_handle);
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ CHECK(array->length()->ToArrayIndex(&new_length));
+ if (old_length != new_length) {
+ for (int i = 0; i < indices.length(); ++i) {
+ JSObject::EnqueueChangeRecord(
+ array, "deleted", indices[i], old_values[i]);
+ }
+ JSObject::EnqueueChangeRecord(
+ array, "updated", isolate->factory()->length_symbol(),
+ old_length_handle);
+ }
+ return *hresult;
+}
+
+
MaybeObject* Accessors::ArraySetLength(JSObject* object, Object* value, void*) {
Isolate* isolate = object->GetIsolate();
@@ -118,7 +153,7 @@ MaybeObject* Accessors::ArraySetLength(JSObject* object, Object* value, void*) {
HandleScope scope(isolate);
// Protect raw pointers.
- Handle<JSObject> object_handle(object, isolate);
+ Handle<JSArray> array_handle(JSArray::cast(object), isolate);
Handle<Object> value_handle(value, isolate);
bool has_exception;
@@ -128,7 +163,11 @@ MaybeObject* Accessors::ArraySetLength(JSObject* object, Object* value, void*) {
if (has_exception) return Failure::Exception();
if (uint32_v->Number() == number_v->Number()) {
- return Handle<JSArray>::cast(object_handle)->SetElementsLength(*uint32_v);
+ if (FLAG_harmony_observation && array_handle->map()->is_observed()) {
+ return ArraySetLengthObserved(isolate, array_handle, uint32_v);
+ } else {
+ return array_handle->SetElementsLength(*uint32_v);
+ }
}
return isolate->Throw(
*isolate->factory()->NewRangeError("invalid_array_length",
@@ -448,15 +487,12 @@ const AccessorDescriptor Accessors::ScriptEvalFromFunctionName = {
MaybeObject* Accessors::FunctionGetPrototype(Object* object, void*) {
Heap* heap = Isolate::Current()->heap();
- bool found_it = false;
- JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return heap->undefined_value();
+ JSFunction* function = FindInstanceOf<JSFunction>(object);
+ if (function == NULL) return heap->undefined_value();
while (!function->should_have_prototype()) {
- found_it = false;
- function = FindInPrototypeChain<JSFunction>(object->GetPrototype(),
- &found_it);
+ function = FindInstanceOf<JSFunction>(function->GetPrototype());
// There has to be one because we hit the getter.
- ASSERT(found_it);
+ ASSERT(function != NULL);
}
if (!function->has_prototype()) {
@@ -477,9 +513,8 @@ MaybeObject* Accessors::FunctionSetPrototype(JSObject* object,
Object* value,
void*) {
Heap* heap = object->GetHeap();
- bool found_it = false;
- JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return heap->undefined_value();
+ JSFunction* function = FindInstanceOf<JSFunction>(object);
+ if (function == NULL) return heap->undefined_value();
if (!function->should_have_prototype()) {
// Since we hit this accessor, object will have no prototype property.
return object->SetLocalPropertyIgnoreAttributes(heap->prototype_symbol(),
@@ -509,22 +544,20 @@ const AccessorDescriptor Accessors::FunctionPrototype = {
MaybeObject* Accessors::FunctionGetLength(Object* object, void*) {
- bool found_it = false;
- JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return Smi::FromInt(0);
+ JSFunction* function = FindInstanceOf<JSFunction>(object);
+ if (function == NULL) return Smi::FromInt(0);
// Check if already compiled.
- if (!function->shared()->is_compiled()) {
- // If the function isn't compiled yet, the length is not computed
- // correctly yet. Compile it now and return the right length.
- HandleScope scope;
- Handle<JSFunction> handle(function);
- if (!JSFunction::CompileLazy(handle, KEEP_EXCEPTION)) {
- return Failure::Exception();
- }
- return Smi::FromInt(handle->shared()->length());
- } else {
+ if (function->shared()->is_compiled()) {
return Smi::FromInt(function->shared()->length());
}
+ // If the function isn't compiled yet, the length is not computed correctly
+ // yet. Compile it now and return the right length.
+ HandleScope scope;
+ Handle<JSFunction> handle(function);
+ if (JSFunction::CompileLazy(handle, KEEP_EXCEPTION)) {
+ return Smi::FromInt(handle->shared()->length());
+ }
+ return Failure::Exception();
}
@@ -541,10 +574,8 @@ const AccessorDescriptor Accessors::FunctionLength = {
MaybeObject* Accessors::FunctionGetName(Object* object, void*) {
- bool found_it = false;
- JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return HEAP->undefined_value();
- return holder->shared()->name();
+ JSFunction* holder = FindInstanceOf<JSFunction>(object);
+ return holder == NULL ? HEAP->undefined_value() : holder->shared()->name();
}
@@ -589,9 +620,8 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction(
MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
Isolate* isolate = Isolate::Current();
HandleScope scope(isolate);
- bool found_it = false;
- JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return isolate->heap()->undefined_value();
+ JSFunction* holder = FindInstanceOf<JSFunction>(object);
+ if (holder == NULL) return isolate->heap()->undefined_value();
Handle<JSFunction> function(holder, isolate);
if (function->shared()->native()) return isolate->heap()->null_value();
@@ -727,9 +757,8 @@ MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
Isolate* isolate = Isolate::Current();
HandleScope scope(isolate);
AssertNoAllocation no_alloc;
- bool found_it = false;
- JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
- if (!found_it) return isolate->heap()->undefined_value();
+ JSFunction* holder = FindInstanceOf<JSFunction>(object);
+ if (holder == NULL) return isolate->heap()->undefined_value();
if (holder->shared()->native()) return isolate->heap()->null_value();
Handle<JSFunction> function(holder, isolate);
@@ -755,6 +784,9 @@ MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
caller = potential_caller;
potential_caller = it.next();
}
+ if (!caller->shared()->native() && potential_caller != NULL) {
+ caller = potential_caller;
+ }
// If caller is bound, return null. This is compatible with JSC, and
// allows us to make bound functions use the strict function map
// and its associated throwing caller and arguments.
@@ -802,4 +834,69 @@ const AccessorDescriptor Accessors::ObjectPrototype = {
0
};
+
+//
+// Accessors::MakeModuleExport
+//
+
+static v8::Handle<v8::Value> ModuleGetExport(
+ v8::Local<v8::String> property,
+ const v8::AccessorInfo& info) {
+ JSModule* instance = JSModule::cast(*v8::Utils::OpenHandle(*info.Holder()));
+ Context* context = Context::cast(instance->context());
+ ASSERT(context->IsModuleContext());
+ int slot = info.Data()->Int32Value();
+ Object* value = context->get(slot);
+ if (value->IsTheHole()) {
+ Handle<String> name = v8::Utils::OpenHandle(*property);
+ Isolate* isolate = instance->GetIsolate();
+ isolate->ScheduleThrow(
+ *isolate->factory()->NewReferenceError("not_defined",
+ HandleVector(&name, 1)));
+ return v8::Handle<v8::Value>();
+ }
+ return v8::Utils::ToLocal(Handle<Object>(value));
+}
+
+
+static void ModuleSetExport(
+ v8::Local<v8::String> property,
+ v8::Local<v8::Value> value,
+ const v8::AccessorInfo& info) {
+ JSModule* instance = JSModule::cast(*v8::Utils::OpenHandle(*info.Holder()));
+ Context* context = Context::cast(instance->context());
+ ASSERT(context->IsModuleContext());
+ int slot = info.Data()->Int32Value();
+ Object* old_value = context->get(slot);
+ if (old_value->IsTheHole()) {
+ Handle<String> name = v8::Utils::OpenHandle(*property);
+ Isolate* isolate = instance->GetIsolate();
+ isolate->ScheduleThrow(
+ *isolate->factory()->NewReferenceError("not_defined",
+ HandleVector(&name, 1)));
+ return;
+ }
+ context->set(slot, *v8::Utils::OpenHandle(*value));
+}
+
+
+Handle<AccessorInfo> Accessors::MakeModuleExport(
+ Handle<String> name,
+ int index,
+ PropertyAttributes attributes) {
+ Factory* factory = name->GetIsolate()->factory();
+ Handle<AccessorInfo> info = factory->NewAccessorInfo();
+ info->set_property_attributes(attributes);
+ info->set_all_can_read(true);
+ info->set_all_can_write(true);
+ info->set_name(*name);
+ info->set_data(Smi::FromInt(index));
+ Handle<Object> getter = v8::FromCData(&ModuleGetExport);
+ Handle<Object> setter = v8::FromCData(&ModuleSetExport);
+ info->set_getter(*getter);
+ if (!(attributes & ReadOnly)) info->set_setter(*setter);
+ return info;
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/accessors.h b/src/3rdparty/v8/src/accessors.h
index 36b9a99..250f742 100644
--- a/src/3rdparty/v8/src/accessors.h
+++ b/src/3rdparty/v8/src/accessors.h
@@ -85,6 +85,10 @@ class Accessors : public AllStatic {
void*);
static MaybeObject* FunctionGetArguments(Object* object, void*);
+ // Accessor infos.
+ static Handle<AccessorInfo> MakeModuleExport(
+ Handle<String> name, int index, PropertyAttributes attributes);
+
private:
// Accessor functions only used through the descriptor.
static MaybeObject* FunctionGetLength(Object* object, void*);
diff --git a/src/3rdparty/v8/src/allocation-inl.h b/src/3rdparty/v8/src/allocation-inl.h
index 04a3fe6..d32db4b 100644
--- a/src/3rdparty/v8/src/allocation-inl.h
+++ b/src/3rdparty/v8/src/allocation-inl.h
@@ -34,12 +34,12 @@ namespace v8 {
namespace internal {
-void* PreallocatedStorage::New(size_t size) {
+void* PreallocatedStorageAllocationPolicy::New(size_t size) {
return Isolate::Current()->PreallocatedStorageNew(size);
}
-void PreallocatedStorage::Delete(void* p) {
+void PreallocatedStorageAllocationPolicy::Delete(void* p) {
return Isolate::Current()->PreallocatedStorageDelete(p);
}
diff --git a/src/3rdparty/v8/src/allocation.h b/src/3rdparty/v8/src/allocation.h
index 31067dd..45bde4c 100644
--- a/src/3rdparty/v8/src/allocation.h
+++ b/src/3rdparty/v8/src/allocation.h
@@ -104,7 +104,7 @@ char* StrNDup(const char* str, int n);
// and free. Used as the default policy for lists.
class FreeStoreAllocationPolicy {
public:
- INLINE(static void* New(size_t size)) { return Malloced::New(size); }
+ INLINE(void* New(size_t size)) { return Malloced::New(size); }
INLINE(static void Delete(void* p)) { Malloced::Delete(p); }
};
@@ -117,12 +117,6 @@ class PreallocatedStorage {
explicit PreallocatedStorage(size_t size);
size_t size() { return size_; }
- // TODO(isolates): Get rid of these-- we'll have to change the allocator
- // interface to include a pointer to an isolate to do this
- // efficiently.
- static inline void* New(size_t size);
- static inline void Delete(void* p);
-
private:
size_t size_;
PreallocatedStorage* previous_;
@@ -137,6 +131,12 @@ class PreallocatedStorage {
};
+struct PreallocatedStorageAllocationPolicy {
+ INLINE(void* New(size_t size));
+ INLINE(static void Delete(void* ptr));
+};
+
+
} } // namespace v8::internal
#endif // V8_ALLOCATION_H_
diff --git a/src/3rdparty/v8/src/api.cc b/src/3rdparty/v8/src/api.cc
index 70d0a8a..cbb3a04 100644
--- a/src/3rdparty/v8/src/api.cc
+++ b/src/3rdparty/v8/src/api.cc
@@ -33,6 +33,7 @@
#include "../include/v8-profiler.h"
#include "../include/v8-testing.h"
#include "bootstrapper.h"
+#include "code-stubs.h"
#include "compiler.h"
#include "conversions-inl.h"
#include "counters.h"
@@ -540,7 +541,9 @@ Extension::Extension(const char* name,
source_(source, source_length_),
dep_count_(dep_count),
deps_(deps),
- auto_enable_(false) { }
+ auto_enable_(false) {
+ CHECK(source != NULL || source_length_ == 0);
+}
v8::Handle<Primitive> Undefined() {
@@ -640,11 +643,48 @@ void V8::ClearWeak(i::Object** obj) {
void V8::MarkIndependent(i::Object** object) {
i::Isolate* isolate = i::Isolate::Current();
- LOG_API(isolate, "MakeIndependent");
+ LOG_API(isolate, "MarkIndependent");
isolate->global_handles()->MarkIndependent(object);
}
+void V8::MarkIndependent(i::Isolate* isolate, i::Object** object) {
+ ASSERT(isolate == i::Isolate::Current());
+ LOG_API(isolate, "MarkIndependent");
+ isolate->global_handles()->MarkIndependent(object);
+}
+
+
+void V8::MarkPartiallyDependent(i::Object** object) {
+ i::Isolate* isolate = i::Isolate::Current();
+ LOG_API(isolate, "MarkPartiallyDependent");
+ isolate->global_handles()->MarkPartiallyDependent(object);
+}
+
+
+void V8::MarkPartiallyDependent(i::Isolate* isolate, i::Object** object) {
+ ASSERT(isolate == i::Isolate::Current());
+ LOG_API(isolate, "MarkPartiallyDependent");
+ isolate->global_handles()->MarkPartiallyDependent(object);
+}
+
+
+bool V8::IsGlobalIndependent(i::Object** obj) {
+ i::Isolate* isolate = i::Isolate::Current();
+ LOG_API(isolate, "IsGlobalIndependent");
+ if (!isolate->IsInitialized()) return false;
+ return i::GlobalHandles::IsIndependent(obj);
+}
+
+
+bool V8::IsGlobalIndependent(i::Isolate* isolate, i::Object** obj) {
+ ASSERT(isolate == i::Isolate::Current());
+ LOG_API(isolate, "IsGlobalIndependent");
+ if (!isolate->IsInitialized()) return false;
+ return i::GlobalHandles::IsIndependent(obj);
+}
+
+
bool V8::IsGlobalNearDeath(i::Object** obj) {
i::Isolate* isolate = i::Isolate::Current();
LOG_API(isolate, "IsGlobalNearDeath");
@@ -668,6 +708,14 @@ void V8::DisposeGlobal(i::Object** obj) {
isolate->global_handles()->Destroy(obj);
}
+
+void V8::DisposeGlobal(i::Isolate* isolate, i::Object** obj) {
+ ASSERT(isolate == i::Isolate::Current());
+ LOG_API(isolate, "DisposeGlobal");
+ if (!isolate->IsInitialized()) return;
+ isolate->global_handles()->Destroy(obj);
+}
+
// --- H a n d l e s ---
@@ -762,13 +810,13 @@ void Context::Exit() {
}
-void Context::SetData(v8::Handle<String> data) {
+void Context::SetData(v8::Handle<Value> data) {
i::Handle<i::Context> env = Utils::OpenHandle(this);
i::Isolate* isolate = env->GetIsolate();
if (IsDeadCheck(isolate, "v8::Context::SetData()")) return;
i::Handle<i::Object> raw_data = Utils::OpenHandle(*data);
- ASSERT(env->IsGlobalContext());
- if (env->IsGlobalContext()) {
+ ASSERT(env->IsNativeContext());
+ if (env->IsNativeContext()) {
env->set_data(*raw_data);
}
}
@@ -778,16 +826,13 @@ v8::Local<v8::Value> Context::GetData() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
i::Isolate* isolate = env->GetIsolate();
if (IsDeadCheck(isolate, "v8::Context::GetData()")) {
- return v8::Local<Value>();
+ return Local<Value>();
}
- i::Object* raw_result = NULL;
- ASSERT(env->IsGlobalContext());
- if (env->IsGlobalContext()) {
- raw_result = env->data();
- } else {
+ ASSERT(env->IsNativeContext());
+ if (!env->IsNativeContext()) {
return Local<Value>();
}
- i::Handle<i::Object> result(raw_result, isolate);
+ i::Handle<i::Object> result(env->data(), isolate);
return Utils::ToLocal(result);
}
@@ -990,6 +1035,12 @@ Local<Signature> Signature::New(Handle<FunctionTemplate> receiver,
}
+Local<AccessorSignature> AccessorSignature::New(
+ Handle<FunctionTemplate> receiver) {
+ return Utils::AccessorSignatureToLocal(Utils::OpenHandle(*receiver));
+}
+
+
Local<TypeSwitch> TypeSwitch::New(Handle<FunctionTemplate> type) {
Handle<FunctionTemplate> types[1] = { type };
return TypeSwitch::New(1, types);
@@ -1057,9 +1108,9 @@ static i::Handle<i::AccessorInfo> MakeAccessorInfo(
AccessorSetter setter,
v8::Handle<Value> data,
v8::AccessControl settings,
- v8::PropertyAttribute attributes) {
+ v8::PropertyAttribute attributes,
+ v8::Handle<AccessorSignature> signature) {
i::Handle<i::AccessorInfo> obj = FACTORY->NewAccessorInfo();
- ASSERT(getter != NULL);
SET_FIELD_WRAPPED(obj, set_getter, getter);
SET_FIELD_WRAPPED(obj, set_setter, setter);
if (data.IsEmpty()) data = v8::Undefined();
@@ -1069,6 +1120,9 @@ static i::Handle<i::AccessorInfo> MakeAccessorInfo(
if (settings & ALL_CAN_WRITE) obj->set_all_can_write(true);
if (settings & PROHIBITS_OVERWRITING) obj->set_prohibits_overwriting(true);
obj->set_property_attributes(static_cast<PropertyAttributes>(attributes));
+ if (!signature.IsEmpty()) {
+ obj->set_expected_receiver_type(*Utils::OpenHandle(*signature));
+ }
return obj;
}
@@ -1079,7 +1133,8 @@ void FunctionTemplate::AddInstancePropertyAccessor(
AccessorSetter setter,
v8::Handle<Value> data,
v8::AccessControl settings,
- v8::PropertyAttribute attributes) {
+ v8::PropertyAttribute attributes,
+ v8::Handle<AccessorSignature> signature) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate,
"v8::FunctionTemplate::AddInstancePropertyAccessor()")) {
@@ -1088,9 +1143,9 @@ void FunctionTemplate::AddInstancePropertyAccessor(
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name,
- getter, setter, data,
- settings, attributes);
+ i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name, getter, setter, data,
+ settings, attributes,
+ signature);
i::Handle<i::Object> list(Utils::OpenHandle(this)->property_accessors());
if (list->IsUndefined()) {
list = NeanderArray().value();
@@ -1277,7 +1332,8 @@ void ObjectTemplate::SetAccessor(v8::Handle<String> name,
AccessorSetter setter,
v8::Handle<Value> data,
AccessControl settings,
- PropertyAttribute attribute) {
+ PropertyAttribute attribute,
+ v8::Handle<AccessorSignature> signature) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return;
ENTER_V8(isolate);
@@ -1291,7 +1347,8 @@ void ObjectTemplate::SetAccessor(v8::Handle<String> name,
setter,
data,
settings,
- attribute);
+ attribute,
+ signature);
}
@@ -1593,9 +1650,10 @@ Local<Script> Script::New(v8::Handle<String> source,
name_obj,
line_offset,
column_offset,
+ isolate->global_context(),
NULL,
pre_data_impl,
- Utils::OpenHandle(*script_data),
+ Utils::OpenHandle(*script_data, true),
i::NOT_NATIVES_CODE,
compile_flags);
has_pending_exception = result.is_null();
@@ -1670,7 +1728,7 @@ Local<Value> Script::Run(Handle<Object> qml) {
fun = i::Handle<i::JSFunction>(i::JSFunction::cast(*obj), isolate);
}
EXCEPTION_PREAMBLE(isolate);
- i::Handle<i::Object> qmlglobal = Utils::OpenHandle(*qml);
+ i::Handle<i::Object> qmlglobal = Utils::OpenHandle(*qml, true);
i::Handle<i::Object> receiver(
isolate->context()->global_proxy(), isolate);
i::Handle<i::Object> result =
@@ -3102,6 +3160,17 @@ Local<String> v8::Object::ObjectProtoToString() {
}
+Local<Value> v8::Object::GetConstructor() {
+ i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ ON_BAILOUT(isolate, "v8::Object::GetConstructor()",
+ return Local<v8::Function>());
+ ENTER_V8(isolate);
+ i::Handle<i::JSObject> self = Utils::OpenHandle(this);
+ i::Handle<i::Object> constructor(self->GetConstructor());
+ return Utils::ToLocal(constructor);
+}
+
+
Local<String> v8::Object::GetConstructorName() {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Object::GetConstructorName()",
@@ -3163,9 +3232,10 @@ bool Object::SetAccessor(Handle<String> name,
ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name,
- getter, setter, data,
- settings, attributes);
+ v8::Handle<AccessorSignature> signature;
+ i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name, getter, setter, data,
+ settings, attributes,
+ signature);
bool fast = Utils::OpenHandle(this)->HasFastProperties();
i::Handle<i::Object> result = i::SetAccessor(Utils::OpenHandle(this), info);
if (result.is_null() || result->IsUndefined()) return false;
@@ -3293,7 +3363,7 @@ void v8::Object::TurnOnAccessCheck() {
i::Deoptimizer::DeoptimizeGlobalObject(*obj);
i::Handle<i::Map> new_map =
- isolate->factory()->CopyMapDropTransitions(i::Handle<i::Map>(obj->map()));
+ isolate->factory()->CopyMap(i::Handle<i::Map>(obj->map()));
new_map->set_is_access_check_needed(true);
obj->set_map(*new_map);
}
@@ -3328,7 +3398,7 @@ static i::Context* GetCreationContext(i::JSObject* object) {
} else {
function = i::JSFunction::cast(constructor);
}
- return function->context()->global_context();
+ return function->context()->native_context();
}
@@ -3357,13 +3427,15 @@ bool v8::Object::SetHiddenValue(v8::Handle<v8::String> key,
v8::Handle<v8::Value> value) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Object::SetHiddenValue()", return false);
+ if (value.IsEmpty()) return DeleteHiddenValue(key);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
+ i::Handle<i::String> key_symbol = FACTORY->LookupSymbol(key_obj);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
i::Handle<i::Object> result =
- i::JSObject::SetHiddenProperty(self, key_obj, value_obj);
+ i::JSObject::SetHiddenProperty(self, key_symbol, value_obj);
return *result == *self;
}
@@ -3375,7 +3447,8 @@ v8::Local<v8::Value> v8::Object::GetHiddenValue(v8::Handle<v8::String> key) {
ENTER_V8(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
- i::Handle<i::Object> result(self->GetHiddenProperty(*key_obj));
+ i::Handle<i::String> key_symbol = FACTORY->LookupSymbol(key_obj);
+ i::Handle<i::Object> result(self->GetHiddenProperty(*key_symbol));
if (result->IsUndefined()) return v8::Local<v8::Value>();
return Utils::ToLocal(result);
}
@@ -3388,7 +3461,8 @@ bool v8::Object::DeleteHiddenValue(v8::Handle<v8::String> key) {
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
- self->DeleteHiddenProperty(*key_obj);
+ i::Handle<i::String> key_symbol = FACTORY->LookupSymbol(key_obj);
+ self->DeleteHiddenProperty(*key_symbol);
return true;
}
@@ -3456,7 +3530,7 @@ void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
ON_BAILOUT(isolate, "v8::SetElementsToPixelData()", return);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- if (!ApiCheck(length <= i::ExternalPixelArray::kMaxLength,
+ if (!ApiCheck(length >= 0 && length <= i::ExternalPixelArray::kMaxLength,
"v8::Object::SetIndexedPropertiesToPixelData()",
"length exceeds max acceptable value")) {
return;
@@ -3512,7 +3586,7 @@ void v8::Object::SetIndexedPropertiesToExternalArrayData(
ON_BAILOUT(isolate, "v8::SetIndexedPropertiesToExternalArrayData()", return);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- if (!ApiCheck(length <= i::ExternalArray::kMaxLength,
+ if (!ApiCheck(length >= 0 && length <= i::ExternalArray::kMaxLength,
"v8::Object::SetIndexedPropertiesToExternalArrayData()",
"length exceeds max acceptable value")) {
return;
@@ -3745,8 +3819,9 @@ ScriptOrigin Function::GetScriptOrigin() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
if (func->shared()->script()->IsScript()) {
i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
+ i::Handle<i::Object> scriptName = GetScriptNameOrSourceURL(script);
v8::ScriptOrigin origin(
- Utils::ToLocal(i::Handle<i::Object>(script->name())),
+ Utils::ToLocal(scriptName),
v8::Integer::New(script->line_offset()->value()),
v8::Integer::New(script->column_offset()->value()));
return origin;
@@ -3929,26 +4004,6 @@ uint32_t String::ComputeHash(char *string, int length) {
}
-uint16_t String::GetCharacter(int index) {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- return str->Get(index);
-}
-
-
-bool String::Equals(uint16_t *string, int length) {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::Equals()")) return 0;
- return str->SlowEqualsExternal(string, length);
-}
-
-
-bool String::Equals(char *string, int length) {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::Equals()")) return 0;
- return str->SlowEqualsExternal(string, length);
-}
-
-
int String::WriteUtf8(char* buffer,
int capacity,
int* nchars_ref,
@@ -3958,6 +4013,9 @@ int String::WriteUtf8(char* buffer,
LOG_API(isolate, "String::WriteUtf8");
ENTER_V8(isolate);
i::Handle<i::String> str = Utils::OpenHandle(this);
+ if (options & HINT_MANY_WRITES_EXPECTED) {
+ FlattenString(str); // Flatten the string for efficiency.
+ }
int string_length = str->length();
if (str->IsAsciiRepresentation()) {
int len;
@@ -4014,11 +4072,7 @@ int String::WriteUtf8(char* buffer,
// Slow case.
i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
isolate->string_tracker()->RecordWrite(str);
- if (options & HINT_MANY_WRITES_EXPECTED) {
- // Flatten the string for efficiency. This applies whether we are
- // using StringInputBuffer or Get(i) to access the characters.
- FlattenString(str);
- }
+
write_input_buffer.Reset(0, *str);
int len = str->length();
// Encode the first K - 3 bytes directly into the buffer since we
@@ -4060,8 +4114,9 @@ int String::WriteUtf8(char* buffer,
c,
unibrow::Utf16::kNoPreviousCharacter);
if (pos + written <= capacity) {
- for (int j = 0; j < written; j++)
+ for (int j = 0; j < written; j++) {
buffer[pos + j] = intermediate[j];
+ }
pos += written;
nchars++;
} else {
@@ -4074,8 +4129,9 @@ int String::WriteUtf8(char* buffer,
}
if (nchars_ref != NULL) *nchars_ref = nchars;
if (!(options & NO_NULL_TERMINATION) &&
- (i == len && (capacity == -1 || pos < capacity)))
+ (i == len && (capacity == -1 || pos < capacity))) {
buffer[pos++] = '\0';
+ }
return pos;
}
@@ -4088,28 +4144,45 @@ int String::WriteAscii(char* buffer,
if (IsDeadCheck(isolate, "v8::String::WriteAscii()")) return 0;
LOG_API(isolate, "String::WriteAscii");
ENTER_V8(isolate);
- i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
ASSERT(start >= 0 && length >= -1);
i::Handle<i::String> str = Utils::OpenHandle(this);
isolate->string_tracker()->RecordWrite(str);
if (options & HINT_MANY_WRITES_EXPECTED) {
- // Flatten the string for efficiency. This applies whether we are
- // using StringInputBuffer or Get(i) to access the characters.
- str->TryFlatten();
+ FlattenString(str); // Flatten the string for efficiency.
}
+
+ if (str->IsAsciiRepresentation()) {
+ // WriteToFlat is faster than using the StringInputBuffer.
+ if (length == -1) length = str->length() + 1;
+ int len = i::Min(length, str->length() - start);
+ i::String::WriteToFlat(*str, buffer, start, start + len);
+ if (!(options & PRESERVE_ASCII_NULL)) {
+ for (int i = 0; i < len; i++) {
+ if (buffer[i] == '\0') buffer[i] = ' ';
+ }
+ }
+ if (!(options & NO_NULL_TERMINATION) && length > len) {
+ buffer[len] = '\0';
+ }
+ return len;
+ }
+
+ i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
int end = length;
- if ( (length == -1) || (length > str->length() - start) )
+ if ((length == -1) || (length > str->length() - start)) {
end = str->length() - start;
+ }
if (end < 0) return 0;
write_input_buffer.Reset(start, *str);
int i;
for (i = 0; i < end; i++) {
char c = static_cast<char>(write_input_buffer.GetNext());
- if (c == '\0') c = ' ';
+ if (c == '\0' && !(options & PRESERVE_ASCII_NULL)) c = ' ';
buffer[i] = c;
}
- if (!(options & NO_NULL_TERMINATION) && (length == -1 || i < length))
+ if (!(options & NO_NULL_TERMINATION) && (length == -1 || i < length)) {
buffer[i] = '\0';
+ }
return i;
}
@@ -4128,7 +4201,7 @@ int String::Write(uint16_t* buffer,
if (options & HINT_MANY_WRITES_EXPECTED) {
// Flatten the string for efficiency. This applies whether we are
// using StringInputBuffer or Get(i) to access the characters.
- str->TryFlatten();
+ FlattenString(str);
}
int end = start + length;
if ((length == -1) || (length > str->length() - start) )
@@ -4176,6 +4249,29 @@ void v8::String::VerifyExternalStringResource(
CHECK_EQ(expected, value);
}
+void v8::String::VerifyExternalStringResourceBase(
+ v8::String::ExternalStringResourceBase* value, Encoding encoding) const {
+ i::Handle<i::String> str = Utils::OpenHandle(this);
+ const v8::String::ExternalStringResourceBase* expected;
+ Encoding expectedEncoding;
+ if (i::StringShape(*str).IsExternalAscii()) {
+ const void* resource =
+ i::Handle<i::ExternalAsciiString>::cast(str)->resource();
+ expected = reinterpret_cast<const ExternalStringResourceBase*>(resource);
+ expectedEncoding = ASCII_ENCODING;
+ } else if (i::StringShape(*str).IsExternalTwoByte()) {
+ const void* resource =
+ i::Handle<i::ExternalTwoByteString>::cast(str)->resource();
+ expected = reinterpret_cast<const ExternalStringResourceBase*>(resource);
+ expectedEncoding = TWO_BYTE_ENCODING;
+ } else {
+ expected = NULL;
+ expectedEncoding = str->IsAsciiRepresentation() ? ASCII_ENCODING
+ : TWO_BYTE_ENCODING;
+ }
+ CHECK_EQ(expected, value);
+ CHECK_EQ(expectedEncoding, encoding);
+}
const v8::String::ExternalAsciiStringResource*
v8::String::GetExternalAsciiStringResource() const {
@@ -4314,8 +4410,9 @@ void v8::Object::SetPointerInInternalField(int index, void* value) {
i::Handle<i::Foreign> foreign =
isolate->factory()->NewForeign(
reinterpret_cast<i::Address>(value), i::TENURED);
- if (!foreign.is_null())
- Utils::OpenHandle(this)->SetInternalField(index, *foreign);
+ if (!foreign.is_null()) {
+ Utils::OpenHandle(this)->SetInternalField(index, *foreign);
+ }
}
ASSERT_EQ(value, GetPointerFromInternalField(index));
}
@@ -4372,6 +4469,20 @@ void v8::V8::SetReturnAddressLocationResolver(
}
+bool v8::V8::SetFunctionEntryHook(FunctionEntryHook entry_hook) {
+ return i::ProfileEntryHookStub::SetFunctionEntryHook(entry_hook);
+}
+
+
+void v8::V8::SetJitCodeEventHandler(
+ JitCodeEventOptions options, JitCodeEventHandler event_handler) {
+ i::Isolate* isolate = i::Isolate::Current();
+ // Ensure that logging is initialized for our isolate.
+ isolate->InitializeLoggingAndCounters();
+ isolate->logger()->SetCodeEventHandler(options, event_handler);
+}
+
+
bool v8::V8::Dispose() {
i::Isolate* isolate = i::Isolate::Current();
if (!ApiCheck(isolate != NULL && isolate->IsDefaultIsolate(),
@@ -4386,6 +4497,7 @@ bool v8::V8::Dispose() {
HeapStatistics::HeapStatistics(): total_heap_size_(0),
total_heap_size_executable_(0),
+ total_physical_size_(0),
used_heap_size_(0),
heap_size_limit_(0) { }
@@ -4395,6 +4507,7 @@ void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
// Isolate is unitialized thus heap is not configured yet.
heap_statistics->set_total_heap_size(0);
heap_statistics->set_total_heap_size_executable(0);
+ heap_statistics->set_total_physical_size(0);
heap_statistics->set_used_heap_size(0);
heap_statistics->set_heap_size_limit(0);
return;
@@ -4404,6 +4517,7 @@ void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
heap_statistics->set_total_heap_size(heap->CommittedMemory());
heap_statistics->set_total_heap_size_executable(
heap->CommittedMemoryExecutable());
+ heap_statistics->set_total_physical_size(heap->CommittedPhysicalMemory());
heap_statistics->set_used_heap_size(heap->SizeOfObjects());
heap_statistics->set_heap_size_limit(heap->MaxReserved());
}
@@ -4416,6 +4530,30 @@ void v8::V8::VisitExternalResources(ExternalResourceVisitor* visitor) {
}
+void v8::V8::VisitHandlesWithClassIds(PersistentHandleVisitor* visitor) {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::V8::VisitHandlesWithClassId");
+
+ i::AssertNoAllocation no_allocation;
+
+ class VisitorAdapter : public i::ObjectVisitor {
+ public:
+ explicit VisitorAdapter(PersistentHandleVisitor* visitor)
+ : visitor_(visitor) {}
+ virtual void VisitPointers(i::Object** start, i::Object** end) {
+ UNREACHABLE();
+ }
+ virtual void VisitEmbedderReference(i::Object** p, uint16_t class_id) {
+ visitor_->VisitPersistentHandle(ToApi<Value>(i::Handle<i::Object>(p)),
+ class_id);
+ }
+ private:
+ PersistentHandleVisitor* visitor_;
+ } visitor_adapter(visitor);
+ isolate->global_handles()->IterateAllRootsWithClassIds(&visitor_adapter);
+}
+
+
bool v8::V8::IdleNotification(int hint) {
// Returning true tells the caller that it need not
// continue to call IdleNotification.
@@ -4506,7 +4644,7 @@ Persistent<Context> v8::Context::New(
// Create the environment.
env = isolate->bootstrapper()->CreateEnvironment(
isolate,
- Utils::OpenHandle(*global_object),
+ Utils::OpenHandle(*global_object, true),
proxy_template,
extensions);
@@ -4550,7 +4688,7 @@ void v8::Context::UseDefaultSecurityToken() {
}
ENTER_V8(isolate);
i::Handle<i::Context> env = Utils::OpenHandle(this);
- env->set_security_token(env->global());
+ env->set_security_token(env->global_object());
}
@@ -4595,7 +4733,7 @@ v8::Local<v8::Context> Context::GetCurrent() {
if (IsDeadCheck(isolate, "v8::Context::GetCurrent()")) {
return Local<Context>();
}
- i::Handle<i::Object> current = isolate->global_context();
+ i::Handle<i::Object> current = isolate->native_context();
if (current.is_null()) return Local<Context>();
i::Handle<i::Context> context = i::Handle<i::Context>::cast(current);
return Utils::ToLocal(context);
@@ -4608,7 +4746,7 @@ v8::Local<v8::Context> Context::GetCalling() {
return Local<Context>();
}
i::Handle<i::Object> calling =
- isolate->GetCallingGlobalContext();
+ isolate->GetCallingNativeContext();
if (calling.is_null()) return Local<Context>();
i::Handle<i::Context> context = i::Handle<i::Context>::cast(calling);
return Utils::ToLocal(context);
@@ -4625,8 +4763,8 @@ v8::Local<v8::Object> Context::GetCallingQmlGlobal() {
i::JavaScriptFrameIterator it;
if (it.done()) return Local<Object>();
context = i::Context::cast(it.frame()->context());
- if (!context->qml_global()->IsUndefined()) {
- i::Handle<i::Object> qmlglobal(context->qml_global());
+ if (!context->qml_global_object()->IsUndefined()) {
+ i::Handle<i::Object> qmlglobal(context->qml_global_object());
return Utils::ToLocal(i::Handle<i::JSObject>::cast(qmlglobal));
} else {
return Local<Object>();
@@ -4676,9 +4814,9 @@ void Context::ReattachGlobal(Handle<Object> global_object) {
i::Object** ctx = reinterpret_cast<i::Object**>(this);
i::Handle<i::Context> context =
i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
- isolate->bootstrapper()->ReattachGlobal(
- context,
- Utils::OpenHandle(*global_object));
+ i::Handle<i::JSGlobalProxy> global_proxy =
+ i::Handle<i::JSGlobalProxy>::cast(Utils::OpenHandle(*global_object));
+ isolate->bootstrapper()->ReattachGlobal(context, global_proxy);
}
@@ -4710,11 +4848,32 @@ bool Context::IsCodeGenerationFromStringsAllowed() {
}
+void Context::SetErrorMessageForCodeGenerationFromStrings(
+ Handle<String> error) {
+ i::Isolate* isolate = i::Isolate::Current();
+ if (IsDeadCheck(isolate,
+ "v8::Context::SetErrorMessageForCodeGenerationFromStrings()")) {
+ return;
+ }
+ ENTER_V8(isolate);
+ i::Object** ctx = reinterpret_cast<i::Object**>(this);
+ i::Handle<i::Context> context =
+ i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
+ i::Handle<i::Object> error_handle = Utils::OpenHandle(*error);
+ context->set_error_message_for_code_gen_from_strings(*error_handle);
+}
+
+
void V8::SetWrapperClassId(i::Object** global_handle, uint16_t class_id) {
i::GlobalHandles::SetWrapperClassId(global_handle, class_id);
}
+uint16_t V8::GetWrapperClassId(internal::Object** global_handle) {
+ return i::GlobalHandles::GetWrapperClassId(global_handle);
+}
+
+
Local<v8::Object> ObjectTemplate::NewInstance() {
i::Isolate* isolate = i::Isolate::Current();
ON_BAILOUT(isolate, "v8::ObjectTemplate::NewInstance()",
@@ -4932,6 +5091,7 @@ Local<String> v8::String::NewExternal(
EnsureInitializedForIsolate(isolate, "v8::String::NewExternal()");
LOG_API(isolate, "String::NewExternal");
ENTER_V8(isolate);
+ CHECK(resource && resource->data());
i::Handle<i::String> result = NewExternalStringHandle(isolate, resource);
isolate->heap()->external_string_table()->AddString(*result);
return Utils::ToLocal(result);
@@ -4952,6 +5112,7 @@ bool v8::String::MakeExternal(v8::String::ExternalStringResource* resource) {
if (isolate->heap()->IsInGCPostProcessing()) {
return false;
}
+ CHECK(resource && resource->data());
bool result = obj->MakeExternal(resource);
if (result && !obj->IsSymbol()) {
isolate->heap()->external_string_table()->AddString(*obj);
@@ -4966,6 +5127,7 @@ Local<String> v8::String::NewExternal(
EnsureInitializedForIsolate(isolate, "v8::String::NewExternal()");
LOG_API(isolate, "String::NewExternal");
ENTER_V8(isolate);
+ CHECK(resource && resource->data());
i::Handle<i::String> result = NewExternalAsciiStringHandle(isolate, resource);
isolate->heap()->external_string_table()->AddString(*result);
return Utils::ToLocal(result);
@@ -4987,6 +5149,7 @@ bool v8::String::MakeExternal(
if (isolate->heap()->IsInGCPostProcessing()) {
return false;
}
+ CHECK(resource && resource->data());
bool result = obj->MakeExternal(resource);
if (result && !obj->IsSymbol()) {
isolate->heap()->external_string_table()->AddString(*obj);
@@ -5236,7 +5399,7 @@ Local<Object> Array::CloneElementAt(uint32_t index) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Array::CloneElementAt()", return Local<Object>());
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- if (!self->HasFastElements()) {
+ if (!self->HasFastObjectElements()) {
return Local<Object>();
}
i::FixedArray* elms = i::FixedArray::cast(self->elements());
@@ -5282,24 +5445,39 @@ Local<Number> v8::Number::New(double value) {
Local<Integer> v8::Integer::New(int32_t value) {
i::Isolate* isolate = i::Isolate::UncheckedCurrent();
EnsureInitializedForIsolate(isolate, "v8::Integer::New()");
+ return v8::Integer::New(value, reinterpret_cast<Isolate*>(isolate));
+}
+
+
+Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
+ i::Isolate* isolate = i::Isolate::Current();
+ EnsureInitializedForIsolate(isolate, "v8::Integer::NewFromUnsigned()");
+ return Integer::NewFromUnsigned(value, reinterpret_cast<Isolate*>(isolate));
+}
+
+
+Local<Integer> v8::Integer::New(int32_t value, Isolate* isolate) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ ASSERT(internal_isolate->IsInitialized());
if (i::Smi::IsValid(value)) {
return Utils::IntegerToLocal(i::Handle<i::Object>(i::Smi::FromInt(value),
- isolate));
+ internal_isolate));
}
- ENTER_V8(isolate);
- i::Handle<i::Object> result = isolate->factory()->NewNumber(value);
+ ENTER_V8(internal_isolate);
+ i::Handle<i::Object> result = internal_isolate->factory()->NewNumber(value);
return Utils::IntegerToLocal(result);
}
-Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
+Local<Integer> v8::Integer::NewFromUnsigned(uint32_t value, Isolate* isolate) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ ASSERT(internal_isolate->IsInitialized());
bool fits_into_int32_t = (value & (1 << 31)) == 0;
if (fits_into_int32_t) {
- return Integer::New(static_cast<int32_t>(value));
+ return Integer::New(static_cast<int32_t>(value), isolate);
}
- i::Isolate* isolate = i::Isolate::Current();
- ENTER_V8(isolate);
- i::Handle<i::Object> result = isolate->factory()->NewNumber(value);
+ ENTER_V8(internal_isolate);
+ i::Handle<i::Object> result = internal_isolate->factory()->NewNumber(value);
return Utils::IntegerToLocal(result);
}
@@ -5309,19 +5487,14 @@ void V8::IgnoreOutOfMemoryException() {
}
-bool V8::AddMessageListener(MessageCallback that, Handle<Value> data) {
+bool V8::AddMessageListener(MessageCallback that) {
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::V8::AddMessageListener()");
ON_BAILOUT(isolate, "v8::V8::AddMessageListener()", return false);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
NeanderArray listeners(isolate->factory()->message_listeners());
- NeanderObject obj(2);
- obj.set(0, *isolate->factory()->NewForeign(FUNCTION_ADDR(that)));
- obj.set(1, data.IsEmpty() ?
- isolate->heap()->undefined_value() :
- *Utils::OpenHandle(*data));
- listeners.add(obj.value());
+ listeners.add(isolate->factory()->NewForeign(FUNCTION_ADDR(that)));
return true;
}
@@ -5336,8 +5509,7 @@ void V8::RemoveMessageListeners(MessageCallback that) {
for (int i = 0; i < listeners.length(); i++) {
if (listeners.get(i)->IsUndefined()) continue; // skip deleted ones
- NeanderObject listener(i::JSObject::cast(listeners.get(i)));
- i::Handle<i::Foreign> callback_obj(i::Foreign::cast(listener.get(0)));
+ i::Handle<i::Foreign> callback_obj(i::Foreign::cast(listeners.get(i)));
if (callback_obj->foreign_address() == FUNCTION_ADDR(that)) {
listeners.set(i, isolate->heap()->undefined_value());
}
@@ -5366,6 +5538,8 @@ void V8::SetCreateHistogramFunction(CreateHistogramCallback callback) {
i::Isolate* isolate = EnterIsolateIfNeeded();
if (IsDeadCheck(isolate, "v8::V8::SetCreateHistogramFunction()")) return;
isolate->stats_table()->SetCreateHistogramFunction(callback);
+ isolate->InitializeLoggingAndCounters();
+ isolate->counters()->ResetHistograms();
}
void V8::SetAddHistogramSampleFunction(AddHistogramSampleCallback callback) {
@@ -5426,8 +5600,9 @@ void V8::AddImplicitReferences(Persistent<Object> parent,
intptr_t V8::AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes) {
- i::Isolate* isolate = i::Isolate::Current();
- if (IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) {
+ i::Isolate* isolate = i::Isolate::UncheckedCurrent();
+ if (isolate == NULL || !isolate->IsInitialized() ||
+ IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) {
return 0;
}
return isolate->heap()->AdjustAmountOfExternalAllocatedMemory(
@@ -5779,7 +5954,8 @@ bool Debug::SetDebugEventListener(EventCallback that, Handle<Value> data) {
foreign =
isolate->factory()->NewForeign(FUNCTION_ADDR(EventCallbackWrapper));
}
- isolate->debugger()->SetEventListener(foreign, Utils::OpenHandle(*data));
+ isolate->debugger()->SetEventListener(foreign,
+ Utils::OpenHandle(*data, true));
return true;
}
@@ -5794,7 +5970,8 @@ bool Debug::SetDebugEventListener2(EventCallback2 that, Handle<Value> data) {
if (that != NULL) {
foreign = isolate->factory()->NewForeign(FUNCTION_ADDR(that));
}
- isolate->debugger()->SetEventListener(foreign, Utils::OpenHandle(*data));
+ isolate->debugger()->SetEventListener(foreign,
+ Utils::OpenHandle(*data, true));
return true;
}
@@ -5805,7 +5982,7 @@ bool Debug::SetDebugEventListener(v8::Handle<v8::Object> that,
ON_BAILOUT(isolate, "v8::Debug::SetDebugEventListener()", return false);
ENTER_V8(isolate);
isolate->debugger()->SetEventListener(Utils::OpenHandle(*that),
- Utils::OpenHandle(*data));
+ Utils::OpenHandle(*data, true));
return true;
}
@@ -5944,7 +6121,7 @@ Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) {
v8::HandleScope scope;
i::Debug* isolate_debug = isolate->debug();
isolate_debug->Load();
- i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global());
+ i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global_object());
i::Handle<i::String> name =
isolate->factory()->LookupAsciiSymbol("MakeMirror");
i::Handle<i::Object> fun_obj = i::GetProperty(debug, name);
@@ -5976,6 +6153,7 @@ void Debug::ProcessDebugMessages() {
i::Execution::ProcessDebugMessages(true);
}
+
Local<Context> Debug::GetDebugContext() {
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::Debug::GetDebugContext()");
@@ -5983,6 +6161,20 @@ Local<Context> Debug::GetDebugContext() {
return Utils::ToLocal(i::Isolate::Current()->debugger()->GetDebugContext());
}
+
+void Debug::SetLiveEditEnabled(bool enable, Isolate* isolate) {
+ // If no isolate is supplied, use the default isolate.
+ i::Debugger* debugger;
+ if (isolate != NULL) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ debugger = internal_isolate->debugger();
+ } else {
+ debugger = i::Isolate::GetDefaultIsolateDebugger();
+ }
+ debugger->set_live_edit_enabled(enable);
+}
+
+
#endif // ENABLE_DEBUGGER_SUPPORT
@@ -6252,13 +6444,6 @@ int HeapGraphNode::GetSelfSize() const {
}
-int HeapGraphNode::GetRetainedSize() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
- return ToInternal(this)->retained_size();
-}
-
-
int HeapGraphNode::GetChildrenCount() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetChildrenCount");
@@ -6274,28 +6459,6 @@ const HeapGraphEdge* HeapGraphNode::GetChild(int index) const {
}
-int HeapGraphNode::GetRetainersCount() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainersCount");
- return ToInternal(this)->retainers().length();
-}
-
-
-const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainer");
- return reinterpret_cast<const HeapGraphEdge*>(
- ToInternal(this)->retainers()[index]);
-}
-
-
-const HeapGraphNode* HeapGraphNode::GetDominatorNode() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetDominatorNode");
- return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->dominator());
-}
-
-
v8::Handle<v8::Value> HeapGraphNode::GetHeapValue() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetHeapValue");
@@ -6465,7 +6628,7 @@ void HeapProfiler::StopHeapObjectsTracking() {
}
-void HeapProfiler::PushHeapObjectsStats(OutputStream* stream) {
+SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream) {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::PushHeapObjectsStats");
return i::HeapProfiler::PushHeapObjectsStats(stream);
@@ -6492,6 +6655,11 @@ int HeapProfiler::GetPersistentHandleCount() {
}
+size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
+ return i::HeapProfiler::GetMemorySizeUsedByProfiler();
+}
+
+
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
@@ -6553,6 +6721,7 @@ void Testing::PrepareStressRun(int run) {
void Testing::DeoptimizeAll() {
+ i::HandleScope scope;
internal::Deoptimizer::DeoptimizeAll();
}
@@ -6591,12 +6760,28 @@ char* HandleScopeImplementer::RestoreThread(char* storage) {
void HandleScopeImplementer::IterateThis(ObjectVisitor* v) {
+#ifdef DEBUG
+ bool found_block_before_deferred = false;
+#endif
// Iterate over all handles in the blocks except for the last.
for (int i = blocks()->length() - 2; i >= 0; --i) {
Object** block = blocks()->at(i);
- v->VisitPointers(block, &block[kHandleBlockSize]);
+ if (last_handle_before_deferred_block_ != NULL &&
+ (last_handle_before_deferred_block_ < &block[kHandleBlockSize]) &&
+ (last_handle_before_deferred_block_ >= block)) {
+ v->VisitPointers(block, last_handle_before_deferred_block_);
+ ASSERT(!found_block_before_deferred);
+#ifdef DEBUG
+ found_block_before_deferred = true;
+#endif
+ } else {
+ v->VisitPointers(block, &block[kHandleBlockSize]);
+ }
}
+ ASSERT(last_handle_before_deferred_block_ == NULL ||
+ found_block_before_deferred);
+
// Iterate over live handles in the last block (if any).
if (!blocks()->is_empty()) {
v->VisitPointers(blocks()->last(), handle_scope_data_.next);
@@ -6624,4 +6809,66 @@ char* HandleScopeImplementer::Iterate(ObjectVisitor* v, char* storage) {
return storage + ArchiveSpacePerThread();
}
+
+DeferredHandles* HandleScopeImplementer::Detach(Object** prev_limit) {
+ DeferredHandles* deferred =
+ new DeferredHandles(isolate()->handle_scope_data()->next, isolate());
+
+ while (!blocks_.is_empty()) {
+ Object** block_start = blocks_.last();
+ Object** block_limit = &block_start[kHandleBlockSize];
+ // We should not need to check for NoHandleAllocation here. Assert
+ // this.
+ ASSERT(prev_limit == block_limit ||
+ !(block_start <= prev_limit && prev_limit <= block_limit));
+ if (prev_limit == block_limit) break;
+ deferred->blocks_.Add(blocks_.last());
+ blocks_.RemoveLast();
+ }
+
+ // deferred->blocks_ now contains the blocks installed on the
+ // HandleScope stack since BeginDeferredScope was called, but in
+ // reverse order.
+
+ ASSERT(prev_limit == NULL || !blocks_.is_empty());
+
+ ASSERT(!blocks_.is_empty() && prev_limit != NULL);
+ ASSERT(last_handle_before_deferred_block_ != NULL);
+ last_handle_before_deferred_block_ = NULL;
+ return deferred;
+}
+
+
+void HandleScopeImplementer::BeginDeferredScope() {
+ ASSERT(last_handle_before_deferred_block_ == NULL);
+ last_handle_before_deferred_block_ = isolate()->handle_scope_data()->next;
+}
+
+
+DeferredHandles::~DeferredHandles() {
+ isolate_->UnlinkDeferredHandles(this);
+
+ for (int i = 0; i < blocks_.length(); i++) {
+#ifdef DEBUG
+ HandleScope::ZapRange(blocks_[i], &blocks_[i][kHandleBlockSize]);
+#endif
+ isolate_->handle_scope_implementer()->ReturnBlock(blocks_[i]);
+ }
+}
+
+
+void DeferredHandles::Iterate(ObjectVisitor* v) {
+ ASSERT(!blocks_.is_empty());
+
+ ASSERT((first_block_limit_ >= blocks_.first()) &&
+ (first_block_limit_ <= &(blocks_.first())[kHandleBlockSize]));
+
+ v->VisitPointers(blocks_.first(), first_block_limit_);
+
+ for (int i = 1; i < blocks_.length(); i++) {
+ v->VisitPointers(blocks_[i], &blocks_[i][kHandleBlockSize]);
+ }
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/api.h b/src/3rdparty/v8/src/api.h
index 3ad57f4..7197b6c 100644
--- a/src/3rdparty/v8/src/api.h
+++ b/src/3rdparty/v8/src/api.h
@@ -105,13 +105,13 @@ NeanderArray::NeanderArray(v8::internal::Handle<v8::internal::Object> obj)
v8::internal::Object* NeanderObject::get(int offset) {
- ASSERT(value()->HasFastElements());
+ ASSERT(value()->HasFastObjectElements());
return v8::internal::FixedArray::cast(value()->elements())->get(offset);
}
void NeanderObject::set(int offset, v8::internal::Object* value) {
- ASSERT(value_->HasFastElements());
+ ASSERT(value_->HasFastObjectElements());
v8::internal::FixedArray::cast(value_->elements())->set(offset, value);
}
@@ -159,6 +159,27 @@ class RegisteredExtension {
};
+#define OPEN_HANDLE_LIST(V) \
+ V(Template, TemplateInfo) \
+ V(FunctionTemplate, FunctionTemplateInfo) \
+ V(ObjectTemplate, ObjectTemplateInfo) \
+ V(Signature, SignatureInfo) \
+ V(AccessorSignature, FunctionTemplateInfo) \
+ V(TypeSwitch, TypeSwitchInfo) \
+ V(Data, Object) \
+ V(RegExp, JSRegExp) \
+ V(Object, JSObject) \
+ V(Array, JSArray) \
+ V(String, String) \
+ V(Script, Object) \
+ V(Function, JSFunction) \
+ V(Message, JSObject) \
+ V(Context, Context) \
+ V(External, Foreign) \
+ V(StackTrace, JSArray) \
+ V(StackFrame, JSObject)
+
+
class Utils {
public:
static bool ReportApiFailure(const char* location, const char* message);
@@ -200,43 +221,18 @@ class Utils {
v8::internal::Handle<v8::internal::ObjectTemplateInfo> obj);
static inline Local<Signature> ToLocal(
v8::internal::Handle<v8::internal::SignatureInfo> obj);
+ static inline Local<AccessorSignature> AccessorSignatureToLocal(
+ v8::internal::Handle<v8::internal::FunctionTemplateInfo> obj);
static inline Local<TypeSwitch> ToLocal(
v8::internal::Handle<v8::internal::TypeSwitchInfo> obj);
- static inline v8::internal::Handle<v8::internal::TemplateInfo>
- OpenHandle(const Template* that);
- static inline v8::internal::Handle<v8::internal::FunctionTemplateInfo>
- OpenHandle(const FunctionTemplate* that);
- static inline v8::internal::Handle<v8::internal::ObjectTemplateInfo>
- OpenHandle(const ObjectTemplate* that);
- static inline v8::internal::Handle<v8::internal::Object>
- OpenHandle(const Data* data);
- static inline v8::internal::Handle<v8::internal::JSRegExp>
- OpenHandle(const RegExp* data);
- static inline v8::internal::Handle<v8::internal::JSObject>
- OpenHandle(const v8::Object* data);
- static inline v8::internal::Handle<v8::internal::JSArray>
- OpenHandle(const v8::Array* data);
- static inline v8::internal::Handle<v8::internal::String>
- OpenHandle(const String* data);
- static inline v8::internal::Handle<v8::internal::Object>
- OpenHandle(const Script* data);
- static inline v8::internal::Handle<v8::internal::JSFunction>
- OpenHandle(const Function* data);
- static inline v8::internal::Handle<v8::internal::JSObject>
- OpenHandle(const Message* message);
- static inline v8::internal::Handle<v8::internal::JSArray>
- OpenHandle(const StackTrace* stack_trace);
- static inline v8::internal::Handle<v8::internal::JSObject>
- OpenHandle(const StackFrame* stack_frame);
- static inline v8::internal::Handle<v8::internal::Context>
- OpenHandle(const v8::Context* context);
- static inline v8::internal::Handle<v8::internal::SignatureInfo>
- OpenHandle(const v8::Signature* sig);
- static inline v8::internal::Handle<v8::internal::TypeSwitchInfo>
- OpenHandle(const v8::TypeSwitch* that);
- static inline v8::internal::Handle<v8::internal::Foreign>
- OpenHandle(const v8::External* that);
+#define DECLARE_OPEN_HANDLE(From, To) \
+ static inline v8::internal::Handle<v8::internal::To> \
+ OpenHandle(const From* that, bool allow_empty_handle = false);
+
+OPEN_HANDLE_LIST(DECLARE_OPEN_HANDLE)
+
+#undef DECLARE_OPEN_HANDLE
};
@@ -253,7 +249,7 @@ v8::internal::Handle<T> v8::internal::Handle<T>::EscapeFrom(
if (!is_null()) {
handle = *this;
}
- return Utils::OpenHandle(*scope->Close(Utils::ToLocal(handle)));
+ return Utils::OpenHandle(*scope->Close(Utils::ToLocal(handle)), true);
}
@@ -276,6 +272,7 @@ MAKE_TO_LOCAL(ToLocal, Foreign, External)
MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate)
MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate)
MAKE_TO_LOCAL(ToLocal, SignatureInfo, Signature)
+MAKE_TO_LOCAL(AccessorSignatureToLocal, FunctionTemplateInfo, AccessorSignature)
MAKE_TO_LOCAL(ToLocal, TypeSwitchInfo, TypeSwitch)
MAKE_TO_LOCAL(MessageToLocal, Object, Message)
MAKE_TO_LOCAL(StackTraceToLocal, JSArray, StackTrace)
@@ -289,32 +286,18 @@ MAKE_TO_LOCAL(Uint32ToLocal, Object, Uint32)
// Implementations of OpenHandle
-#define MAKE_OPEN_HANDLE(From, To) \
- v8::internal::Handle<v8::internal::To> Utils::OpenHandle(\
- const v8::From* that) { \
- return v8::internal::Handle<v8::internal::To>( \
+#define MAKE_OPEN_HANDLE(From, To) \
+ v8::internal::Handle<v8::internal::To> Utils::OpenHandle( \
+ const v8::From* that, bool allow_empty_handle) { \
+ EXTRA_CHECK(allow_empty_handle || that != NULL); \
+ return v8::internal::Handle<v8::internal::To>( \
reinterpret_cast<v8::internal::To**>(const_cast<v8::From*>(that))); \
}
-MAKE_OPEN_HANDLE(Template, TemplateInfo)
-MAKE_OPEN_HANDLE(FunctionTemplate, FunctionTemplateInfo)
-MAKE_OPEN_HANDLE(ObjectTemplate, ObjectTemplateInfo)
-MAKE_OPEN_HANDLE(Signature, SignatureInfo)
-MAKE_OPEN_HANDLE(TypeSwitch, TypeSwitchInfo)
-MAKE_OPEN_HANDLE(Data, Object)
-MAKE_OPEN_HANDLE(RegExp, JSRegExp)
-MAKE_OPEN_HANDLE(Object, JSObject)
-MAKE_OPEN_HANDLE(Array, JSArray)
-MAKE_OPEN_HANDLE(String, String)
-MAKE_OPEN_HANDLE(Script, Object)
-MAKE_OPEN_HANDLE(Function, JSFunction)
-MAKE_OPEN_HANDLE(Message, JSObject)
-MAKE_OPEN_HANDLE(Context, Context)
-MAKE_OPEN_HANDLE(External, Foreign)
-MAKE_OPEN_HANDLE(StackTrace, JSArray)
-MAKE_OPEN_HANDLE(StackFrame, JSObject)
+OPEN_HANDLE_LIST(MAKE_OPEN_HANDLE)
#undef MAKE_OPEN_HANDLE
+#undef OPEN_HANDLE_LIST
namespace internal {
@@ -386,6 +369,32 @@ class StringTracker {
};
+class DeferredHandles {
+ public:
+ ~DeferredHandles();
+
+ private:
+ DeferredHandles(Object** first_block_limit, Isolate* isolate)
+ : next_(NULL),
+ previous_(NULL),
+ first_block_limit_(first_block_limit),
+ isolate_(isolate) {
+ isolate->LinkDeferredHandles(this);
+ }
+
+ void Iterate(ObjectVisitor* v);
+
+ List<Object**> blocks_;
+ DeferredHandles* next_;
+ DeferredHandles* previous_;
+ Object** first_block_limit_;
+ Isolate* isolate_;
+
+ friend class HandleScopeImplementer;
+ friend class Isolate;
+};
+
+
// This class is here in order to be able to declare it a friend of
// HandleScope. Moving these methods to be members of HandleScope would be
// neat in some ways, but it would expose internal implementation details in
@@ -403,7 +412,8 @@ class HandleScopeImplementer {
entered_contexts_(0),
saved_contexts_(0),
spare_(NULL),
- call_depth_(0) { }
+ call_depth_(0),
+ last_handle_before_deferred_block_(NULL) { }
~HandleScopeImplementer() {
DeleteArray(spare_);
@@ -439,6 +449,13 @@ class HandleScopeImplementer {
inline bool HasSavedContexts();
inline List<internal::Object**>* blocks() { return &blocks_; }
+ Isolate* isolate() const { return isolate_; }
+
+ void ReturnBlock(Object** block) {
+ ASSERT(block != NULL);
+ if (spare_ != NULL) DeleteArray(spare_);
+ spare_ = block;
+ }
private:
void ResetAfterArchive() {
@@ -446,6 +463,7 @@ class HandleScopeImplementer {
entered_contexts_.Initialize(0);
saved_contexts_.Initialize(0);
spare_ = NULL;
+ last_handle_before_deferred_block_ = NULL;
call_depth_ = 0;
}
@@ -463,6 +481,9 @@ class HandleScopeImplementer {
ASSERT(call_depth_ == 0);
}
+ void BeginDeferredScope();
+ DeferredHandles* Detach(Object** prev_limit);
+
Isolate* isolate_;
List<internal::Object**> blocks_;
// Used as a stack to keep track of entered contexts.
@@ -471,6 +492,7 @@ class HandleScopeImplementer {
List<Context*> saved_contexts_;
Object** spare_;
int call_depth_;
+ Object** last_handle_before_deferred_block_;
// This is only used for threading support.
v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
@@ -478,6 +500,9 @@ class HandleScopeImplementer {
char* RestoreThreadHelper(char* from);
char* ArchiveThreadHelper(char* to);
+ friend class DeferredHandles;
+ friend class DeferredHandleScope;
+
DISALLOW_COPY_AND_ASSIGN(HandleScopeImplementer);
};
diff --git a/src/3rdparty/v8/src/arm/assembler-arm-inl.h b/src/3rdparty/v8/src/arm/assembler-arm-inl.h
index d5db686..acd61fe 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm-inl.h
+++ b/src/3rdparty/v8/src/arm/assembler-arm-inl.h
@@ -75,7 +75,7 @@ Address RelocInfo::target_address_address() {
ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
|| rmode_ == EMBEDDED_OBJECT
|| rmode_ == EXTERNAL_REFERENCE);
- return reinterpret_cast<Address>(Assembler::target_address_address_at(pc_));
+ return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
}
@@ -97,25 +97,30 @@ void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
Object* RelocInfo::target_object() {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
- return Memory::Object_at(Assembler::target_address_address_at(pc_));
+ return reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
}
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
- return Memory::Object_Handle_at(Assembler::target_address_address_at(pc_));
+ return Handle<Object>(reinterpret_cast<Object**>(
+ Assembler::target_pointer_at(pc_)));
}
Object** RelocInfo::target_object_address() {
+ // Provide a "natural pointer" to the embedded object,
+ // which can be de-referenced during heap iteration.
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
- return reinterpret_cast<Object**>(Assembler::target_address_address_at(pc_));
+ reconstructed_obj_ptr_ =
+ reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
+ return &reconstructed_obj_ptr_;
}
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
- Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
+ Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target));
if (mode == UPDATE_WRITE_BARRIER &&
host() != NULL &&
target->IsHeapObject()) {
@@ -127,7 +132,8 @@ void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
Address* RelocInfo::target_reference_address() {
ASSERT(rmode_ == EXTERNAL_REFERENCE);
- return reinterpret_cast<Address*>(Assembler::target_address_address_at(pc_));
+ reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
+ return &reconstructed_adr_ptr_;
}
@@ -141,10 +147,7 @@ Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
JSGlobalPropertyCell* RelocInfo::target_cell() {
ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
- Address address = Memory::Address_at(pc_);
- Object* object = HeapObject::FromAddress(
- address - JSGlobalPropertyCell::kValueOffset);
- return reinterpret_cast<JSGlobalPropertyCell*>(object);
+ return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}
@@ -162,6 +165,24 @@ void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
}
+static const int kNoCodeAgeSequenceLength = 3;
+
+Code* RelocInfo::code_age_stub() {
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ return Code::GetCodeFromTargetAddress(
+ Memory::Address_at(pc_ + Assembler::kInstrSize *
+ (kNoCodeAgeSequenceLength - 1)));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ Memory::Address_at(pc_ + Assembler::kInstrSize *
+ (kNoCodeAgeSequenceLength - 1)) =
+ stub->instruction_start();
+}
+
+
Address RelocInfo::call_address() {
// The 2 instructions offset assumes patched debug break slot or return
// sequence.
@@ -235,6 +256,8 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(this);
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -261,6 +284,8 @@ void RelocInfo::Visit(Heap* heap) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(this);
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
@@ -329,7 +354,7 @@ void Assembler::emit(Instr x) {
}
-Address Assembler::target_address_address_at(Address pc) {
+Address Assembler::target_pointer_address_at(Address pc) {
Address target_pc = pc;
Instr instr = Memory::int32_at(target_pc);
// If we have a bx instruction, the instruction before the bx is
@@ -359,8 +384,63 @@ Address Assembler::target_address_address_at(Address pc) {
}
-Address Assembler::target_address_at(Address pc) {
- return Memory::Address_at(target_address_address_at(pc));
+Address Assembler::target_pointer_at(Address pc) {
+ if (IsMovW(Memory::int32_at(pc))) {
+ ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
+ Instruction* instr = Instruction::At(pc);
+ Instruction* next_instr = Instruction::At(pc + kInstrSize);
+ return reinterpret_cast<Address>(
+ (next_instr->ImmedMovwMovtValue() << 16) |
+ instr->ImmedMovwMovtValue());
+ }
+ return Memory::Address_at(target_pointer_address_at(pc));
+}
+
+
+Address Assembler::target_address_from_return_address(Address pc) {
+ // Returns the address of the call target from the return address that will
+ // be returned to after a call.
+#ifdef USE_BLX
+ // Call sequence on V7 or later is :
+ // movw ip, #... @ call address low 16
+ // movt ip, #... @ call address high 16
+ // blx ip
+ // @ return address
+ // Or pre-V7 or cases that need frequent patching:
+ // ldr ip, [pc, #...] @ call address
+ // blx ip
+ // @ return address
+ Address candidate = pc - 2 * Assembler::kInstrSize;
+ Instr candidate_instr(Memory::int32_at(candidate));
+ if (IsLdrPcImmediateOffset(candidate_instr)) {
+ return candidate;
+ }
+ candidate = pc - 3 * Assembler::kInstrSize;
+ ASSERT(IsMovW(Memory::int32_at(candidate)) &&
+ IsMovT(Memory::int32_at(candidate + kInstrSize)));
+ return candidate;
+#else
+ // Call sequence is:
+ // mov lr, pc
+ // ldr pc, [pc, #...] @ call address
+ // @ return address
+ return pc - kInstrSize;
+#endif
+}
+
+
+Address Assembler::return_address_from_call_start(Address pc) {
+#ifdef USE_BLX
+ if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
+ return pc + kInstrSize * 2;
+ } else {
+ ASSERT(IsMovW(Memory::int32_at(pc)));
+ ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
+ return pc + kInstrSize * 3;
+ }
+#else
+ return pc + kInstrSize;
+#endif
}
@@ -376,17 +456,53 @@ void Assembler::set_external_target_at(Address constant_pool_entry,
}
+static Instr EncodeMovwImmediate(uint32_t immediate) {
+ ASSERT(immediate < 0x10000);
+ return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
+}
+
+
+void Assembler::set_target_pointer_at(Address pc, Address target) {
+ if (IsMovW(Memory::int32_at(pc))) {
+ ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
+ uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
+ uint32_t immediate = reinterpret_cast<uint32_t>(target);
+ uint32_t intermediate = instr_ptr[0];
+ intermediate &= ~EncodeMovwImmediate(0xFFFF);
+ intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
+ instr_ptr[0] = intermediate;
+ intermediate = instr_ptr[1];
+ intermediate &= ~EncodeMovwImmediate(0xFFFF);
+ intermediate |= EncodeMovwImmediate(immediate >> 16);
+ instr_ptr[1] = intermediate;
+ ASSERT(IsMovW(Memory::int32_at(pc)));
+ ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
+ CPU::FlushICache(pc, 2 * kInstrSize);
+ } else {
+ ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
+ Memory::Address_at(target_pointer_address_at(pc)) = target;
+ // Intuitively, we would think it is necessary to always flush the
+ // instruction cache after patching a target address in the code as follows:
+ // CPU::FlushICache(pc, sizeof(target));
+ // However, on ARM, no instruction is actually patched in the case
+ // of embedded constants of the form:
+ // ldr ip, [pc, #...]
+ // since the instruction accessing this address in the constant pool remains
+ // unchanged.
+ }
+}
+
+
+Address Assembler::target_address_at(Address pc) {
+ return target_pointer_at(pc);
+}
+
+
void Assembler::set_target_address_at(Address pc, Address target) {
- Memory::Address_at(target_address_address_at(pc)) = target;
- // Intuitively, we would think it is necessary to flush the instruction cache
- // after patching a target address in the code as follows:
- // CPU::FlushICache(pc, sizeof(target));
- // However, on ARM, no instruction was actually patched by the assignment
- // above; the target address is not part of an instruction, it is patched in
- // the constant pool and is read via a data access; the instruction accessing
- // this address in the constant pool remains unchanged.
+ set_target_pointer_at(pc, target);
}
+
} } // namespace v8::internal
#endif // V8_ARM_ASSEMBLER_ARM_INL_H_
diff --git a/src/3rdparty/v8/src/arm/assembler-arm.cc b/src/3rdparty/v8/src/arm/assembler-arm.cc
index ec28da4..b679efa 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/assembler-arm.cc
@@ -32,7 +32,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#include "v8.h"
@@ -52,17 +52,20 @@ unsigned CpuFeatures::found_by_runtime_probing_ = 0;
// Get the CPU features enabled by the build. For cross compilation the
-// preprocessor symbols CAN_USE_ARMV7_INSTRUCTIONS and CAN_USE_VFP_INSTRUCTIONS
+// preprocessor symbols CAN_USE_ARMV7_INSTRUCTIONS and CAN_USE_VFP3_INSTRUCTIONS
// can be defined to enable ARMv7 and VFPv3 instructions when building the
// snapshot.
-static uint64_t CpuFeaturesImpliedByCompiler() {
- uint64_t answer = 0;
+static unsigned CpuFeaturesImpliedByCompiler() {
+ unsigned answer = 0;
#ifdef CAN_USE_ARMV7_INSTRUCTIONS
answer |= 1u << ARMv7;
-#endif // def CAN_USE_ARMV7_INSTRUCTIONS
-#ifdef CAN_USE_VFP_INSTRUCTIONS
- answer |= 1u << VFP3 | 1u << ARMv7;
-#endif // def CAN_USE_VFP_INSTRUCTIONS
+#endif // CAN_USE_ARMV7_INSTRUCTIONS
+#ifdef CAN_USE_VFP3_INSTRUCTIONS
+ answer |= 1u << VFP3 | 1u << VFP2 | 1u << ARMv7;
+#endif // CAN_USE_VFP3_INSTRUCTIONS
+#ifdef CAN_USE_VFP2_INSTRUCTIONS
+ answer |= 1u << VFP2;
+#endif // CAN_USE_VFP2_INSTRUCTIONS
#ifdef __arm__
// If the compiler is allowed to use VFP then we can use VFP too in our code
@@ -70,18 +73,21 @@ static uint64_t CpuFeaturesImpliedByCompiler() {
// point support implies VFPv3, see ARM DDI 0406B, page A1-6.
#if defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(__VFP_FP__) \
&& !defined(__SOFTFP__)
- answer |= 1u << VFP3 | 1u << ARMv7;
+ answer |= 1u << VFP3 | 1u << ARMv7 | 1u << VFP2;
#endif // defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(__VFP_FP__)
// && !defined(__SOFTFP__)
-#endif // def __arm__
+#endif // _arm__
+ if (answer & (1u << ARMv7)) {
+ answer |= 1u << UNALIGNED_ACCESSES;
+ }
return answer;
}
void CpuFeatures::Probe() {
- unsigned standard_features = (OS::CpuFeaturesImpliedByPlatform() |
- CpuFeaturesImpliedByCompiler());
+ unsigned standard_features = static_cast<unsigned>(
+ OS::CpuFeaturesImpliedByPlatform()) | CpuFeaturesImpliedByCompiler();
ASSERT(supported_ == 0 || supported_ == standard_features);
#ifdef DEBUG
initialized_ = true;
@@ -101,27 +107,53 @@ void CpuFeatures::Probe() {
// For the simulator=arm build, use VFP when FLAG_enable_vfp3 is
// enabled. VFPv3 implies ARMv7, see ARM DDI 0406B, page A1-6.
if (FLAG_enable_vfp3) {
- supported_ |= 1u << VFP3 | 1u << ARMv7;
+ supported_ |= 1u << VFP3 | 1u << ARMv7 | 1u << VFP2;
}
// For the simulator=arm build, use ARMv7 when FLAG_enable_armv7 is enabled
if (FLAG_enable_armv7) {
supported_ |= 1u << ARMv7;
}
-#else // def __arm__
+
+ if (FLAG_enable_sudiv) {
+ supported_ |= 1u << SUDIV;
+ }
+
+ if (FLAG_enable_movw_movt) {
+ supported_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
+ }
+#else // __arm__
// Probe for additional features not already known to be available.
if (!IsSupported(VFP3) && OS::ArmCpuHasFeature(VFP3)) {
// This implementation also sets the VFP flags if runtime
- // detection of VFP returns true. VFPv3 implies ARMv7, see ARM DDI
+ // detection of VFP returns true. VFPv3 implies ARMv7 and VFP2, see ARM DDI
// 0406B, page A1-6.
- supported_ |= 1u << VFP3 | 1u << ARMv7;
- found_by_runtime_probing_ |= 1u << VFP3 | 1u << ARMv7;
+ found_by_runtime_probing_ |= 1u << VFP3 | 1u << ARMv7 | 1u << VFP2;
+ } else if (!IsSupported(VFP2) && OS::ArmCpuHasFeature(VFP2)) {
+ found_by_runtime_probing_ |= 1u << VFP2;
}
if (!IsSupported(ARMv7) && OS::ArmCpuHasFeature(ARMv7)) {
- supported_ |= 1u << ARMv7;
found_by_runtime_probing_ |= 1u << ARMv7;
}
+
+ if (!IsSupported(SUDIV) && OS::ArmCpuHasFeature(SUDIV)) {
+ found_by_runtime_probing_ |= 1u << SUDIV;
+ }
+
+ if (!IsSupported(UNALIGNED_ACCESSES) && OS::ArmCpuHasFeature(ARMv7)) {
+ found_by_runtime_probing_ |= 1u << UNALIGNED_ACCESSES;
+ }
+
+ if (OS::GetCpuImplementer() == QUALCOMM_IMPLEMENTER &&
+ OS::ArmCpuHasFeature(ARMv7)) {
+ found_by_runtime_probing_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
+ }
+
+ supported_ |= found_by_runtime_probing_;
#endif
+
+ // Assert that VFP3 implies VFP2 and ARMv7.
+ ASSERT(!IsSupported(VFP3) || (IsSupported(VFP2) && IsSupported(ARMv7)));
}
@@ -292,8 +324,8 @@ static const int kMinimalBufferSize = 4*KB;
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
- positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code) {
+ recorded_ast_id_(TypeFeedbackId::None()),
+ positions_recorder_(this) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -705,12 +737,6 @@ void Assembler::next(Label* L) {
}
-static Instr EncodeMovwImmediate(uint32_t immediate) {
- ASSERT(immediate < 0x10000);
- return ((immediate & 0xf000) << 4) | (immediate & 0xfff);
-}
-
-
// Low-level code emission routines depending on the addressing mode.
// If this returns true then you have to use the rotate_imm and immed_8
// that it returns, because it may have already changed the instruction
@@ -746,7 +772,7 @@ static bool fits_shifter(uint32_t imm32,
}
}
} else if ((*instr & kCmpCmnMask) == kCmpCmnPattern) {
- if (fits_shifter(-imm32, rotate_imm, immed_8, NULL)) {
+ if (fits_shifter(-static_cast<int>(imm32), rotate_imm, immed_8, NULL)) {
*instr ^= kCmpCmnFlip;
return true;
}
@@ -754,7 +780,7 @@ static bool fits_shifter(uint32_t imm32,
Instr alu_insn = (*instr & kALUMask);
if (alu_insn == ADD ||
alu_insn == SUB) {
- if (fits_shifter(-imm32, rotate_imm, immed_8, NULL)) {
+ if (fits_shifter(-static_cast<int>(imm32), rotate_imm, immed_8, NULL)) {
*instr ^= kAddSubFlip;
return true;
}
@@ -775,13 +801,14 @@ static bool fits_shifter(uint32_t imm32,
// if they can be encoded in the ARM's 12 bits of immediate-offset instruction
// space. There is no guarantee that the relocated location can be similarly
// encoded.
-bool Operand::must_use_constant_pool() const {
+bool Operand::must_output_reloc_info(const Assembler* assembler) const {
if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
#ifdef DEBUG
if (!Serializer::enabled()) {
Serializer::TooLateToEnableNow();
}
#endif // def DEBUG
+ if (assembler != NULL && assembler->predictable_code_size()) return true;
return Serializer::enabled();
} else if (rmode_ == RelocInfo::NONE) {
return false;
@@ -790,24 +817,28 @@ bool Operand::must_use_constant_pool() const {
}
-bool Operand::is_single_instruction(Instr instr) const {
+static bool use_movw_movt(const Operand& x, const Assembler* assembler) {
+ if (Assembler::use_immediate_embedded_pointer_loads(assembler)) {
+ return true;
+ }
+ if (x.must_output_reloc_info(assembler)) {
+ return false;
+ }
+ return CpuFeatures::IsSupported(ARMv7);
+}
+
+
+bool Operand::is_single_instruction(const Assembler* assembler,
+ Instr instr) const {
if (rm_.is_valid()) return true;
uint32_t dummy1, dummy2;
- if (must_use_constant_pool() ||
+ if (must_output_reloc_info(assembler) ||
!fits_shifter(imm32_, &dummy1, &dummy2, &instr)) {
// The immediate operand cannot be encoded as a shifter operand, or use of
// constant pool is required. For a mov instruction not setting the
// condition code additional instruction conventions can be used.
if ((instr & ~kCondMask) == 13*B21) { // mov, S not set
- if (must_use_constant_pool() ||
- !CpuFeatures::IsSupported(ARMv7)) {
- // mov instruction will be an ldr from constant pool (one instruction).
- return true;
- } else {
- // mov instruction will be a mov or movw followed by movt (two
- // instructions).
- return false;
- }
+ return !use_movw_movt(*this, assembler);
} else {
// If this is not a mov or mvn instruction there will always an additional
// instructions - either mov or ldr. The mov might actually be two
@@ -823,6 +854,29 @@ bool Operand::is_single_instruction(Instr instr) const {
}
+void Assembler::move_32_bit_immediate(Condition cond,
+ Register rd,
+ SBit s,
+ const Operand& x) {
+ if (rd.code() != pc.code() && s == LeaveCC) {
+ if (use_movw_movt(x, this)) {
+ if (x.must_output_reloc_info(this)) {
+ RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL);
+ // Make sure the movw/movt doesn't get separated.
+ BlockConstPoolFor(2);
+ }
+ emit(cond | 0x30*B20 | rd.code()*B12 |
+ EncodeMovwImmediate(x.imm32_ & 0xffff));
+ movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond);
+ return;
+ }
+ }
+
+ RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL);
+ ldr(rd, MemOperand(pc, 0), cond);
+}
+
+
void Assembler::addrmod1(Instr instr,
Register rn,
Register rd,
@@ -833,7 +887,7 @@ void Assembler::addrmod1(Instr instr,
// Immediate.
uint32_t rotate_imm;
uint32_t immed_8;
- if (x.must_use_constant_pool() ||
+ if (x.must_output_reloc_info(this) ||
!fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) {
// The immediate operand cannot be encoded as a shifter operand, so load
// it first to register ip and change the original instruction to use ip.
@@ -842,24 +896,19 @@ void Assembler::addrmod1(Instr instr,
CHECK(!rn.is(ip)); // rn should never be ip, or will be trashed
Condition cond = Instruction::ConditionField(instr);
if ((instr & ~kCondMask) == 13*B21) { // mov, S not set
- if (x.must_use_constant_pool() ||
- !CpuFeatures::IsSupported(ARMv7)) {
- RecordRelocInfo(x.rmode_, x.imm32_);
- ldr(rd, MemOperand(pc, 0), cond);
- } else {
- // Will probably use movw, will certainly not use constant pool.
- mov(rd, Operand(x.imm32_ & 0xffff), LeaveCC, cond);
- movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond);
- }
+ move_32_bit_immediate(cond, rd, LeaveCC, x);
} else {
- // If this is not a mov or mvn instruction we may still be able to avoid
- // a constant pool entry by using mvn or movw.
- if (!x.must_use_constant_pool() &&
- (instr & kMovMvnMask) != kMovMvnPattern) {
- mov(ip, x, LeaveCC, cond);
- } else {
- RecordRelocInfo(x.rmode_, x.imm32_);
+ if ((instr & kMovMvnMask) == kMovMvnPattern) {
+ // Moves need to use a constant pool entry.
+ RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL);
ldr(ip, MemOperand(pc, 0), cond);
+ } else if (x.must_output_reloc_info(this)) {
+ // Otherwise, use most efficient form of fetching from constant pool.
+ move_32_bit_immediate(cond, ip, LeaveCC, x);
+ } else {
+ // If this is not a mov or mvn instruction we may still be able to
+ // avoid a constant pool entry by using mvn or movw.
+ mov(ip, x, LeaveCC, cond);
}
addrmod1(instr, rn, rd, Operand(ip));
}
@@ -1166,6 +1215,9 @@ void Assembler::mov(Register dst, const Operand& src, SBit s, Condition cond) {
void Assembler::movw(Register reg, uint32_t immediate, Condition cond) {
ASSERT(immediate < 0x10000);
+ // May use movw if supported, but on unsupported platforms will try to use
+ // equivalent rotated immed_8 value and other tricks before falling back to a
+ // constant pool load.
mov(reg, Operand(immediate), LeaveCC, cond);
}
@@ -1195,6 +1247,22 @@ void Assembler::mla(Register dst, Register src1, Register src2, Register srcA,
}
+void Assembler::mls(Register dst, Register src1, Register src2, Register srcA,
+ Condition cond) {
+ ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
+ emit(cond | B22 | B21 | dst.code()*B16 | srcA.code()*B12 |
+ src2.code()*B8 | B7 | B4 | src1.code());
+}
+
+
+void Assembler::sdiv(Register dst, Register src1, Register src2,
+ Condition cond) {
+ ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
+ emit(cond | B26 | B25| B24 | B20 | dst.code()*B16 | 0xf * B12 |
+ src2.code()*B8 | B4 | src1.code());
+}
+
+
void Assembler::mul(Register dst, Register src1, Register src2,
SBit s, Condition cond) {
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
@@ -1379,7 +1447,7 @@ void Assembler::msr(SRegisterFieldMask fields, const Operand& src,
// Immediate.
uint32_t rotate_imm;
uint32_t immed_8;
- if (src.must_use_constant_pool() ||
+ if (src.must_output_reloc_info(this) ||
!fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) {
// Immediate operand cannot be encoded, load it first to register ip.
RecordRelocInfo(src.rmode_, src.imm32_);
@@ -1656,7 +1724,7 @@ void Assembler::vldr(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-628.
// cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1011(11-8) | offset
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
int u = 1;
if (offset < 0) {
offset = -offset;
@@ -1698,7 +1766,7 @@ void Assembler::vldr(const SwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-628.
// cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1010(11-8) | offset
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
int u = 1;
if (offset < 0) {
offset = -offset;
@@ -1742,7 +1810,7 @@ void Assembler::vstr(const DwVfpRegister src,
// Instruction details available in ARM DDI 0406A, A8-786.
// cond(31-28) | 1101(27-24)| U000(23-20) | | Rbase(19-16) |
// Vsrc(15-12) | 1011(11-8) | (offset/4)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
int u = 1;
if (offset < 0) {
offset = -offset;
@@ -1783,7 +1851,7 @@ void Assembler::vstr(const SwVfpRegister src,
// Instruction details available in ARM DDI 0406A, A8-786.
// cond(31-28) | 1101(27-24)| U000(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1010(11-8) | (offset/4)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
int u = 1;
if (offset < 0) {
offset = -offset;
@@ -1814,7 +1882,7 @@ void Assembler::vstr(const SwVfpRegister src,
const Condition cond) {
ASSERT(!operand.rm().is_valid());
ASSERT(operand.am_ == Offset);
- vldr(src, operand.rn(), operand.offset(), cond);
+ vstr(src, operand.rn(), operand.offset(), cond);
}
@@ -1826,7 +1894,7 @@ void Assembler::vldm(BlockAddrMode am,
// Instruction details available in ARM DDI 0406A, A8-626.
// cond(31-28) | 110(27-25)| PUDW1(24-20) | Rbase(19-16) |
// first(15-12) | 1010(11-8) | (count * 2)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT_LE(first.code(), last.code());
ASSERT(am == ia || am == ia_w || am == db_w);
ASSERT(!base.is(pc));
@@ -1834,6 +1902,7 @@ void Assembler::vldm(BlockAddrMode am,
int sd, d;
first.split_code(&sd, &d);
int count = last.code() - first.code() + 1;
+ ASSERT(count <= 16);
emit(cond | B27 | B26 | am | d*B22 | B20 | base.code()*B16 | sd*B12 |
0xB*B8 | count*2);
}
@@ -1847,7 +1916,7 @@ void Assembler::vstm(BlockAddrMode am,
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 110(27-25)| PUDW0(24-20) | Rbase(19-16) |
// first(15-12) | 1011(11-8) | (count * 2)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT_LE(first.code(), last.code());
ASSERT(am == ia || am == ia_w || am == db_w);
ASSERT(!base.is(pc));
@@ -1855,6 +1924,7 @@ void Assembler::vstm(BlockAddrMode am,
int sd, d;
first.split_code(&sd, &d);
int count = last.code() - first.code() + 1;
+ ASSERT(count <= 16);
emit(cond | B27 | B26 | am | d*B22 | base.code()*B16 | sd*B12 |
0xB*B8 | count*2);
}
@@ -1867,7 +1937,7 @@ void Assembler::vldm(BlockAddrMode am,
// Instruction details available in ARM DDI 0406A, A8-626.
// cond(31-28) | 110(27-25)| PUDW1(24-20) | Rbase(19-16) |
// first(15-12) | 1010(11-8) | (count/2)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT_LE(first.code(), last.code());
ASSERT(am == ia || am == ia_w || am == db_w);
ASSERT(!base.is(pc));
@@ -1888,7 +1958,7 @@ void Assembler::vstm(BlockAddrMode am,
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 110(27-25)| PUDW0(24-20) | Rbase(19-16) |
// first(15-12) | 1011(11-8) | (count/2)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT_LE(first.code(), last.code());
ASSERT(am == ia || am == ia_w || am == db_w);
ASSERT(!base.is(pc));
@@ -1911,7 +1981,7 @@ static void DoubleAsTwoUInt32(double d, uint32_t* lo, uint32_t* hi) {
// Only works for little endian floating point formats.
// We don't support VFP on the mixed endian floating point platform.
static bool FitsVMOVDoubleImmediate(double d, uint32_t *encoding) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsSupported(VFP3));
// VMOV can accept an immediate of the form:
//
@@ -1961,13 +2031,14 @@ static bool FitsVMOVDoubleImmediate(double d, uint32_t *encoding) {
void Assembler::vmov(const DwVfpRegister dst,
double imm,
+ const Register scratch,
const Condition cond) {
// Dd = immediate
// Instruction details available in ARM DDI 0406B, A8-640.
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
uint32_t enc;
- if (FitsVMOVDoubleImmediate(imm, &enc)) {
+ if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
// The double can be encoded in the instruction.
emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc);
} else {
@@ -1975,22 +2046,22 @@ void Assembler::vmov(const DwVfpRegister dst,
// using vldr from a constant pool.
uint32_t lo, hi;
DoubleAsTwoUInt32(imm, &lo, &hi);
+ mov(ip, Operand(lo));
- if (lo == hi) {
- // If the lo and hi parts of the double are equal, the literal is easier
- // to create. This is the case with 0.0.
- mov(ip, Operand(lo));
- vmov(dst, ip, ip);
- } else {
+ if (scratch.is(no_reg)) {
// Move the low part of the double into the lower of the corresponsing S
// registers of D register dst.
- mov(ip, Operand(lo));
vmov(dst.low(), ip, cond);
// Move the high part of the double into the higher of the corresponsing S
// registers of D register dst.
mov(ip, Operand(hi));
vmov(dst.high(), ip, cond);
+ } else {
+ // Move the low and high parts of the double to a D register in one
+ // instruction.
+ mov(scratch, Operand(hi));
+ vmov(dst, ip, scratch, cond);
}
}
}
@@ -2001,7 +2072,7 @@ void Assembler::vmov(const SwVfpRegister dst,
const Condition cond) {
// Sd = Sm
// Instruction details available in ARM DDI 0406B, A8-642.
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
int sd, d, sm, m;
dst.split_code(&sd, &d);
src.split_code(&sm, &m);
@@ -2014,7 +2085,7 @@ void Assembler::vmov(const DwVfpRegister dst,
const Condition cond) {
// Dd = Dm
// Instruction details available in ARM DDI 0406B, A8-642.
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0xB*B20 |
dst.code()*B12 | 0x5*B9 | B8 | B6 | src.code());
}
@@ -2028,7 +2099,7 @@ void Assembler::vmov(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-646.
// cond(31-28) | 1100(27-24)| 010(23-21) | op=0(20) | Rt2(19-16) |
// Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT(!src1.is(pc) && !src2.is(pc));
emit(cond | 0xC*B24 | B22 | src2.code()*B16 |
src1.code()*B12 | 0xB*B8 | B4 | dst.code());
@@ -2043,7 +2114,7 @@ void Assembler::vmov(const Register dst1,
// Instruction details available in ARM DDI 0406A, A8-646.
// cond(31-28) | 1100(27-24)| 010(23-21) | op=1(20) | Rt2(19-16) |
// Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT(!dst1.is(pc) && !dst2.is(pc));
emit(cond | 0xC*B24 | B22 | B20 | dst2.code()*B16 |
dst1.code()*B12 | 0xB*B8 | B4 | src.code());
@@ -2057,7 +2128,7 @@ void Assembler::vmov(const SwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-642.
// cond(31-28) | 1110(27-24)| 000(23-21) | op=0(20) | Vn(19-16) |
// Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT(!src.is(pc));
int sn, n;
dst.split_code(&sn, &n);
@@ -2072,7 +2143,7 @@ void Assembler::vmov(const Register dst,
// Instruction details available in ARM DDI 0406A, A8-642.
// cond(31-28) | 1110(27-24)| 000(23-21) | op=1(20) | Vn(19-16) |
// Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT(!dst.is(pc));
int sn, n;
src.split_code(&sn, &n);
@@ -2197,7 +2268,7 @@ void Assembler::vcvt_f64_s32(const DwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(F64, dst.code(), S32, src.code(), mode, cond));
}
@@ -2206,7 +2277,7 @@ void Assembler::vcvt_f32_s32(const SwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(F32, dst.code(), S32, src.code(), mode, cond));
}
@@ -2215,7 +2286,7 @@ void Assembler::vcvt_f64_u32(const DwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(F64, dst.code(), U32, src.code(), mode, cond));
}
@@ -2224,7 +2295,7 @@ void Assembler::vcvt_s32_f64(const SwVfpRegister dst,
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(S32, dst.code(), F64, src.code(), mode, cond));
}
@@ -2233,7 +2304,7 @@ void Assembler::vcvt_u32_f64(const SwVfpRegister dst,
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(U32, dst.code(), F64, src.code(), mode, cond));
}
@@ -2242,7 +2313,7 @@ void Assembler::vcvt_f64_f32(const DwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(F64, dst.code(), F32, src.code(), mode, cond));
}
@@ -2251,7 +2322,7 @@ void Assembler::vcvt_f32_f64(const SwVfpRegister dst,
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(EncodeVCVT(F32, dst.code(), F64, src.code(), mode, cond));
}
@@ -2259,6 +2330,7 @@ void Assembler::vcvt_f32_f64(const SwVfpRegister dst,
void Assembler::vneg(const DwVfpRegister dst,
const DwVfpRegister src,
const Condition cond) {
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0xB*B20 | B16 | dst.code()*B12 |
0x5*B9 | B8 | B6 | src.code());
}
@@ -2267,6 +2339,7 @@ void Assembler::vneg(const DwVfpRegister dst,
void Assembler::vabs(const DwVfpRegister dst,
const DwVfpRegister src,
const Condition cond) {
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 |
0x5*B9 | B8 | 0x3*B6 | src.code());
}
@@ -2281,7 +2354,7 @@ void Assembler::vadd(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-536.
// cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
@@ -2296,7 +2369,7 @@ void Assembler::vsub(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 1(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
}
@@ -2311,7 +2384,7 @@ void Assembler::vmul(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 11100(27-23)| D=?(22) | 10(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0x2*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
@@ -2326,7 +2399,7 @@ void Assembler::vdiv(const DwVfpRegister dst,
// Instruction details available in ARM DDI 0406A, A8-584.
// cond(31-28) | 11101(27-23)| D=?(22) | 00(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=? | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | B23 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
@@ -2339,7 +2412,7 @@ void Assembler::vcmp(const DwVfpRegister src1,
// Instruction details available in ARM DDI 0406A, A8-570.
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0100 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 |
src1.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
}
@@ -2352,7 +2425,7 @@ void Assembler::vcmp(const DwVfpRegister src1,
// Instruction details available in ARM DDI 0406A, A8-570.
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | 0000(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
ASSERT(src2 == 0.0);
emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 |
src1.code()*B12 | 0x5*B9 | B8 | B6);
@@ -2363,7 +2436,7 @@ void Assembler::vmsr(Register dst, Condition cond) {
// Instruction details available in ARM DDI 0406A, A8-652.
// cond(31-28) | 1110 (27-24) | 1110(23-20)| 0001 (19-16) |
// Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0xE*B20 | B16 |
dst.code()*B12 | 0xA*B8 | B4);
}
@@ -2373,7 +2446,7 @@ void Assembler::vmrs(Register dst, Condition cond) {
// Instruction details available in ARM DDI 0406A, A8-652.
// cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) |
// Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | 0xF*B20 | B16 |
dst.code()*B12 | 0xA*B8 | B4);
}
@@ -2384,7 +2457,7 @@ void Assembler::vsqrt(const DwVfpRegister dst,
const Condition cond) {
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0001 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | 11 (7-6) | M(5)=? | 0(4) | Vm(3-0)
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
emit(cond | 0xE*B24 | B23 | 0x3*B20 | B16 |
dst.code()*B12 | 0x5*B9 | B8 | 3*B6 | src.code());
}
@@ -2392,15 +2465,35 @@ void Assembler::vsqrt(const DwVfpRegister dst,
// Pseudo instructions.
void Assembler::nop(int type) {
- // This is mov rx, rx.
- ASSERT(0 <= type && type <= 14); // mov pc, pc is not a nop.
+ // ARMv6{K/T2} and v7 have an actual NOP instruction but it serializes
+ // some of the CPU's pipeline and has to issue. Older ARM chips simply used
+ // MOV Rx, Rx as NOP and it performs better even in newer CPUs.
+ // We therefore use MOV Rx, Rx, even on newer CPUs, and use Rx to encode
+ // a type.
+ ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop.
emit(al | 13*B21 | type*B12 | type);
}
+bool Assembler::IsMovT(Instr instr) {
+ instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions
+ ((kNumRegisters-1)*B12) | // mask out register
+ EncodeMovwImmediate(0xFFFF)); // mask out immediate value
+ return instr == 0x34*B20;
+}
+
+
+bool Assembler::IsMovW(Instr instr) {
+ instr &= ~(((kNumberOfConditions - 1) << 28) | // Mask off conditions
+ ((kNumRegisters-1)*B12) | // mask out destination
+ EncodeMovwImmediate(0xFFFF)); // mask out immediate value
+ return instr == 0x30*B20;
+}
+
+
bool Assembler::IsNop(Instr instr, int type) {
+ ASSERT(0 <= type && type <= 14); // mov pc, pc isn't a nop.
// Check for mov rx, rx where x = type.
- ASSERT(0 <= type && type <= 14); // mov pc, pc is not a nop.
return instr == (al | 13*B21 | type*B12 | type);
}
@@ -2435,6 +2528,14 @@ void Assembler::RecordComment(const char* msg) {
}
+void Assembler::RecordConstPool(int size) {
+ // We only need this for debugger support, to correctly compute offsets in the
+ // code.
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size));
+#endif
+}
+
void Assembler::GrowBuffer() {
if (!own_buffer_) FATAL("external code buffer is too small");
@@ -2508,15 +2609,21 @@ void Assembler::dd(uint32_t data) {
}
-void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
+void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
+ UseConstantPoolMode mode) {
// We do not try to reuse pool constants.
RelocInfo rinfo(pc_, rmode, data, NULL);
- if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) {
+ if (((rmode >= RelocInfo::JS_RETURN) &&
+ (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
+ (rmode == RelocInfo::CONST_POOL) ||
+ mode == DONT_USE_CONSTANT_POOL) {
// Adjust code for new modes.
ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
|| RelocInfo::IsJSReturn(rmode)
|| RelocInfo::IsComment(rmode)
- || RelocInfo::IsPosition(rmode));
+ || RelocInfo::IsPosition(rmode)
+ || RelocInfo::IsConstPool(rmode)
+ || mode == DONT_USE_CONSTANT_POOL);
// These modes do not need an entry in the constant pool.
} else {
ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
@@ -2542,7 +2649,10 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
}
ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
- RelocInfo reloc_info_with_ast_id(pc_, rmode, RecordedAstId(), NULL);
+ RelocInfo reloc_info_with_ast_id(pc_,
+ rmode,
+ RecordedAstId().ToInt(),
+ NULL);
ClearRecordedAstId();
reloc_info_writer.Write(&reloc_info_with_ast_id);
} else {
@@ -2602,13 +2712,15 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// pool (include the jump over the pool and the constant pool marker and
// the gap to the relocation information).
int jump_instr = require_jump ? kInstrSize : 0;
- int needed_space = jump_instr + kInstrSize +
- num_pending_reloc_info_ * kInstrSize + kGap;
+ int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
+ int needed_space = size + kGap;
while (buffer_space() <= needed_space) GrowBuffer();
{
// Block recursive calls to CheckConstPool.
BlockConstPoolScope block_const_pool(this);
+ RecordComment("[ Constant Pool");
+ RecordConstPool(size);
// Emit jump over constant pool if necessary.
Label after_pool;
@@ -2616,32 +2728,33 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
b(&after_pool);
}
- RecordComment("[ Constant Pool");
-
- // Put down constant pool marker "Undefined instruction" as specified by
- // A5.6 (ARMv7) Instruction set encoding.
- emit(kConstantPoolMarker | num_pending_reloc_info_);
+ // Put down constant pool marker "Undefined instruction".
+ emit(kConstantPoolMarker |
+ EncodeConstantPoolLength(num_pending_reloc_info_));
// Emit constant pool entries.
for (int i = 0; i < num_pending_reloc_info_; i++) {
RelocInfo& rinfo = pending_reloc_info_[i];
ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
rinfo.rmode() != RelocInfo::POSITION &&
- rinfo.rmode() != RelocInfo::STATEMENT_POSITION);
+ rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
+ rinfo.rmode() != RelocInfo::CONST_POOL);
Instr instr = instr_at(rinfo.pc());
// Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
- ASSERT(IsLdrPcImmediateOffset(instr) &&
- GetLdrRegisterImmediateOffset(instr) == 0);
-
- int delta = pc_ - rinfo.pc() - kPcLoadDelta;
- // 0 is the smallest delta:
- // ldr rd, [pc, #0]
- // constant pool marker
- // data
- ASSERT(is_uint12(delta));
-
- instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
+ if (IsLdrPcImmediateOffset(instr) &&
+ GetLdrRegisterImmediateOffset(instr) == 0) {
+ int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+ // 0 is the smallest delta:
+ // ldr rd, [pc, #0]
+ // constant pool marker
+ // data
+ ASSERT(is_uint12(delta));
+
+ instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
+ } else {
+ ASSERT(IsMovW(instr));
+ }
emit(rinfo.data());
}
diff --git a/src/3rdparty/v8/src/arm/assembler-arm.h b/src/3rdparty/v8/src/arm/assembler-arm.h
index e2d5f59..8418aee 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/assembler-arm.h
@@ -424,8 +424,8 @@ class Operand BASE_EMBEDDED {
// the instruction this operand is used for is a MOV or MVN instruction the
// actual instruction to use is required for this calculation. For other
// instructions instr is ignored.
- bool is_single_instruction(Instr instr = 0) const;
- bool must_use_constant_pool() const;
+ bool is_single_instruction(const Assembler* assembler, Instr instr = 0) const;
+ bool must_output_reloc_info(const Assembler* assembler) const;
inline int32_t immediate() const {
ASSERT(!rm_.is_valid());
@@ -510,6 +510,11 @@ class CpuFeatures : public AllStatic {
static bool IsSupported(CpuFeature f) {
ASSERT(initialized_);
if (f == VFP3 && !FLAG_enable_vfp3) return false;
+ if (f == VFP2 && !FLAG_enable_vfp2) return false;
+ if (f == SUDIV && !FLAG_enable_sudiv) return false;
+ if (f == UNALIGNED_ACCESSES && !FLAG_enable_unaligned_accesses) {
+ return false;
+ }
return (supported_ & (1u << f)) != 0;
}
@@ -535,6 +540,8 @@ class CpuFeatures : public AllStatic {
public:
explicit Scope(CpuFeature f) {
unsigned mask = 1u << f;
+ // VFP2 and ARMv7 are implied by VFP3.
+ if (f == VFP3) mask |= 1u << VFP2 | 1u << ARMv7;
ASSERT(CpuFeatures::IsSupported(f));
ASSERT(!Serializer::enabled() ||
(CpuFeatures::found_by_runtime_probing_ & mask) == 0);
@@ -642,9 +649,6 @@ class Assembler : public AssemblerBase {
Assembler(Isolate* isolate, void* buffer, int buffer_size);
~Assembler();
- // Overrides the default provided by FLAG_debug_code.
- void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -677,13 +681,25 @@ class Assembler : public AssemblerBase {
void label_at_put(Label* L, int at_offset);
// Return the address in the constant pool of the code target address used by
- // the branch/call instruction at pc.
- INLINE(static Address target_address_address_at(Address pc));
+ // the branch/call instruction at pc, or the object in a mov.
+ INLINE(static Address target_pointer_address_at(Address pc));
+
+ // Read/Modify the pointer in the branch/call/move instruction at pc.
+ INLINE(static Address target_pointer_at(Address pc));
+ INLINE(static void set_target_pointer_at(Address pc, Address target));
// Read/Modify the code target address in the branch/call instruction at pc.
INLINE(static Address target_address_at(Address pc));
INLINE(static void set_target_address_at(Address pc, Address target));
+ // Return the code target address at a call site from the return address
+ // of that call in the instruction stream.
+ INLINE(static Address target_address_from_return_address(Address pc));
+
+ // Given the address of the beginning of a call, return the address
+ // in the instruction stream that the call will return from.
+ INLINE(static Address return_address_from_call_start(Address pc));
+
// This sets the branch destination (which is in the constant pool on ARM).
// This is for calls and branches within generated code.
inline static void deserialization_set_special_target_at(
@@ -702,22 +718,6 @@ class Assembler : public AssemblerBase {
// Size of an instruction.
static const int kInstrSize = sizeof(Instr);
- // Distance between the instruction referring to the address of the call
- // target and the return address.
-#ifdef USE_BLX
- // Call sequence is:
- // ldr ip, [pc, #...] @ call address
- // blx ip
- // @ return address
- static const int kCallTargetAddressOffset = 2 * kInstrSize;
-#else
- // Call sequence is:
- // mov lr, pc
- // ldr pc, [pc, #...] @ call address
- // @ return address
- static const int kCallTargetAddressOffset = kInstrSize;
-#endif
-
// Distance between start of patched return sequence and the emitted address
// to jump to.
#ifdef USE_BLX
@@ -746,6 +746,12 @@ class Assembler : public AssemblerBase {
static const int kPatchDebugBreakSlotAddressOffset = kInstrSize;
#endif
+#ifdef USE_BLX
+ static const int kPatchDebugBreakSlotReturnOffset = 2 * kInstrSize;
+#else
+ static const int kPatchDebugBreakSlotReturnOffset = kInstrSize;
+#endif
+
// Difference between address of current opcode and value read from pc
// register.
static const int kPcLoadDelta = 8;
@@ -861,6 +867,12 @@ class Assembler : public AssemblerBase {
void mla(Register dst, Register src1, Register src2, Register srcA,
SBit s = LeaveCC, Condition cond = al);
+ void mls(Register dst, Register src1, Register src2, Register srcA,
+ Condition cond = al);
+
+ void sdiv(Register dst, Register src1, Register src2,
+ Condition cond = al);
+
void mul(Register dst, Register src1, Register src2,
SBit s = LeaveCC, Condition cond = al);
@@ -1045,6 +1057,7 @@ class Assembler : public AssemblerBase {
void vmov(const DwVfpRegister dst,
double imm,
+ const Register scratch = no_reg,
const Condition cond = al);
void vmov(const SwVfpRegister dst,
const SwVfpRegister src,
@@ -1164,6 +1177,20 @@ class Assembler : public AssemblerBase {
// Jump unconditionally to given label.
void jmp(Label* L) { b(L, al); }
+ static bool use_immediate_embedded_pointer_loads(
+ const Assembler* assembler) {
+#ifdef USE_BLX
+ return CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
+ (assembler == NULL || !assembler->predictable_code_size());
+#else
+ // If not using BLX, all loads from the constant pool cannot be immediate,
+ // because the ldr pc, [pc + #xxxx] used for calls must be a single
+ // instruction and cannot be easily distinguished out of context from
+ // other loads that could use movw/movt.
+ return false;
+#endif
+ }
+
// Check the code size generated from label to here.
int SizeOfCodeGeneratedSince(Label* label) {
return pc_offset() - label->pos();
@@ -1203,22 +1230,41 @@ class Assembler : public AssemblerBase {
// Record the AST id of the CallIC being compiled, so that it can be placed
// in the relocation information.
- void SetRecordedAstId(unsigned ast_id) {
- ASSERT(recorded_ast_id_ == kNoASTId);
+ void SetRecordedAstId(TypeFeedbackId ast_id) {
+ ASSERT(recorded_ast_id_.IsNone());
recorded_ast_id_ = ast_id;
}
- unsigned RecordedAstId() {
- ASSERT(recorded_ast_id_ != kNoASTId);
+ TypeFeedbackId RecordedAstId() {
+ ASSERT(!recorded_ast_id_.IsNone());
return recorded_ast_id_;
}
- void ClearRecordedAstId() { recorded_ast_id_ = kNoASTId; }
+ void ClearRecordedAstId() { recorded_ast_id_ = TypeFeedbackId::None(); }
// Record a comment relocation entry that can be used by a disassembler.
// Use --code-comments to enable.
void RecordComment(const char* msg);
+ // Record the emission of a constant pool.
+ //
+ // The emission of constant pool depends on the size of the code generated and
+ // the number of RelocInfo recorded.
+ // The Debug mechanism needs to map code offsets between two versions of a
+ // function, compiled with and without debugger support (see for example
+ // Debug::PrepareForBreakPoints()).
+ // Compiling functions with debugger support generates additional code
+ // (Debug::GenerateSlot()). This may affect the emission of the constant
+ // pools and cause the version of the code with debugger support to have
+ // constant pools generated in different places.
+  // Recording the position and size of emitted constant pools makes it
+  // possible to correctly compute the offset mappings between the different
+  // versions of a function in all situations.
+ //
+ // The parameter indicates the size of the constant pool (in bytes), including
+ // the marker and branch over the data.
+ void RecordConstPool(int size);
+
// Writes a single byte or word of data in the code stream. Used
// for inline tables, e.g., jump-tables. The constant pool should be
// emitted before any use of db and dd to ensure that constant pools
@@ -1265,12 +1311,16 @@ class Assembler : public AssemblerBase {
static Register GetCmpImmediateRegister(Instr instr);
static int GetCmpImmediateRawImmediate(Instr instr);
static bool IsNop(Instr instr, int type = NON_MARKING_NOP);
+ static bool IsMovT(Instr instr);
+ static bool IsMovW(Instr instr);
// Constants in pools are accessed via pc relative addressing, which can
// reach +/-4KB thereby defining a maximum distance between the instruction
// and the accessed constant.
static const int kMaxDistToPool = 4*KB;
static const int kMaxNumPendingRelocInfo = kMaxDistToPool/kInstrSize;
+ STATIC_ASSERT((kConstantPoolLengthMaxMask & kMaxNumPendingRelocInfo) ==
+ kMaxNumPendingRelocInfo);
// Postpone the generation of the constant pool for the specified number of
// instructions.
@@ -1283,9 +1333,7 @@ class Assembler : public AssemblerBase {
// Relocation for a type-recording IC has the AST id added to it. This
// member variable is a way to pass the information from the call site to
// the relocation info.
- unsigned recorded_ast_id_;
-
- bool emit_debug_code() const { return emit_debug_code_; }
+ TypeFeedbackId recorded_ast_id_;
int buffer_space() const { return reloc_info_writer.pos() - pc_; }
@@ -1403,6 +1451,12 @@ class Assembler : public AssemblerBase {
void GrowBuffer();
inline void emit(Instr x);
+ // 32-bit immediate values
+ void move_32_bit_immediate(Condition cond,
+ Register rd,
+ SBit s,
+ const Operand& x);
+
// Instruction generation
void addrmod1(Instr instr, Register rn, Register rd, const Operand& x);
void addrmod2(Instr instr, Register rd, const MemOperand& x);
@@ -1416,8 +1470,14 @@ class Assembler : public AssemblerBase {
void link_to(Label* L, Label* appendix);
void next(Label* L);
+ enum UseConstantPoolMode {
+ USE_CONSTANT_POOL,
+ DONT_USE_CONSTANT_POOL
+ };
+
// Record reloc info for current pc_
- void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
+ void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0,
+ UseConstantPoolMode mode = USE_CONSTANT_POOL);
friend class RegExpMacroAssemblerARM;
friend class RelocInfo;
@@ -1425,7 +1485,6 @@ class Assembler : public AssemblerBase {
friend class BlockConstPoolScope;
PositionsRecorder positions_recorder_;
- bool emit_debug_code_;
friend class PositionsRecorder;
friend class EnsureSpace;
};
diff --git a/src/3rdparty/v8/src/arm/builtins-arm.cc b/src/3rdparty/v8/src/arm/builtins-arm.cc
index c99e778..24d14e8 100644
--- a/src/3rdparty/v8/src/arm/builtins-arm.cc
+++ b/src/3rdparty/v8/src/arm/builtins-arm.cc
@@ -75,12 +75,13 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
Register result) {
- // Load the global context.
+ // Load the native context.
- __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(result,
- FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
- // Load the InternalArray function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ ldr(result,
+ FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+ // Load the InternalArray function from the native context.
__ ldr(result,
MemOperand(result,
Context::SlotOffset(
@@ -90,12 +91,13 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
- // Load the global context.
+ // Load the native context.
- __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(result,
- FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
- // Load the Array function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ ldr(result,
+ FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+ // Load the Array function from the native context.
__ ldr(result,
MemOperand(result,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
@@ -114,7 +116,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -208,7 +210,8 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
+ __ LoadInitialArrayMap(array_function, scratch2,
+ elements_array_storage, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ tst(array_size, array_size);
@@ -440,10 +443,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ b(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// r3: JSArray
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r2,
r9,
@@ -696,6 +699,43 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
+ __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ mov(pc, r2);
+}
+
+
+void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ GenerateTailCallToSharedCode(masm);
+}
+
+
+void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Push a copy of the function onto the stack.
+ __ push(r1);
+ // Push call kind information.
+ __ push(r5);
+
+ __ push(r1); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kParallelRecompile, 1);
+
+ // Restore call kind information.
+ __ pop(r5);
+ // Restore receiver.
+ __ pop(r1);
+
+ // Tear down internal frame.
+ }
+
+ GenerateTailCallToSharedCode(masm);
+}
+
+
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -1186,6 +1226,39 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
}
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+ // For now, we are relying on the fact that make_code_young doesn't do any
+ // garbage collection which allows us to save/restore the registers without
+ // worrying about which of them contain pointers. We also don't build an
+ // internal frame to make the code faster, since we shouldn't have to do stack
+ // crawls in MakeCodeYoung. This seems a bit fragile.
+
+ // The following registers must be saved and restored when calling through to
+ // the runtime:
+ // r0 - contains return address (beginning of patch sequence)
+ // r1 - function object
+ FrameScope scope(masm, StackFrame::MANUAL);
+ __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+ __ PrepareCallCFunction(1, 0, r1);
+ __ CallCFunction(
+ ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+ __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+ __ mov(pc, r0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+} \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
@@ -1245,7 +1318,7 @@ void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
CpuFeatures::TryForceFeatureScope scope(VFP3);
- if (!CpuFeatures::IsSupported(VFP3)) {
+ if (!CPU::SupportsCrankshaft()) {
__ Abort("Unreachable code: Cannot optimize without VFP3 support.");
return;
}
@@ -1365,9 +1438,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// receiver.
__ bind(&use_global_receiver);
const int kGlobalIndex =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
- __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
__ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
__ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
@@ -1560,9 +1633,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Use the current global receiver object as the receiver.
__ bind(&use_global_receiver);
const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
__ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
__ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
diff --git a/src/3rdparty/v8/src/arm/code-stubs-arm.cc b/src/3rdparty/v8/src/arm/code-stubs-arm.cc
index bea3169..a769f54 100644
--- a/src/3rdparty/v8/src/arm/code-stubs-arm.cc
+++ b/src/3rdparty/v8/src/arm/code-stubs-arm.cc
@@ -85,6 +85,8 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure from the given function info in new
// space. Set the context to the current context in cp.
+ Counters* counters = masm->isolate()->counters();
+
Label gc;
// Pop the function info from the stack.
@@ -98,32 +100,44 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
&gc,
TAG_OBJECT);
+ __ IncrementCounter(counters->fast_new_closure_total(), 1, r6, r7);
+
int map_index = (language_mode_ == CLASSIC_MODE)
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
- // Compute the function map in the current global context and set that
+ // Compute the function map in the current native context and set that
// as the map of the allocated object.
- __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
- __ ldr(r2, MemOperand(r2, Context::SlotOffset(map_index)));
- __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
+ __ ldr(r5, MemOperand(r2, Context::SlotOffset(map_index)));
+ __ str(r5, FieldMemOperand(r0, HeapObject::kMapOffset));
// Initialize the rest of the function. We don't have to update the
// write barrier because the allocated object is in new space.
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
- __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
- __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
__ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset));
__ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset));
- __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
+ __ str(r5, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
__ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
__ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
__ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
- __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ ldr(r1,
+ FieldMemOperand(r3, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ tst(r1, r1);
+ __ b(ne, &check_optimized);
+ }
+ __ bind(&install_unoptimized);
+ __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+ __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
__ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
__ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
__ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
@@ -131,6 +145,72 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Return result. The argument function info has been popped already.
__ Ret();
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, r6, r7);
+
+ // r2 holds native context, r1 points to fixed array of 3-element entries
+ // (native context, optimized code, literals).
+ // The optimized code map must never be empty, so check the first elements.
+ Label install_optimized;
+ // Speculatively move code object into r4.
+ __ ldr(r4, FieldMemOperand(r1, FixedArray::kHeaderSize + kPointerSize));
+ __ ldr(r5, FieldMemOperand(r1, FixedArray::kHeaderSize));
+ __ cmp(r2, r5);
+ __ b(eq, &install_optimized);
+
+ // Iterate through the rest of map backwards. r4 holds an index as a Smi.
+ Label loop;
+ __ ldr(r4, FieldMemOperand(r1, FixedArray::kLengthOffset));
+ __ bind(&loop);
+ // Do not double check first entry.
+
+ __ cmp(r4, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
+ __ b(eq, &install_unoptimized);
+ __ sub(r4, r4, Operand(
+ Smi::FromInt(SharedFunctionInfo::kEntryLength))); // Skip an entry.
+ __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(r5, MemOperand(r5));
+ __ cmp(r2, r5);
+ __ b(ne, &loop);
+ // Hit: fetch the optimized code.
+ __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r5, r5, Operand(kPointerSize));
+ __ ldr(r4, MemOperand(r5));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(),
+ 1, r6, r7);
+
+ // TODO(fschneider): Idea: store proper code pointers in the map and either
+ // unmangle them on marking or do nothing as the whole map is discarded on
+ // major GC anyway.
+ __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ str(r4, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
+
+ // Now link a function into a list of optimized functions.
+ __ ldr(r4, ContextOperand(r2, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
+  // No need for write barrier as JSFunction (r0) is in the new space.
+
+ __ str(r0, ContextOperand(r2, Context::OPTIMIZED_FUNCTIONS_LIST));
+  // Store JSFunction (r0) into r4 before issuing write barrier as
+ // it clobbers all the registers passed.
+ __ mov(r4, r0);
+ __ RecordWriteContextSlot(
+ r2,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ r4,
+ r1,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+
+ // Return result. The argument function info has been popped already.
+ __ Ret();
+
// Create a new closure through the slower runtime call.
__ bind(&gc);
__ LoadRoot(r4, Heap::kFalseValueRootIndex);
@@ -162,16 +242,16 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
// Set up the fixed slots, copy the global object from the previous context.
- __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ mov(r1, Operand(Smi::FromInt(0)));
__ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
__ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
- __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
// Copy the qml global object from the surrounding context.
- __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ str(r1, MemOperand(r0, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
+ __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
+ __ str(r1, MemOperand(r0, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
// Initialize the rest of the slots to undefined.
__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
@@ -214,9 +294,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(r2, Operand(Smi::FromInt(length)));
__ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
- // If this block context is nested in the global context we get a smi
+ // If this block context is nested in the native context we get a smi
// sentinel instead of a function. The block context should get the
- // canonical empty function of the global context as its closure which
+ // canonical empty function of the native context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(r3, &after_sentinel);
@@ -226,20 +306,20 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ Assert(eq, message);
}
__ ldr(r3, GlobalObjectOperand());
- __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
__ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
// Set up the fixed slots, copy the global object from the previous context.
- __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
__ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
__ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
- __ str(r2, ContextOperand(r0, Context::GLOBAL_INDEX));
+ __ str(r2, ContextOperand(r0, Context::GLOBAL_OBJECT_INDEX));
// Copy the qml global object from the surrounding context.
- __ ldr(r1, ContextOperand(cp, Context::QML_GLOBAL_INDEX));
- __ str(r1, ContextOperand(r0, Context::QML_GLOBAL_INDEX));
+ __ ldr(r1, ContextOperand(cp, Context::QML_GLOBAL_OBJECT_INDEX));
+ __ str(r1, ContextOperand(r0, Context::QML_GLOBAL_OBJECT_INDEX));
// Initialize the rest of the slots to the hole value.
__ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
@@ -527,8 +607,8 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
FloatingPointHelper::Destination destination,
Register scratch1,
Register scratch2) {
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ mov(scratch1, Operand(r0, ASR, kSmiTagSize));
__ vmov(d7.high(), scratch1);
__ vcvt_f64_s32(d7, d7.high());
@@ -583,11 +663,9 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Register scratch1,
Register scratch2,
Label* not_number) {
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
Label is_smi, done;
@@ -597,9 +675,9 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
// Handle loading a double from a heap number.
- if (CpuFeatures::IsSupported(VFP3) &&
+ if (CpuFeatures::IsSupported(VFP2) &&
destination == kVFPRegisters) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
// Load the double from tagged HeapNumber to double register.
__ sub(scratch1, object, Operand(kHeapObjectTag));
__ vldr(dst, scratch1, HeapNumber::kValueOffset);
@@ -612,8 +690,8 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
// Handle loading a double from a smi.
__ bind(&is_smi);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Convert smi to double using VFP instructions.
__ vmov(dst.high(), scratch1);
__ vcvt_f64_s32(dst, dst.high());
@@ -644,11 +722,9 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Register scratch3,
DwVfpRegister double_scratch,
Label* not_number) {
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
Label done;
Label not_in_int32_range;
@@ -690,8 +766,8 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
Label done;
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ vmov(single_scratch, int_scratch);
__ vcvt_f64_s32(double_dst, single_scratch);
if (destination == kCoreRegisters) {
@@ -754,6 +830,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
Register object,
Destination destination,
DwVfpRegister double_dst,
+ DwVfpRegister double_scratch,
Register dst1,
Register dst2,
Register heap_number_map,
@@ -776,25 +853,23 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
__ b(&done);
__ bind(&obj_is_not_smi);
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Load the number.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Load the double value.
__ sub(scratch1, object, Operand(kHeapObjectTag));
__ vldr(double_dst, scratch1, HeapNumber::kValueOffset);
__ EmitVFPTruncate(kRoundToZero,
- single_scratch,
- double_dst,
scratch1,
+ double_dst,
scratch2,
+ double_scratch,
kCheckForInexactConversion);
// Jump to not_int32 if the operation did not succeed.
@@ -834,7 +909,8 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
Register scratch1,
Register scratch2,
Register scratch3,
- DwVfpRegister double_scratch,
+ DwVfpRegister double_scratch0,
+ DwVfpRegister double_scratch1,
Label* not_int32) {
ASSERT(!dst.is(object));
ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
@@ -846,34 +922,29 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
__ UntagAndJumpIfSmi(dst, object, &done);
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Object is a heap number.
// Convert the floating point value to a 32-bit integer.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
- SwVfpRegister single_scratch = double_scratch.low();
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
+
// Load the double value.
__ sub(scratch1, object, Operand(kHeapObjectTag));
- __ vldr(double_scratch, scratch1, HeapNumber::kValueOffset);
+ __ vldr(double_scratch0, scratch1, HeapNumber::kValueOffset);
__ EmitVFPTruncate(kRoundToZero,
- single_scratch,
- double_scratch,
+ dst,
+ double_scratch0,
scratch1,
- scratch2,
+ double_scratch1,
kCheckForInexactConversion);
// Jump to not_int32 if the operation did not succeed.
__ b(ne, not_int32);
- // Get the result in the destination register.
- __ vmov(dst, single_scratch);
-
} else {
// Load the double value in the destination registers.
__ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
@@ -986,7 +1057,7 @@ void FloatingPointHelper::CallCCodeForDoubleOperation(
__ push(lr);
__ PrepareCallCFunction(0, 2, scratch);
if (masm->use_eabi_hardfloat()) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vmov(d0, r0, r1);
__ vmov(d1, r2, r3);
}
@@ -998,7 +1069,7 @@ void FloatingPointHelper::CallCCodeForDoubleOperation(
// Store answer in the overwritable heap number. Double returned in
// registers r0 and r1 or in d0.
if (masm->use_eabi_hardfloat()) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vstr(d0,
FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
} else {
@@ -1217,9 +1288,9 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
}
// Lhs is a smi, rhs is a number.
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
// Convert lhs to a double in d7.
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
// Load the double from rhs, tagged HeapNumber r0, to d6.
__ sub(r7, rhs, Operand(kHeapObjectTag));
@@ -1257,8 +1328,8 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
}
// Rhs is a smi, lhs is a heap number.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Load the double from lhs, tagged HeapNumber r1, to d7.
__ sub(r7, lhs, Operand(kHeapObjectTag));
__ vldr(d7, r7, HeapNumber::kValueOffset);
@@ -1370,7 +1441,7 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm,
__ push(lr);
__ PrepareCallCFunction(0, 2, r5);
if (masm->use_eabi_hardfloat()) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vmov(d0, r0, r1);
__ vmov(d1, r2, r3);
}
@@ -1445,8 +1516,8 @@ static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
// Both are heap numbers. Load them up then jump to the code we have
// for that.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ sub(r7, rhs, Operand(kHeapObjectTag));
__ vldr(d6, r7, HeapNumber::kValueOffset);
__ sub(r7, lhs, Operand(kHeapObjectTag));
@@ -1535,8 +1606,8 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
Label load_result_from_cache;
if (!object_is_smi) {
__ JumpIfSmi(object, &is_smi);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ CheckMap(object,
scratch1,
Heap::kHeapNumberMapRootIndex,
@@ -1698,9 +1769,9 @@ void CompareStub::Generate(MacroAssembler* masm) {
// The arguments have been converted to doubles and stored in d6 and d7, if
// VFP3 is supported, or in r0, r1, r2, and r3.
Isolate* isolate = masm->isolate();
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
__ bind(&lhs_not_nan);
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
Label no_nan;
// ARMv7 VFP3 instructions to implement double precision comparison.
__ VFPCompareAndSetFlags(d7, d6);
@@ -1818,11 +1889,9 @@ void CompareStub::Generate(MacroAssembler* masm) {
void ToBooleanStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
- // This stub uses VFP3 instructions.
- CpuFeatures::Scope scope(VFP3);
-
Label patch;
const Register map = r9.is(tos_) ? r7 : r9;
+ const Register temp = map;
// undefined -> false.
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
@@ -1875,13 +1944,56 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ b(ne, &not_heap_number);
- __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
- __ VFPCompareAndSetFlags(d1, 0.0);
- // "tos_" is a register, and contains a non zero value by default.
- // Hence we only need to overwrite "tos_" with zero to return false for
- // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
+
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
+
+ __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
+ __ VFPCompareAndSetFlags(d1, 0.0);
+ // "tos_" is a register, and contains a non zero value by default.
+ // Hence we only need to overwrite "tos_" with zero to return false for
+ // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
+ } else {
+ Label done, not_nan, not_zero;
+ __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kExponentOffset));
+ // -0 maps to false:
+ __ bic(
+ temp, temp, Operand(HeapNumber::kSignMask, RelocInfo::NONE), SetCC);
+ __ b(ne, &not_zero);
+ // If exponent word is zero then the answer depends on the mantissa word.
+ __ ldr(tos_, FieldMemOperand(tos_, HeapNumber::kMantissaOffset));
+ __ jmp(&done);
+
+ // Check for NaN.
+ __ bind(&not_zero);
+ // We already zeroed the sign bit, now shift out the mantissa so we only
+ // have the exponent left.
+ __ mov(temp, Operand(temp, LSR, HeapNumber::kMantissaBitsInTopWord));
+ unsigned int shifted_exponent_mask =
+ HeapNumber::kExponentMask >> HeapNumber::kMantissaBitsInTopWord;
+ __ cmp(temp, Operand(shifted_exponent_mask, RelocInfo::NONE));
+ __ b(ne, &not_nan); // If exponent is not 0x7ff then it can't be a NaN.
+
+ // Reload exponent word.
+ __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kExponentOffset));
+ __ tst(temp, Operand(HeapNumber::kMantissaMask, RelocInfo::NONE));
+ // If mantissa is not zero then we have a NaN, so return 0.
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+ __ b(ne, &done);
+
+ // Load mantissa word.
+ __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kMantissaOffset));
+ __ cmp(temp, Operand(0, RelocInfo::NONE));
+ // If mantissa is not zero then we have a NaN, so return 0.
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+ __ b(ne, &done);
+
+ __ bind(&not_nan);
+ __ mov(tos_, Operand(1, RelocInfo::NONE));
+ __ bind(&done);
+ }
__ Ret();
__ bind(&not_heap_number);
}
@@ -1931,7 +2043,7 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
// restore them.
__ stm(db_w, sp, kCallerSaved | lr.bit());
if (save_doubles_ == kSaveFPRegs) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ sub(sp, sp, Operand(kDoubleSize * DwVfpRegister::kNumRegisters));
for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) {
DwVfpRegister reg = DwVfpRegister::from_code(i);
@@ -1949,7 +2061,7 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
ExternalReference::store_buffer_overflow_function(masm->isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) {
DwVfpRegister reg = DwVfpRegister::from_code(i);
__ vldr(reg, MemOperand(sp, i * kDoubleSize));
@@ -2179,9 +2291,9 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
__ mov(r0, r2); // Move newly allocated heap number to r0.
}
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
// Convert the int32 in r1 to the heap number in r0. r2 is corrupted.
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vmov(s0, r1);
__ vcvt_f64_s32(d0, s0);
__ sub(r2, r0, Operand(kHeapObjectTag));
@@ -2464,9 +2576,9 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
Register scratch3 = r4;
ASSERT(smi_operands || (not_numbers != NULL));
- if (smi_operands && FLAG_debug_code) {
- __ AbortIfNotSmi(left);
- __ AbortIfNotSmi(right);
+ if (smi_operands) {
+ __ AssertSmi(left);
+ __ AssertSmi(right);
}
Register heap_number_map = r6;
@@ -2481,7 +2593,7 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
// Load left and right operands into d6 and d7 or r0/r1 and r2/r3
// depending on whether VFP3 is available or not.
FloatingPointHelper::Destination destination =
- CpuFeatures::IsSupported(VFP3) &&
+ CpuFeatures::IsSupported(VFP2) &&
op_ != Token::MOD ?
FloatingPointHelper::kVFPRegisters :
FloatingPointHelper::kCoreRegisters;
@@ -2508,7 +2620,7 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
// Using VFP registers:
// d6: Left value
// d7: Right value
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
switch (op_) {
case Token::ADD:
__ vadd(d5, d6, d7);
@@ -2597,7 +2709,7 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
// The code below for writing into heap numbers isn't capable of
// writing the register as an unsigned int so we go to slow case if we
// hit this case.
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
__ b(mi, &result_not_a_smi);
} else {
__ b(mi, not_numbers);
@@ -2636,10 +2748,10 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
// result.
__ mov(r0, Operand(r5));
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
// Convert the int32 in r2 to the heap number in r0. r3 is corrupted. As
// mentioned above SHR needs to always produce a positive result.
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vmov(s0, r2);
if (op_ == Token::SHR) {
__ vcvt_f64_u32(d0, s0);
@@ -2768,7 +2880,6 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
Register scratch1 = r7;
Register scratch2 = r9;
DwVfpRegister double_scratch = d0;
- SwVfpRegister single_scratch = s3;
Register heap_number_result = no_reg;
Register heap_number_map = r6;
@@ -2798,7 +2909,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
// Jump to type transition if they are not. The registers r0 and r1 (right
// and left) are preserved for the runtime call.
FloatingPointHelper::Destination destination =
- (CpuFeatures::IsSupported(VFP3) && op_ != Token::MOD)
+ (CpuFeatures::IsSupported(VFP2) && op_ != Token::MOD)
? FloatingPointHelper::kVFPRegisters
: FloatingPointHelper::kCoreRegisters;
@@ -2806,6 +2917,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
right,
destination,
d7,
+ d8,
r2,
r3,
heap_number_map,
@@ -2817,6 +2929,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
left,
destination,
d6,
+ d8,
r4,
r5,
heap_number_map,
@@ -2826,7 +2939,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
&transition);
if (destination == FloatingPointHelper::kVFPRegisters) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
Label return_heap_number;
switch (op_) {
case Token::ADD:
@@ -2852,10 +2965,10 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
// transition.
__ EmitVFPTruncate(kRoundToZero,
- single_scratch,
- d5,
scratch1,
- scratch2);
+ d5,
+ scratch2,
+ d8);
if (result_type_ <= BinaryOpIC::INT32) {
// If the ne condition is set, result does
@@ -2864,7 +2977,6 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
}
// Check if the result fits in a smi.
- __ vmov(scratch1, single_scratch);
__ add(scratch2, scratch1, Operand(0x40000000), SetCC);
// If not try to return a heap number.
__ b(mi, &return_heap_number);
@@ -2959,6 +3071,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
scratch2,
scratch3,
d0,
+ d1,
&transition);
FloatingPointHelper::LoadNumberAsInt32(masm,
right,
@@ -2968,6 +3081,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
scratch2,
scratch3,
d0,
+ d1,
&transition);
// The ECMA-262 standard specifies that, for shift operations, only the
@@ -2993,9 +3107,9 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
// We only get a negative result if the shift value (r2) is 0.
// This result cannot be respresented as a signed 32-bit integer, try
// to return a heap number if we can.
- // The non vfp3 code does not support this special case, so jump to
+ // The non vfp2 code does not support this special case, so jump to
// runtime if we don't support it.
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
__ b(mi, (result_type_ <= BinaryOpIC::INT32)
? &transition
: &return_heap_number);
@@ -3030,8 +3144,8 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
scratch2,
&call_runtime);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
if (op_ != Token::SHR) {
// Convert the result to a floating point value.
__ vmov(double_scratch.low(), r2);
@@ -3260,8 +3374,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
const Register cache_entry = r0;
const bool tagged = (argument_type_ == TAGGED);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
if (tagged) {
// Argument is a number and is on stack and in r0.
// Load argument and check if it is a smi.
@@ -3362,23 +3476,23 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
ExternalReference(RuntimeFunction(), masm->isolate());
__ TailCallExternalReference(runtime_function, 1, 1);
} else {
- if (!CpuFeatures::IsSupported(VFP3)) UNREACHABLE();
- CpuFeatures::Scope scope(VFP3);
+ ASSERT(CpuFeatures::IsSupported(VFP2));
+ CpuFeatures::Scope scope(VFP2);
Label no_update;
Label skip_cache;
// Call C function to calculate the result and update the cache.
- // Register r0 holds precalculated cache entry address; preserve
- // it on the stack and pop it into register cache_entry after the
- // call.
- __ push(cache_entry);
+ // r0: precalculated cache entry address.
+ // r2 and r3: parts of the double value.
+ // Store r0, r2 and r3 on stack for later before calling C function.
+ __ Push(r3, r2, cache_entry);
GenerateCallCFunction(masm, scratch0);
__ GetCFunctionDoubleResult(d2);
// Try to update the cache. If we cannot allocate a
// heap number, we return the result without updating.
- __ pop(cache_entry);
+ __ Pop(r3, r2, cache_entry);
__ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(r6, scratch0, scratch1, r5, &no_update);
__ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
@@ -3424,6 +3538,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
Register scratch) {
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
Isolate* isolate = masm->isolate();
__ push(lr);
@@ -3484,7 +3599,7 @@ void InterruptStub::Generate(MacroAssembler* masm) {
void MathPowStub::Generate(MacroAssembler* masm) {
- CpuFeatures::Scope vfp3_scope(VFP3);
+ CpuFeatures::Scope vfp2_scope(VFP2);
const Register base = r1;
const Register exponent = r2;
const Register heapnumbermap = r5;
@@ -3553,13 +3668,13 @@ void MathPowStub::Generate(MacroAssembler* masm) {
Label not_plus_half;
// Test for 0.5.
- __ vmov(double_scratch, 0.5);
+ __ vmov(double_scratch, 0.5, scratch);
__ VFPCompareAndSetFlags(double_exponent, double_scratch);
__ b(ne, &not_plus_half);
// Calculates square root of base. Check for the special case of
// Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
- __ vmov(double_scratch, -V8_INFINITY);
+ __ vmov(double_scratch, -V8_INFINITY, scratch);
__ VFPCompareAndSetFlags(double_base, double_scratch);
__ vneg(double_result, double_scratch, eq);
__ b(eq, &done);
@@ -3570,20 +3685,20 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ jmp(&done);
__ bind(&not_plus_half);
- __ vmov(double_scratch, -0.5);
+ __ vmov(double_scratch, -0.5, scratch);
__ VFPCompareAndSetFlags(double_exponent, double_scratch);
__ b(ne, &call_runtime);
// Calculates square root of base. Check for the special case of
// Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
- __ vmov(double_scratch, -V8_INFINITY);
+ __ vmov(double_scratch, -V8_INFINITY, scratch);
__ VFPCompareAndSetFlags(double_base, double_scratch);
__ vmov(double_result, kDoubleRegZero, eq);
__ b(eq, &done);
// Add +0 to convert -0 to +0.
__ vadd(double_scratch, double_base, kDoubleRegZero);
- __ vmov(double_result, 1);
+ __ vmov(double_result, 1.0, scratch);
__ vsqrt(double_scratch, double_scratch);
__ vdiv(double_result, double_result, double_scratch);
__ jmp(&done);
@@ -3618,7 +3733,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ mov(exponent, scratch);
}
__ vmov(double_scratch, double_base); // Back up base.
- __ vmov(double_result, 1.0);
+ __ vmov(double_result, 1.0, scratch2);
// Get absolute value of exponent.
__ cmp(scratch, Operand(0));
@@ -3634,7 +3749,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ cmp(exponent, Operand(0));
__ b(ge, &done);
- __ vmov(double_scratch, 1.0);
+ __ vmov(double_scratch, 1.0, scratch);
__ vdiv(double_result, double_scratch, double_result);
// Test whether result is zero. Bail out to check for subnormal result.
// Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
@@ -3776,9 +3891,13 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Compute the return address in lr to return to after the jump below. Pc is
// already at '+ 8' from the current instruction but return is after three
// instructions so add another 4 to pc to get the return address.
- masm->add(lr, pc, Operand(4));
- __ str(lr, MemOperand(sp, 0));
- masm->Jump(r5);
+ {
+ // Prevent literal pool emission before return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ masm->add(lr, pc, Operand(4));
+ __ str(lr, MemOperand(sp, 0));
+ masm->Jump(r5);
+ }
if (always_allocate) {
// It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
@@ -3936,8 +4055,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Save callee-saved registers (incl. cp and fp), sp, and lr
__ stm(db_w, sp, kCalleeSaved | lr.bit());
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Save callee-saved vfp registers.
__ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
// Set up the reserved register for 0.0.
@@ -3952,7 +4071,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Set up argv in r4.
int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
}
__ ldr(r4, MemOperand(sp, offset_to_argv));
@@ -3995,14 +4114,21 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
__ jmp(&invoke);
- __ bind(&handler_entry);
- handler_offset_ = handler_entry.pos();
- // Caught exception: Store result (exception) in the pending exception
- // field in the JSEnv and return a failure sentinel. Coming in here the
- // fp will be invalid because the PushTryHandler below sets it to 0 to
- // signal the existence of the JSEntry frame.
- __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+
+ // Block literal pool emission whilst taking the position of the handler
+ // entry. This avoids making the assumption that literal pools are always
+ // emitted after an instruction is emitted, rather than before.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel. Coming in here the
+ // fp will be invalid because the PushTryHandler below sets it to 0 to
+ // signal the existence of the JSEntry frame.
+ __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
+ isolate)));
+ }
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
@@ -4045,9 +4171,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Branch and link to JSEntryTrampoline. We don't use the double underscore
// macro for the add instruction because we don't want the coverage tool
- // inserting instructions here after we read the pc.
- __ mov(lr, Operand(pc));
- masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // inserting instructions here after we read the pc. We block literal pool
+ // emission for the same reason.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ mov(lr, Operand(pc));
+ masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ }
// Unlink this frame from the handler chain.
__ PopTryHandler();
@@ -4079,8 +4209,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
}
#endif
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Restore callee-saved vfp registers.
__ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
}
@@ -4409,14 +4539,14 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// r0 = address of new object(s) (tagged)
// r2 = argument count (tagged)
- // Get the arguments boilerplate from the current (global) context into r4.
+ // Get the arguments boilerplate from the current native context into r4.
const int kNormalOffset =
Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
- __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
+ __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
__ cmp(r1, Operand::Zero());
__ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
@@ -4589,9 +4719,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
static_cast<AllocationFlags>(TAG_OBJECT |
SIZE_IN_WORDS));
- // Get the arguments boilerplate from the current (global) context.
- __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
+ // Get the arguments boilerplate from the current native context.
+ __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
__ ldr(r4, MemOperand(r4, Context::SlotOffset(
Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
@@ -4720,7 +4850,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
__ add(r2, r2, Operand(2)); // r2 was a smi.
// Check that the static offsets vector buffer is large enough.
- __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
+ __ cmp(r2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize));
__ b(hi, &runtime);
// r2: Number of capture registers
@@ -4832,7 +4962,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// r0: Instance type of subject string
- STATIC_ASSERT(4 == kAsciiStringTag);
+ STATIC_ASSERT(4 == kOneByteStringTag);
STATIC_ASSERT(kTwoByteStringTag == 0);
// Find the code object based on the assumptions above.
__ and_(r0, r0, Operand(kStringEncodingMask));
@@ -4863,27 +4993,32 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
- const int kRegExpExecuteArguments = 8;
+ const int kRegExpExecuteArguments = 9;
const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
// Stack pointer now points to cell where return address is to be written.
// Arguments are before that on the stack or in registers.
- // Argument 8 (sp[16]): Pass current isolate address.
+ // Argument 9 (sp[20]): Pass current isolate address.
__ mov(r0, Operand(ExternalReference::isolate_address()));
- __ str(r0, MemOperand(sp, 4 * kPointerSize));
+ __ str(r0, MemOperand(sp, 5 * kPointerSize));
- // Argument 7 (sp[12]): Indicate that this is a direct call from JavaScript.
+ // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
__ mov(r0, Operand(1));
- __ str(r0, MemOperand(sp, 3 * kPointerSize));
+ __ str(r0, MemOperand(sp, 4 * kPointerSize));
- // Argument 6 (sp[8]): Start (high end) of backtracking stack memory area.
+ // Argument 7 (sp[12]): Start (high end) of backtracking stack memory area.
__ mov(r0, Operand(address_of_regexp_stack_memory_address));
__ ldr(r0, MemOperand(r0, 0));
__ mov(r2, Operand(address_of_regexp_stack_memory_size));
__ ldr(r2, MemOperand(r2, 0));
__ add(r0, r0, Operand(r2));
+ __ str(r0, MemOperand(sp, 3 * kPointerSize));
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(r0, Operand(0));
__ str(r0, MemOperand(sp, 2 * kPointerSize));
// Argument 5 (sp[4]): static offsets vector buffer.
@@ -4932,7 +5067,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmp(r0, Operand(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ b(eq, &success);
Label failure;
__ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
@@ -5099,10 +5236,10 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set empty properties FixedArray.
// Set elements to point to FixedArray allocated right after the JSArray.
// Interleave operations for better latency.
- __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ add(r3, r0, Operand(JSRegExpResult::kSize));
__ mov(r4, Operand(factory->empty_fixed_array()));
- __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
__ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
__ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
__ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
@@ -5127,12 +5264,12 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set FixedArray length.
__ mov(r6, Operand(r5, LSL, kSmiTagSize));
__ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
- // Fill contents of fixed-array with the-hole.
- __ mov(r2, Operand(factory->the_hole_value()));
+ // Fill contents of fixed-array with undefined.
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- // Fill fixed array elements with hole.
+ // Fill fixed array elements with undefined.
// r0: JSArray, tagged.
- // r2: the hole.
+ // r2: undefined.
// r3: Start of elements in FixedArray.
// r5: Number of elements to fill.
Label loop;
@@ -5208,7 +5345,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(ne, &call);
// Patch the receiver on the stack with the global receiver object.
- __ ldr(r3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(r3,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset));
__ str(r3, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
@@ -5900,23 +6038,28 @@ void SubStringStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
- // I.e., arithmetic shift right by one un-smi-tags.
- __ mov(r2, Operand(r2, ASR, 1), SetCC);
- __ mov(r3, Operand(r3, ASR, 1), SetCC, cc);
- // If either to or from had the smi tag bit set, then carry is set now.
- __ b(cs, &runtime); // Either "from" or "to" is not a smi.
+ // Arithmetic shift right by one un-smi-tags. In this case we rotate right
+ // instead because we bail out on non-smi values: ROR and ASR are equivalent
+ // for smis but they set the flags in a way that's easier to optimize.
+ __ mov(r2, Operand(r2, ROR, 1), SetCC);
+ __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
+ // If either to or from had the smi tag bit set, then C is set now, and N
+ // has the same value: we rotated by 1, so the bottom bit is now the top bit.
// We want to bailout to runtime here if From is negative. In that case, the
// next instruction is not executed and we fall through to bailing out to
- // runtime. pl is the opposite of mi.
- // Both r2 and r3 are untagged integers.
- __ sub(r2, r2, Operand(r3), SetCC, pl);
- __ b(mi, &runtime); // Fail if from > to.
+ // runtime.
+ // Executed if both r2 and r3 are untagged integers.
+ __ sub(r2, r2, Operand(r3), SetCC, cc);
+ // One of the above un-smis or the above SUB could have set N==1.
+ __ b(mi, &runtime); // Either "from" or "to" is not an smi, or from > to.
// Make sure first argument is a string.
__ ldr(r0, MemOperand(sp, kStringOffset));
STATIC_ASSERT(kSmiTag == 0);
- __ JumpIfSmi(r0, &runtime);
- Condition is_string = masm->IsObjectStringType(r0, r1);
+ // Do a JumpIfSmi, but fold its jump into the subsequent string test.
+ __ tst(r0, Operand(kSmiTagMask));
+ Condition is_string = masm->IsObjectStringType(r0, r1, ne);
+ ASSERT(is_string == eq);
__ b(NegateCondition(is_string), &runtime);
// Short-cut for the case of trivial substring.
@@ -5987,7 +6130,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ tst(r1, Operand(kStringEncodingMask));
__ b(eq, &two_byte_slice);
@@ -6030,7 +6173,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&allocate_result);
// Sequential acii string. Allocate the result.
- STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ tst(r1, Operand(kStringEncodingMask));
__ b(eq, &two_byte_sequential);
@@ -6395,9 +6538,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ tst(r5, Operand(kAsciiDataHintMask), ne);
__ b(ne, &ascii_data);
__ eor(r4, r4, Operand(r5));
- STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
- __ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
- __ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
+ STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+ __ and_(r4, r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
+ __ cmp(r4, Operand(kOneByteStringTag | kAsciiDataHintTag));
__ b(eq, &ascii_data);
// Allocate a two byte cons string.
@@ -6600,8 +6743,8 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or VFP3 is unsupported.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Load left and right operand
__ sub(r2, r1, Operand(kHeapObjectTag));
@@ -6860,6 +7003,10 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
__ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET));
+
+ // Prevent literal pool emission during calculation of return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+
// Push return address (accessible to GC through exit frame pc).
// Note that using pc with str is deprecated.
Label start;
@@ -6970,8 +7117,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
ASSERT(!name.is(scratch1));
ASSERT(!name.is(scratch2));
- // Assert that name contains a string.
- if (FLAG_debug_code) __ AbortIfNotString(name);
+ __ AssertString(name);
// Compute the capacity mask.
__ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
@@ -7150,8 +7296,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(r3), REG(r2), REG(r4), EMIT_REMEMBERED_SET },
{ REG(r2), REG(r3), REG(r4), EMIT_REMEMBERED_SET },
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(r2), REG(r3), REG(r9), EMIT_REMEMBERED_SET },
{ REG(r2), REG(r3), REG(r9), OMIT_REMEMBERED_SET },
@@ -7160,12 +7306,15 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
{ REG(r2), REG(r6), REG(r9), EMIT_REMEMBERED_SET },
// StoreArrayLiteralElementStub::Generate
{ REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
+ // FastNewClosureStub::Generate
+ { REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
#undef REG
+
bool RecordWriteStub::IsPregenerated() {
for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
@@ -7207,6 +7356,11 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
}
+bool CodeStub::CanUseFPRegisters() {
+ return CpuFeatures::IsSupported(VFP2);
+}
+
+
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
@@ -7220,8 +7374,13 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
// forth between a compare instructions (a nop in this position) and the
// real branch when we start and stop incremental heap marking.
// See RecordWriteStub::Patch for details.
- __ b(&skip_to_incremental_noncompacting);
- __ b(&skip_to_incremental_compacting);
+ {
+ // Block literal pool emission, as the position of these two instructions
+ // is assumed by the patching code.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ b(&skip_to_incremental_noncompacting);
+ __ b(&skip_to_incremental_compacting);
+ }
if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
__ RememberedSetHelper(object_,
@@ -7330,6 +7489,16 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_scratch;
+ __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
+ __ ldr(regs_.scratch1(),
+ MemOperand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
+ __ str(regs_.scratch1(),
+ MemOperand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ b(mi, &need_incremental);
+
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -7414,9 +7583,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
__ CheckFastElements(r2, r5, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
__ JumpIfSmi(r0, &smi_element);
- __ CheckFastSmiOnlyElements(r2, r5, &fast_elements);
+ __ CheckFastSmiElements(r2, r5, &fast_elements);
// Store into the array literal requires a elements transition. Call into
// the runtime.
@@ -7428,7 +7597,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Push(r5, r4);
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
__ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -7439,8 +7608,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
__ Ret();
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
__ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -7450,11 +7619,73 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
+ __ StoreNumberToDoubleElements(r0, r3, r1,
+ // Overwrites all regs after this.
+ r5, r6, r7, r9, r2,
&slow_elements);
__ Ret();
}
+
+void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
+ if (entry_hook_ != NULL) {
+ PredictableCodeSizeScope predictable(masm);
+ ProfileEntryHookStub stub;
+ __ push(lr);
+ __ CallStub(&stub);
+ __ pop(lr);
+ }
+}
+
+
+void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
+ // The entry hook is a "push lr" instruction, followed by a call.
+ const int32_t kReturnAddressDistanceFromFunctionStart =
+ 3 * Assembler::kInstrSize;
+
+ // Save live volatile registers.
+ __ Push(lr, r5, r1);
+ const int32_t kNumSavedRegs = 3;
+
+ // Compute the function's address for the first argument.
+ __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));
+
+ // The caller's return address is above the saved temporaries.
+ // Grab that for the second argument to the hook.
+ __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));
+
+ // Align the stack if necessary.
+ int frame_alignment = masm->ActivationFrameAlignment();
+ if (frame_alignment > kPointerSize) {
+ __ mov(r5, sp);
+ ASSERT(IsPowerOf2(frame_alignment));
+ __ and_(sp, sp, Operand(-frame_alignment));
+ }
+
+#if defined(V8_HOST_ARCH_ARM)
+ __ mov(ip, Operand(reinterpret_cast<int32_t>(&entry_hook_)));
+ __ ldr(ip, MemOperand(ip));
+#else
+ // Under the simulator we need to indirect the entry hook through a
+ // trampoline function at a known address.
+ Address trampoline_address = reinterpret_cast<Address>(
+ reinterpret_cast<intptr_t>(EntryHookTrampoline));
+ ApiFunction dispatcher(trampoline_address);
+ __ mov(ip, Operand(ExternalReference(&dispatcher,
+ ExternalReference::BUILTIN_CALL,
+ masm->isolate())));
+#endif
+ __ Call(ip);
+
+ // Restore the stack pointer if needed.
+ if (frame_alignment > kPointerSize) {
+ __ mov(sp, r5);
+ }
+
+ __ Pop(lr, r5, r1);
+ __ Ret();
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/arm/code-stubs-arm.h b/src/3rdparty/v8/src/arm/code-stubs-arm.h
index 38ed476..3e79624 100644
--- a/src/3rdparty/v8/src/arm/code-stubs-arm.h
+++ b/src/3rdparty/v8/src/arm/code-stubs-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -149,7 +149,7 @@ class BinaryOpStub: public CodeStub {
mode_(mode),
operands_type_(BinaryOpIC::UNINITIALIZED),
result_type_(BinaryOpIC::UNINITIALIZED) {
- use_vfp3_ = CpuFeatures::IsSupported(VFP3);
+ use_vfp2_ = CpuFeatures::IsSupported(VFP2);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -159,7 +159,7 @@ class BinaryOpStub: public CodeStub {
BinaryOpIC::TypeInfo result_type = BinaryOpIC::UNINITIALIZED)
: op_(OpBits::decode(key)),
mode_(ModeBits::decode(key)),
- use_vfp3_(VFP3Bits::decode(key)),
+ use_vfp2_(VFP2Bits::decode(key)),
operands_type_(operands_type),
result_type_(result_type) { }
@@ -171,7 +171,7 @@ class BinaryOpStub: public CodeStub {
Token::Value op_;
OverwriteMode mode_;
- bool use_vfp3_;
+ bool use_vfp2_;
// Operand type information determined at runtime.
BinaryOpIC::TypeInfo operands_type_;
@@ -182,7 +182,7 @@ class BinaryOpStub: public CodeStub {
// Minor key encoding in 16 bits RRRTTTVOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {};
class OpBits: public BitField<Token::Value, 2, 7> {};
- class VFP3Bits: public BitField<bool, 9, 1> {};
+ class VFP2Bits: public BitField<bool, 9, 1> {};
class OperandTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 10, 3> {};
class ResultTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 13, 3> {};
@@ -190,7 +190,7 @@ class BinaryOpStub: public CodeStub {
int MinorKey() {
return OpBits::encode(op_)
| ModeBits::encode(mode_)
- | VFP3Bits::encode(use_vfp3_)
+ | VFP2Bits::encode(use_vfp2_)
| OperandTypeInfoBits::encode(operands_type_)
| ResultTypeInfoBits::encode(result_type_);
}
@@ -571,7 +571,7 @@ class RecordWriteStub: public CodeStub {
void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
masm->stm(db_w, sp, (kCallerSaved | lr.bit()) & ~scratch1_.bit());
if (mode == kSaveFPRegs) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
masm->sub(sp,
sp,
Operand(kDoubleSize * (DwVfpRegister::kNumRegisters - 1)));
@@ -586,7 +586,7 @@ class RecordWriteStub: public CodeStub {
inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
SaveFPRegsMode mode) {
if (mode == kSaveFPRegs) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
// Restore all VFP registers except d0.
for (int i = DwVfpRegister::kNumRegisters - 1; i > 0; i--) {
DwVfpRegister reg = DwVfpRegister::from_code(i);
@@ -773,6 +773,7 @@ class FloatingPointHelper : public AllStatic {
Register object,
Destination destination,
DwVfpRegister double_dst,
+ DwVfpRegister double_scratch,
Register dst1,
Register dst2,
Register heap_number_map,
@@ -794,7 +795,8 @@ class FloatingPointHelper : public AllStatic {
Register scratch1,
Register scratch2,
Register scratch3,
- DwVfpRegister double_scratch,
+ DwVfpRegister double_scratch0,
+ DwVfpRegister double_scratch1,
Label* not_int32);
// Generate non VFP3 code to check if a double can be exactly represented by a
diff --git a/src/3rdparty/v8/src/arm/codegen-arm.cc b/src/3rdparty/v8/src/arm/codegen-arm.cc
index befd8f2..209e151 100644
--- a/src/3rdparty/v8/src/arm/codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/codegen-arm.cc
@@ -73,7 +73,7 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
// -------------------------------------------------------------------------
// Code generators
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : value
@@ -96,7 +96,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- r0 : value
@@ -107,7 +107,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- r4 : scratch (elements)
// -----------------------------------
Label loop, entry, convert_hole, gc_required, only_change_map, done;
- bool vfp3_supported = CpuFeatures::IsSupported(VFP3);
+ bool vfp2_supported = CpuFeatures::IsSupported(VFP2);
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
@@ -121,15 +121,34 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// r5: number of elements (smi-tagged)
// Allocate new FixedDoubleArray.
- __ mov(lr, Operand(FixedDoubleArray::kHeaderSize));
- __ add(lr, lr, Operand(r5, LSL, 2));
+ // Use lr as a temporary register.
+ __ mov(lr, Operand(r5, LSL, 2));
+ __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize + kPointerSize));
__ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
- // r6: destination FixedDoubleArray, not tagged as heap object
+ // r6: destination FixedDoubleArray, not tagged as heap object.
+
+ // Align the array conveniently for doubles.
+ // Store a filler value in the unused memory.
+ Label aligned, aligned_done;
+ __ tst(r6, Operand(kDoubleAlignmentMask));
+ __ mov(ip, Operand(masm->isolate()->factory()->one_pointer_filler_map()));
+ __ b(eq, &aligned);
+ // Store at the beginning of the allocated memory and update the base pointer.
+ __ str(ip, MemOperand(r6, kPointerSize, PostIndex));
+ __ b(&aligned_done);
+
+ __ bind(&aligned);
+ // Store the filler at the end of the allocated memory.
+ __ sub(lr, lr, Operand(kPointerSize));
+ __ str(ip, MemOperand(r6, lr));
+
+ __ bind(&aligned_done);
+
// Set destination FixedDoubleArray's length and map.
__ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex);
__ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
- __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
// Update receiver's map.
+ __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
__ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
__ RecordWriteField(r2,
@@ -163,7 +182,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// r5: kHoleNanUpper32
// r6: end of destination FixedDoubleArray, not tagged
// r7: begin of FixedDoubleArray element fields, not tagged
- if (!vfp3_supported) __ Push(r1, r0);
+ if (!vfp2_supported) __ Push(r1, r0);
__ b(&entry);
@@ -191,8 +210,8 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ UntagAndJumpIfNotSmi(r9, r9, &convert_hole);
// Normal smi, convert to double and store.
- if (vfp3_supported) {
- CpuFeatures::Scope scope(VFP3);
+ if (vfp2_supported) {
+ CpuFeatures::Scope scope(VFP2);
__ vmov(s0, r9);
__ vcvt_f64_s32(d0, s0);
__ vstr(d0, r7, 0);
@@ -225,7 +244,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ cmp(r7, r6);
__ b(lt, &loop);
- if (!vfp3_supported) __ Pop(r1, r0);
+ if (!vfp2_supported) __ Pop(r1, r0);
__ pop(lr);
__ bind(&done);
}
@@ -433,6 +452,92 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
#undef __
+// add(r0, pc, Operand(-8))
+static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+ // The sequence of instructions that is patched out for aging code is the
+ // following boilerplate stack-building prologue that is found in FUNCTIONS
+ static bool initialized = false;
+ static uint32_t sequence[kNoCodeAgeSequenceLength];
+ byte* byte_sequence = reinterpret_cast<byte*>(sequence);
+ *length = kNoCodeAgeSequenceLength * Assembler::kInstrSize;
+ if (!initialized) {
+ CodePatcher patcher(byte_sequence, kNoCodeAgeSequenceLength);
+ patcher.masm()->stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ patcher.masm()->LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ patcher.masm()->add(fp, sp, Operand(2 * kPointerSize));
+ initialized = true;
+ }
+ return byte_sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+ byte* start = instruction_start();
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (!memcmp(start, young_sequence, young_length) ||
+ Memory::uint32_at(start) == kCodeAgePatchFirstInstruction) {
+ return start;
+ } else {
+ byte* start_after_strict = NULL;
+ if (kind() == FUNCTION) {
+ start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+ } else {
+ ASSERT(kind() == OPTIMIZED_FUNCTION);
+ start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+ }
+ ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+ Memory::uint32_at(start_after_strict) ==
+ kCodeAgePatchFirstInstruction);
+ return start_after_strict;
+ }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ bool result = !memcmp(sequence, young_sequence, young_length);
+ ASSERT(result ||
+ Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction);
+ return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+ MarkingParity* parity) {
+ if (IsYoungSequence(sequence)) {
+ *age = kNoAge;
+ *parity = NO_MARKING_PARITY;
+ } else {
+ Address target_address = Memory::Address_at(
+ sequence + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1));
+ Code* stub = GetCodeFromTargetAddress(target_address);
+ GetCodeAgeAndParity(stub, age, parity);
+ }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+ Code::Age age,
+ MarkingParity parity) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (age == kNoAge) {
+ memcpy(sequence, young_sequence, young_length);
+ CPU::FlushICache(sequence, young_length);
+ } else {
+ Code* stub = GetCodeAgeStub(age, parity);
+ CodePatcher patcher(sequence, young_length / Assembler::kInstrSize);
+ patcher.masm()->add(r0, pc, Operand(-8));
+ patcher.masm()->ldr(pc, MemOperand(pc, -4));
+ patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+ }
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM
diff --git a/src/3rdparty/v8/src/arm/codegen-arm.h b/src/3rdparty/v8/src/arm/codegen-arm.h
index c340e6b..c77844d 100644
--- a/src/3rdparty/v8/src/arm/codegen-arm.h
+++ b/src/3rdparty/v8/src/arm/codegen-arm.h
@@ -34,6 +34,9 @@
namespace v8 {
namespace internal {
+static const int kSizeOfFullCodegenStrictModePrologue = 16;
+static const int kSizeOfOptimizedStrictModePrologue = 16;
+
// Forward declarations
class CompilationInfo;
diff --git a/src/3rdparty/v8/src/arm/constants-arm.h b/src/3rdparty/v8/src/arm/constants-arm.h
index e767001..03876f9 100644
--- a/src/3rdparty/v8/src/arm/constants-arm.h
+++ b/src/3rdparty/v8/src/arm/constants-arm.h
@@ -29,14 +29,14 @@
#define V8_ARM_CONSTANTS_ARM_H_
// ARM EABI is required.
-#if defined(__arm__) && !defined(__ARM_EABI__)
+#if defined(__arm__) && !defined(__ARM_EABI__) && !defined(_WIN32_WCE)
#error ARM EABI support is required.
#endif
// This means that interwork-compatible jump instructions are generated. We
// want to generate them on the simulator too so it makes snapshots that can
// be used on real hardware.
-#if defined(__THUMB_INTERWORK__) || !defined(__arm__)
+#if defined(__THUMB_INTERWORK__) || !defined(__arm__) || defined(_WIN32_WCE)
# define USE_THUMB_INTERWORK 1
#endif
@@ -56,16 +56,19 @@
# define CAN_USE_ARMV6_INSTRUCTIONS 1
#endif
-#if defined(__ARM_ARCH_5T__) || \
- defined(__ARM_ARCH_5TE__) || \
+#if defined(__ARM_ARCH_5T__) || \
+ defined(__ARM_ARCH_5TE__) || \
+ defined(__ARM_ARCH_5TEJ__) || \
defined(CAN_USE_ARMV6_INSTRUCTIONS)
# define CAN_USE_ARMV5_INSTRUCTIONS 1
# define CAN_USE_THUMB_INSTRUCTIONS 1
#endif
// Simulator should support ARM5 instructions and unaligned access by default.
-#if !defined(__arm__)
-# define CAN_USE_ARMV5_INSTRUCTIONS 1
+#if !defined(__arm__) || defined(_WIN32_WCE)
+# if !defined(_WIN32_WCE)
+# define CAN_USE_ARMV5_INSTRUCTIONS 1
+# endif
# define CAN_USE_THUMB_INSTRUCTIONS 1
# ifndef CAN_USE_UNALIGNED_ACCESSES
@@ -74,10 +77,6 @@
#endif
-#if CAN_USE_UNALIGNED_ACCESSES
-#define V8_TARGET_CAN_READ_UNALIGNED 1
-#endif
-
// Using blx may yield better code, so use it when required or when available
#if defined(USE_THUMB_INTERWORK) || defined(CAN_USE_ARMV5_INSTRUCTIONS)
#define USE_BLX 1
@@ -87,9 +86,18 @@ namespace v8 {
namespace internal {
// Constant pool marker.
-const int kConstantPoolMarkerMask = 0xffe00000;
-const int kConstantPoolMarker = 0x0c000000;
-const int kConstantPoolLengthMask = 0x001ffff;
+// Use UDF, the permanently undefined instruction.
+const int kConstantPoolMarkerMask = 0xfff000f0;
+const int kConstantPoolMarker = 0xe7f000f0;
+const int kConstantPoolLengthMaxMask = 0xffff;
+inline int EncodeConstantPoolLength(int length) {
+ ASSERT((length & kConstantPoolLengthMaxMask) == length);
+ return ((length & 0xfff0) << 4) | (length & 0xf);
+}
+inline int DecodeConstantPoolLength(int instr) {
+ ASSERT((instr & kConstantPoolMarkerMask) == kConstantPoolMarker);
+ return ((instr >> 4) & 0xfff0) | (instr & 0xf);
+}
// Number of registers in normal ARM mode.
const int kNumRegisters = 16;
@@ -690,6 +698,9 @@ class Instruction {
&& (Bit(20) == 0)
&& ((Bit(7) == 0)); }
+ // Test for a nop instruction, which falls under type 1.
+ inline bool IsNopType1() const { return Bits(24, 0) == 0x0120F000; }
+
// Test for a stop instruction.
inline bool IsStop() const {
return (TypeValue() == 7) && (Bit(24) == 1) && (SvcValue() >= kStopCode);
diff --git a/src/3rdparty/v8/src/arm/cpu-arm.cc b/src/3rdparty/v8/src/arm/cpu-arm.cc
index f7da6c3..bed9503 100644
--- a/src/3rdparty/v8/src/arm/cpu-arm.cc
+++ b/src/3rdparty/v8/src/arm/cpu-arm.cc
@@ -29,7 +29,7 @@
#include "v8.h"
-#if defined(__arm__)
+#if defined(__arm__) && !defined(_WIN32_WCE)
#if !defined(__QNXNTO__)
#include <sys/syscall.h> // for cache flushing.
#else
@@ -73,6 +73,11 @@ void CPU::FlushICache(void* start, size_t size) {
// The QNX kernel does not expose the symbol __ARM_NR_cacheflush so we
// use the msync system call instead of the approach used on Linux
msync(start, size, MS_SYNC|MS_INVALIDATE_ICACHE);
+#elif defined(_WIN32_WCE)
+ // Windows CE compiler does not support the asm command, nor does it expose
+ // __ARM_NR_cacheflush. As well as Windows CE does not support to flush a
+ // region, so we need to flush the whole process.
+ FlushInstructionCache(GetCurrentProcess(), NULL, NULL);
#else
// Ideally, we would call
// syscall(__ARM_NR_cacheflush, start,
diff --git a/src/3rdparty/v8/src/arm/debug-arm.cc b/src/3rdparty/v8/src/arm/debug-arm.cc
index 3e7a1e9..c2941be 100644
--- a/src/3rdparty/v8/src/arm/debug-arm.cc
+++ b/src/3rdparty/v8/src/arm/debug-arm.cc
@@ -48,7 +48,7 @@ void BreakLocationIterator::SetDebugBreakAtReturn() {
// add sp, sp, #4
// bx lr
// to a call to the debug break return code.
- // #if USE_BLX
+ // #ifdef USE_BLX
// ldr ip, [pc, #0]
// blx ip
// #else
@@ -99,7 +99,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() {
// mov r2, r2
// mov r2, r2
// to a call to the debug break slot code.
- // #if USE_BLX
+ // #ifdef USE_BLX
// ldr ip, [pc, #0]
// blx ip
// #else
diff --git a/src/3rdparty/v8/src/arm/deoptimizer-arm.cc b/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
index 699e6aa..19667b9 100644
--- a/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
+++ b/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
@@ -50,6 +50,10 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
+ // The optimized code is going to be patched, so we cannot use it
+ // any more. Play safe and reset the whole cache.
+ function->shared()->ClearOptimizedCodeMap();
+
// Get the optimized code.
Code* code = function->code();
Address code_start_address = code->instruction_start();
@@ -69,8 +73,11 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (deopt_data->Pc(i)->value() == -1) continue;
Address call_address = code_start_address + deopt_data->Pc(i)->value();
Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
- int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
- RelocInfo::NONE);
+ // We need calls to have a predictable size in the unoptimized code, but
+ // this is optimized code, so we don't have to have a predictable size.
+ int call_size_in_bytes =
+ MacroAssembler::CallSizeNotPredictableCodeSize(deopt_entry,
+ RelocInfo::NONE);
int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
ASSERT(call_size_in_bytes <= patch_size());
@@ -97,8 +104,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
- // Set the code for the function to non-optimized version.
- function->ReplaceCode(function->shared()->code());
+ ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
@@ -196,11 +202,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
}
-static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
+static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
ByteArray* translations = data->TranslationByteArray();
int length = data->DeoptCount();
for (int i = 0; i < length; i++) {
- if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
+ if (data->AstId(i) == ast_id) {
TranslationIterator it(translations, data->TranslationIndex(i)->value());
int value = it.Next();
ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
@@ -219,7 +225,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
optimized_code_->deoptimization_data());
unsigned ast_id = data->OsrAstId()->value();
- int bailout_id = LookupBailoutId(data, ast_id);
+ int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
unsigned translation_index = data->TranslationIndex(bailout_id)->value();
ByteArray* translations = data->TranslationByteArray();
@@ -239,9 +245,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
- USE(function);
- ASSERT(function == function_);
+ int closure_id = iterator.Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
unsigned height = iterator.Next();
unsigned height_in_bytes = height * kPointerSize;
USE(height_in_bytes);
@@ -352,8 +358,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
if (FLAG_trace_osr) {
PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
ok ? "finished" : "aborted",
- reinterpret_cast<intptr_t>(function));
- function->PrintName();
+ reinterpret_cast<intptr_t>(function_));
+ function_->PrintName();
PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
}
}
@@ -577,19 +583,145 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
+ int frame_index,
+ bool is_setter_stub_frame) {
+ JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ // The receiver (and the implicit return value, if any) are expected in
+ // registers by the LoadIC/StoreIC, so they don't belong to the output stack
+ // frame. This means that we have to use a height of 0.
+ unsigned height = 0;
+ unsigned height_in_bytes = height * kPointerSize;
+ const char* kind = is_setter_stub_frame ? "setter" : "getter";
+ if (FLAG_trace_deopt) {
+ PrintF(" translating %s stub => height=%u\n", kind, height_in_bytes);
+ }
+
+ // We need 5 stack entries from StackFrame::INTERNAL (lr, fp, cp, frame type,
+ // code object, see MacroAssembler::EnterFrame). For a setter stub frames we
+ // need one additional entry for the implicit return value, see
+ // StoreStubCompiler::CompileStoreViaSetter.
+ unsigned fixed_frame_entries = 5 + (is_setter_stub_frame ? 1 : 0);
+ unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, accessor);
+ output_frame->SetFrameType(StackFrame::INTERNAL);
+
+ // A frame for an accessor stub can not be the topmost or bottommost one.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous frame's top and
+ // this frame's size.
+ uint32_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ unsigned output_offset = output_frame_size;
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; function (%s sentinel)\n",
+ top_address + output_offset, output_offset, value, kind);
+ }
+
+ // Get Code object from accessor stub.
+ output_offset -= kPointerSize;
+ Builtins::Name name = is_setter_stub_frame ?
+ Builtins::kStoreIC_Setter_ForDeopt :
+ Builtins::kLoadIC_Getter_ForDeopt;
+ Code* accessor_stub = isolate_->builtins()->builtin(name);
+ value = reinterpret_cast<intptr_t>(accessor_stub);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; code object\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Skip receiver.
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator->Next());
+ iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+
+ if (is_setter_stub_frame) {
+ // The implicit return value was part of the artificial setter stub
+ // environment.
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Smi* offset = is_setter_stub_frame ?
+ isolate_->heap()->setter_stub_deopt_pc_offset() :
+ isolate_->heap()->getter_stub_deopt_pc_offset();
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ accessor_stub->instruction_start() + offset->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32 code, but relies on register names (fp, sp)
// and how the frame is laid out.
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
// Read the ast node id, function, and frame height for this output frame.
- int node_id = iterator->Next();
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ BailoutId node_id = BailoutId(iterator->Next());
+ JSFunction* function;
+ if (frame_index != 0) {
+ function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ } else {
+ int closure_id = iterator->Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
+ function = function_;
+ }
unsigned height = iterator->Next();
unsigned height_in_bytes = height * kPointerSize;
if (FLAG_trace_deopt) {
PrintF(" translating ");
function->PrintName();
- PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
+ PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
}
// The 'fixed' part of the frame consists of the incoming parameters and
diff --git a/src/3rdparty/v8/src/arm/disasm-arm.cc b/src/3rdparty/v8/src/arm/disasm-arm.cc
index 96a7d3c..af2ed52 100644
--- a/src/3rdparty/v8/src/arm/disasm-arm.cc
+++ b/src/3rdparty/v8/src/arm/disasm-arm.cc
@@ -692,11 +692,19 @@ void Decoder::DecodeType01(Instruction* instr) {
// Rn field to encode it.
Format(instr, "mul'cond's 'rn, 'rm, 'rs");
} else {
- // The MLA instruction description (A 4.1.28) refers to the order
- // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
- // Rn field to encode the Rd register and the Rd field to encode
- // the Rn register.
- Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
+ if (instr->Bit(22) == 0) {
+ // The MLA instruction description (A 4.1.28) refers to the order
+ // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
+ // Rn field to encode the Rd register and the Rd field to encode
+ // the Rn register.
+ Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
+ } else {
+ // The MLS instruction description (A 4.1.29) refers to the order
+ // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
+ // Rn field to encode the Rd register and the Rd field to encode
+ // the Rn register.
+ Format(instr, "mls'cond's 'rn, 'rm, 'rs, 'rd");
+ }
}
} else {
// The signed/long multiply instructions use the terms RdHi and RdLo
@@ -822,6 +830,8 @@ void Decoder::DecodeType01(Instruction* instr) {
} else {
Unknown(instr); // not used by V8
}
+ } else if ((type == 1) && instr->IsNopType1()) {
+ Format(instr, "nop'cond");
} else {
switch (instr->OpcodeField()) {
case AND: {
@@ -974,6 +984,17 @@ void Decoder::DecodeType3(Instruction* instr) {
break;
}
case db_x: {
+ if (FLAG_enable_sudiv) {
+ if (!instr->HasW()) {
+ if (instr->Bits(5, 4) == 0x1) {
+ if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
+ // SDIV (in V8 notation matching ARM ISA format) rn = rm/rs
+ Format(instr, "sdiv'cond'b 'rn, 'rm, 'rs");
+ break;
+ }
+ }
+ }
+ }
Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
break;
}
@@ -1367,7 +1388,7 @@ bool Decoder::IsConstantPoolAt(byte* instr_ptr) {
int Decoder::ConstantPoolSizeAt(byte* instr_ptr) {
if (IsConstantPoolAt(instr_ptr)) {
int instruction_bits = *(reinterpret_cast<int*>(instr_ptr));
- return instruction_bits & kConstantPoolLengthMask;
+ return DecodeConstantPoolLength(instruction_bits);
} else {
return -1;
}
@@ -1389,8 +1410,7 @@ int Decoder::InstructionDecode(byte* instr_ptr) {
if ((instruction_bits & kConstantPoolMarkerMask) == kConstantPoolMarker) {
out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_,
"constant pool begin (length %d)",
- instruction_bits &
- kConstantPoolLengthMask);
+ DecodeConstantPoolLength(instruction_bits));
return Instruction::kInstrSize;
}
switch (instr->TypeValue()) {
diff --git a/src/3rdparty/v8/src/arm/full-codegen-arm.cc b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
index 2555c04..9a7b116 100644
--- a/src/3rdparty/v8/src/arm/full-codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
@@ -73,9 +73,6 @@ class JumpPatchSite BASE_EMBEDDED {
Assembler::BlockConstPoolScope block_const_pool(masm_);
__ bind(&patch_site_);
__ cmp(reg, Operand(reg));
- // Don't use b(al, ...) as that might emit the constant pool right after the
- // branch. After patching when the branch is no longer unconditional
- // execution can continue into the constant pool.
__ b(eq, target); // Always taken before patched.
}
@@ -90,6 +87,8 @@ class JumpPatchSite BASE_EMBEDDED {
}
void EmitPatchInfo() {
+ // Block literal pool emission whilst recording patch site information.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
if (patch_site_.is_bound()) {
int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
Register reg;
@@ -135,6 +134,8 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -148,12 +149,15 @@ void FullCodeGenerator::Generate() {
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
Label ok;
+ Label begin;
+ __ bind(&begin);
__ cmp(r5, Operand(0));
__ b(eq, &ok);
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ str(r2, MemOperand(sp, receiver_offset));
__ bind(&ok);
+ ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
}
// Open a frame scope to indicate that there is a frame on the stack. The
@@ -163,12 +167,12 @@ void FullCodeGenerator::Generate() {
int locals_count = info->scope()->num_stack_slots();
- __ Push(lr, fp, cp, r1);
- if (locals_count > 0) {
- // Load undefined value here, so the value is ready for the loop
- // below.
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- }
+ // The following four instructions must remain together and unmodified for
+ // code aging to work properly.
+ __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ // Load undefined value here, so the value is ready for the loop
+ // below.
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
// Adjust fp to point to caller's fp.
__ add(fp, sp, Operand(2 * kPointerSize));
@@ -184,11 +188,14 @@ void FullCodeGenerator::Generate() {
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0 ||
(scope()->is_qml_mode() && scope()->is_global_scope())) {
- Comment cmnt(masm_, "[ Allocate local context");
- // Argument to NewContext is the function, which is in r1.
+ // Argument to NewContext is the function, which is still in r1.
+ Comment cmnt(masm_, "[ Allocate context");
__ push(r1);
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ Push(info->scope()->GetScopeInfo());
+ __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -264,7 +271,7 @@ void FullCodeGenerator::Generate() {
scope()->VisitIllegalRedeclaration(this);
} else {
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
@@ -279,11 +286,12 @@ void FullCodeGenerator::Generate() {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
@@ -330,7 +338,7 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
if (isolate()->IsDebuggerActive()) {
// Detect debug break requests as soon as possible.
- reset_value = 10;
+ reset_value = FLAG_interrupt_budget >> 4;
}
__ mov(r2, Operand(profiling_counter_));
__ mov(r3, Operand(Smi::FromInt(reset_value)));
@@ -338,13 +346,11 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
-static const int kMaxBackEdgeWeight = 127;
-static const int kBackEdgeDistanceDivisor = 142;
-
-
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
+ // Block literal pools whilst emitting stack check code.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
Label ok;
if (FLAG_count_based_interrupts) {
@@ -353,7 +359,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(back_edge_target->is_bound());
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
__ b(pl, &ok);
@@ -363,6 +369,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_);
StackCheckStub stub;
__ CallStub(&stub);
}
@@ -405,7 +412,7 @@ void FullCodeGenerator::EmitReturnSequence() {
} else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
Label ok;
@@ -436,6 +443,7 @@ void FullCodeGenerator::EmitReturnSequence() {
// tool from instrumenting as we rely on the code size here.
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
+ PredictableCodeSizeScope predictable(masm_);
__ RecordJSReturn();
masm_->mov(sp, fp);
masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
@@ -675,18 +683,9 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through) {
- if (CpuFeatures::IsSupported(VFP3)) {
- ToBooleanStub stub(result_register());
- __ CallStub(&stub);
- __ tst(result_register(), result_register());
- } else {
- // Call the runtime to find the boolean value of the source and then
- // translate it into control flow to the pair of labels.
- __ push(result_register());
- __ CallRuntime(Runtime::kToBool, 1);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(r0, ip);
- }
+ ToBooleanStub stub(result_register());
+ __ CallStub(&stub);
+ __ tst(result_register(), result_register());
Split(ne, if_true, if_false, fall_through);
}
@@ -787,7 +786,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
// Check that we're not inside a with or catch context.
__ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ CompareRoot(r1, Heap::kWithContextMapRootIndex);
@@ -809,11 +808,12 @@ void FullCodeGenerator::VisitVariableDeclaration(
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
globals_->Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(),
+ zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
case Variable::PARAMETER:
@@ -840,10 +840,9 @@ void FullCodeGenerator::VisitVariableDeclaration(
Comment cmnt(masm_, "[ VariableDeclaration");
__ mov(r2, Operand(variable->name()));
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR || mode == LET ||
- mode == CONST || mode == CONST_HARMONY);
- PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
- ? READ_ONLY : NONE;
+ ASSERT(IsDeclaredVariableMode(mode));
+ PropertyAttributes attr =
+ IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
__ mov(r1, Operand(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -869,13 +868,13 @@ void FullCodeGenerator::VisitFunctionDeclaration(
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_->Add(function);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(function, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
}
@@ -929,9 +928,9 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
switch (variable->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name());
- globals_->Add(instance);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
Visit(declaration->module());
break;
}
@@ -1135,26 +1134,34 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// modification check. Otherwise, we got a fixed array, and we have
// to do a slow check.
Label fixed_array;
- __ mov(r2, r0);
- __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kMetaMapRootIndex);
- __ cmp(r1, ip);
+ __ cmp(r2, ip);
__ b(ne, &fixed_array);
// We got a map in register r0. Get the enumeration cache from it.
+ Label no_descriptors;
__ bind(&use_cache);
- __ LoadInstanceDescriptors(r0, r1);
- __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
- __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
+
+ __ EnumLength(r1, r0);
+ __ cmp(r1, Operand(Smi::FromInt(0)));
+ __ b(eq, &no_descriptors);
+
+ __ LoadInstanceDescriptors(r0, r2);
+ __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
+ __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(r0); // Map.
- __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
__ mov(r0, Operand(Smi::FromInt(0)));
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(r2, r1, r0);
__ jmp(&loop);
+ __ bind(&no_descriptors);
+ __ Drop(1);
+ __ jmp(&exit);
+
// We got a fixed array in register r0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
@@ -1163,7 +1170,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
isolate()->factory()->NewJSGlobalPropertyCell(
Handle<Object>(
Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
- RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
__ LoadHeapObject(r1, cell);
__ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
__ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
@@ -1319,9 +1326,9 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ Move(next, current);
}
__ bind(&loop);
- // Terminate at global context.
+ // Terminate at native context.
__ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
__ cmp(temp, ip);
__ b(eq, &fast);
// Check that extension is NULL.
@@ -1607,9 +1614,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
- AccessorTable accessor_table(isolate()->zone());
+ AccessorTable accessor_table(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1635,7 +1642,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1705,7 +1712,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1726,8 +1733,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1755,7 +1761,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ ldr(r6, MemOperand(sp)); // Copy of array literal.
__ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
@@ -1842,11 +1848,11 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
}
}
@@ -1903,7 +1909,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ mov(r2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name r0 and r2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1911,7 +1917,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1938,7 +1944,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -2021,7 +2028,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(r1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(r0);
}
@@ -2152,7 +2160,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, r1);
- if (FLAG_debug_code && op == Token::INIT_LET) {
+ if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ ldr(r2, location);
__ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
@@ -2185,43 +2193,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- // Load receiver to r1. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r1, MemOperand(sp));
- } else {
- __ pop(r1);
- }
+ __ pop(r1);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2230,44 +2211,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ pop(r1); // Key.
- // Load receiver to r2. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r2, MemOperand(sp));
- } else {
- __ pop(r2);
- }
+ __ pop(r2);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2280,6 +2233,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(r0);
} else {
VisitForStackValue(expr->obj());
@@ -2293,9 +2247,11 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
ic_total_count_++;
- __ Call(code, rmode, ast_id);
+ // All calls must have a predictable size in full-codegen code to ensure that
+ // the debugger can patch them correctly.
+ __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
@@ -2315,7 +2271,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2348,7 +2304,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2368,16 +2324,14 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
- // Record call targets in unoptimized code, but not in the snapshot.
- if (!Serializer::enabled()) {
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(r2, Operand(cell));
- }
+ // Record call targets in unoptimized code.
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ mov(r2, Operand(cell));
CallFunctionStub stub(arg_count, flags);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
@@ -2571,21 +2525,15 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(r0, Operand(arg_count));
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
- // Record call targets in unoptimized code, but not in the snapshot.
- CallFunctionFlags flags;
- if (!Serializer::enabled()) {
- flags = RECORD_CALL_TARGET;
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(r2, Operand(cell));
- } else {
- flags = NO_CALL_FUNCTION_FLAGS;
- }
+ // Record call targets in unoptimized code.
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ mov(r2, Operand(cell));
- CallConstructStub stub(flags);
+ CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(r0);
@@ -2727,7 +2675,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (FLAG_debug_code) __ AbortIfSmi(r0);
+ __ AssertNotSmi(r0);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
@@ -2742,27 +2690,31 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ b(eq, if_false);
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(r3, r1);
+ __ cmp(r3, Operand(0));
+ __ b(eq, &done);
+
__ LoadInstanceDescriptors(r1, r4);
- __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
- // r4: descriptor array
- // r3: length of descriptor array
- // Calculate the end of the descriptor array.
+ // r4: descriptor array.
+ // r3: valid entries in the descriptor array.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kPointerSize == 4);
- __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
+ __ mul(r3, r3, ip);
+ // Calculate location of the first key name.
+ __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
+ // Calculate the end of the descriptor array.
+ __ mov(r2, r4);
__ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
- // Calculate location of the first key name.
- __ add(r4,
- r4,
- Operand(FixedArray::kHeaderSize - kHeapObjectTag +
- DescriptorArray::kFirstIndex * kPointerSize));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
// The use of ip to store the valueOf symbol asumes that it is not otherwise
// used in the loop below.
__ mov(ip, Operand(FACTORY->value_of_symbol()));
@@ -2771,18 +2723,19 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ ldr(r3, MemOperand(r4, 0));
__ cmp(r3, ip);
__ b(eq, if_false);
- __ add(r4, r4, Operand(kPointerSize));
+ __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
__ bind(&entry);
__ cmp(r4, Operand(r2));
__ b(ne, &loop);
- // If a valueOf property is not found on the object check that it's
+ __ bind(&done);
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
__ JumpIfSmi(r2, if_false);
__ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
- __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
__ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ cmp(r2, r3);
__ b(ne, if_false);
@@ -3059,13 +3012,14 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
// Convert 32 random bits in r0 to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
__ PrepareCallCFunction(1, r0);
- __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ __ ldr(r0,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
// Create this constant using mov/orr to avoid PC relative load.
__ mov(r1, Operand(0x41000000));
@@ -3082,9 +3036,10 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
__ mov(r0, r4);
} else {
__ PrepareCallCFunction(2, r0);
- __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+ __ ldr(r1,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
__ mov(r0, Operand(r4));
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
+ __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
__ CallCFunction(
ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
}
@@ -3146,20 +3101,19 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); // Load the object.
- Label runtime, done;
+ Label runtime, done, not_date_object;
Register object = r0;
Register result = r0;
Register scratch0 = r9;
Register scratch1 = r1;
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ JumpIfSmi(object, &not_date_object);
__ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
- __ Assert(eq, "Trying to get date field from non-date.");
-#endif
+ __ b(ne, &not_date_object);
if (index->value() == 0) {
__ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ __ jmp(&done);
} else {
if (index->value() < JSDate::kFirstUncachedField) {
ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
@@ -3176,8 +3130,12 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
__ PrepareCallCFunction(2, scratch1);
__ mov(r1, Operand(index));
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
- __ bind(&done);
+ __ jmp(&done);
}
+
+ __ bind(&not_date_object);
+ __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ bind(&done);
context()->Plug(r0);
}
@@ -3188,7 +3146,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
MathPowStub stub(MathPowStub::ON_STACK);
__ CallStub(&stub);
} else {
@@ -3440,10 +3398,11 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
- // Check for proxy.
- Label proxy, done;
- __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
- __ b(eq, &proxy);
+ Label runtime, done;
+ // Check for non-function argument (including proxy).
+ __ JumpIfSmi(r0, &runtime);
+ __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
+ __ b(ne, &runtime);
// InvokeFunction requires the function in r1. Move it in there.
__ mov(r1, result_register());
@@ -3453,7 +3412,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
- __ bind(&proxy);
+ __ bind(&runtime);
__ push(r0);
__ CallRuntime(Runtime::kCall, args->length());
__ bind(&done);
@@ -3481,7 +3440,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- isolate()->global_context()->jsfunction_result_caches());
+ isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3493,8 +3452,8 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Register key = r0;
Register cache = r1;
- __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
+ __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
__ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ ldr(cache,
FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3591,9 +3550,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(r0);
- }
+ __ AssertString(r0);
__ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
__ IndexFromHash(r0, r0);
@@ -3665,7 +3622,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// string_length: Accumulated sum of string lengths (smi).
// element: Current array element.
// elements_end: Array end.
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
__ cmp(array_length, Operand(0));
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
}
@@ -3863,7 +3820,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -4018,7 +3975,8 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register r0.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
context()->Plug(r0);
}
@@ -4076,7 +4034,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
- PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
@@ -4129,7 +4087,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4161,7 +4119,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4178,7 +4136,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4387,7 +4345,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ cmp(r0, Operand(0));
@@ -4471,7 +4429,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
if (declaration_scope->is_global_scope() ||
declaration_scope->is_module_scope()) {
- // Contexts nested in the global context have a canonical empty function
+ // Contexts nested in the native context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
// function.
@@ -4501,14 +4459,57 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
STATIC_ASSERT(kSmiTag == 0);
__ add(r1, r1, Operand(r1)); // Convert to smi.
+
+ // Store result register while executing finally block.
+ __ push(r1);
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ ldr(r1, MemOperand(ip));
+ __ push(r1);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ ldr(r1, MemOperand(ip));
+ __ SmiTag(r1);
+ __ push(r1);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ ldr(r1, MemOperand(ip));
__ push(r1);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(r1));
+ // Restore pending message from stack.
+ __ pop(r1);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ __ SmiUntag(r1);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ str(r1, MemOperand(ip));
+
// Restore result register from stack.
__ pop(r1);
+
// Uncook return address and return.
__ pop(result_register());
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
diff --git a/src/3rdparty/v8/src/arm/ic-arm.cc b/src/3rdparty/v8/src/arm/ic-arm.cc
index c12c167..4839589 100644
--- a/src/3rdparty/v8/src/arm/ic-arm.cc
+++ b/src/3rdparty/v8/src/arm/ic-arm.cc
@@ -396,7 +396,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
Code::Flags flags = Code::ComputeFlags(kind,
MONOMORPHIC,
extra_state,
- NORMAL,
+ Code::NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
masm, flags, r1, r2, r3, r4, r5, r6);
@@ -1249,7 +1249,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in r0.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ mov(r0, r2);
__ Ret();
__ bind(&fail);
@@ -1301,6 +1301,144 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
}
+static void KeyedStoreGenerateGenericHelper(
+ MacroAssembler* masm,
+ Label* fast_object,
+ Label* fast_double,
+ Label* slow,
+ KeyedStoreCheckMap check_map,
+ KeyedStoreIncrementLength increment_length,
+ Register value,
+ Register key,
+ Register receiver,
+ Register receiver_map,
+ Register elements_map,
+ Register elements) {
+ Label transition_smi_elements;
+ Label finish_object_store, non_double_value, transition_double_elements;
+ Label fast_double_without_map_check;
+
+ // Fast case: Do the store, could be either Object or double.
+ __ bind(fast_object);
+ Register scratch_value = r4;
+ Register address = r5;
+ if (check_map == kCheckMap) {
+ __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ cmp(elements_map,
+ Operand(masm->isolate()->factory()->fixed_array_map()));
+ __ b(ne, fast_double);
+ }
+ // Smi stores don't require further checks.
+ Label non_smi_value;
+ __ JumpIfNotSmi(value, &non_smi_value);
+
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ // It's irrelevant whether array is smi-only or not when writing a smi.
+ __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ str(value, MemOperand(address));
+ __ Ret();
+
+ __ bind(&non_smi_value);
+ // Escape to elements kind transition case.
+ __ CheckFastObjectElements(receiver_map, scratch_value,
+ &transition_smi_elements);
+
+ // Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ str(value, MemOperand(address));
+ // Update write barrier for the elements array address.
+ __ mov(scratch_value, value); // Preserve the value which is returned.
+ __ RecordWrite(elements,
+ address,
+ scratch_value,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Ret();
+
+ __ bind(fast_double);
+ if (check_map == kCheckMap) {
+ // Check for fast double array case. If this fails, call through to the
+ // runtime.
+ __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
+ __ b(ne, slow);
+ }
+ __ bind(&fast_double_without_map_check);
+ __ StoreNumberToDoubleElements(value,
+ key,
+ receiver,
+ elements, // Overwritten.
+ r3, // Scratch regs...
+ r4,
+ r5,
+ r6,
+ &transition_double_elements);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ __ Ret();
+
+ __ bind(&transition_smi_elements);
+ // Transition the array appropriately depending on the value type.
+ __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
+ __ b(ne, &non_double_value);
+
+ // Value is a double. Transition FAST_SMI_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ receiver_map,
+ r4,
+ slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ r4,
+ slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ r4,
+ slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+}
+
+
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
StrictModeFlag strict_mode) {
// ---------- S t a t e --------------
@@ -1309,11 +1447,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// -- r2 : receiver
// -- lr : return address
// -----------------------------------
- Label slow, array, extra, check_if_double_array;
- Label fast_object_with_map_check, fast_object_without_map_check;
- Label fast_double_with_map_check, fast_double_without_map_check;
- Label transition_smi_elements, finish_object_store, non_double_value;
- Label transition_double_elements;
+ Label slow, fast_object, fast_object_grow;
+ Label fast_double, fast_double_grow;
+ Label array, extra, check_if_double_array;
// Register usage.
Register value = r0;
@@ -1348,7 +1484,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Check array bounds. Both the key and the length of FixedArray are smis.
__ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
__ cmp(key, Operand(ip));
- __ b(lo, &fast_object_with_map_check);
+ __ b(lo, &fast_object);
// Slow case, handle jump to runtime.
__ bind(&slow);
@@ -1373,21 +1509,13 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ cmp(elements_map,
Operand(masm->isolate()->factory()->fixed_array_map()));
__ b(ne, &check_if_double_array);
- // Calculate key + 1 as smi.
- STATIC_ASSERT(kSmiTag == 0);
- __ add(r4, key, Operand(Smi::FromInt(1)));
- __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ b(&fast_object_without_map_check);
+ __ jmp(&fast_object_grow);
__ bind(&check_if_double_array);
__ cmp(elements_map,
Operand(masm->isolate()->factory()->fixed_double_array_map()));
__ b(ne, &slow);
- // Add 1 to key, and go to common element store code for doubles.
- STATIC_ASSERT(kSmiTag == 0);
- __ add(r4, key, Operand(Smi::FromInt(1)));
- __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ jmp(&fast_double_without_map_check);
+ __ jmp(&fast_double_grow);
// Array case: Get the length and the elements array from the JS
// array. Check that the array is in fast mode (and writable); if it
@@ -1399,106 +1527,15 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ cmp(key, Operand(ip));
__ b(hs, &extra);
- // Fall through to fast case.
-
- __ bind(&fast_object_with_map_check);
- Register scratch_value = r4;
- Register address = r5;
- __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ cmp(elements_map,
- Operand(masm->isolate()->factory()->fixed_array_map()));
- __ b(ne, &fast_double_with_map_check);
- __ bind(&fast_object_without_map_check);
- // Smi stores don't require further checks.
- Label non_smi_value;
- __ JumpIfNotSmi(value, &non_smi_value);
- // It's irrelevant whether array is smi-only or not when writing a smi.
- __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ str(value, MemOperand(address));
- __ Ret();
-
- __ bind(&non_smi_value);
- // Escape to elements kind transition case.
- __ CheckFastObjectElements(receiver_map, scratch_value,
- &transition_smi_elements);
- // Fast elements array, store the value to the elements backing store.
- __ bind(&finish_object_store);
- __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ str(value, MemOperand(address));
- // Update write barrier for the elements array address.
- __ mov(scratch_value, value); // Preserve the value which is returned.
- __ RecordWrite(elements,
- address,
- scratch_value,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- __ Ret();
-
- __ bind(&fast_double_with_map_check);
- // Check for fast double array case. If this fails, call through to the
- // runtime.
- __ cmp(elements_map,
- Operand(masm->isolate()->factory()->fixed_double_array_map()));
- __ b(ne, &slow);
- __ bind(&fast_double_without_map_check);
- __ StoreNumberToDoubleElements(value,
- key,
- receiver,
- elements,
- r3,
- r4,
- r5,
- r6,
- &transition_double_elements);
- __ Ret();
- __ bind(&transition_smi_elements);
- // Transition the array appropriately depending on the value type.
- __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
- __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
- __ b(ne, &non_double_value);
-
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_DOUBLE_ELEMENTS,
- receiver_map,
- r4,
- &slow);
- ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
- __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&fast_double_without_map_check);
-
- __ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
- receiver_map,
- r4,
- &slow);
- ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
-
- __ bind(&transition_double_elements);
- // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
- // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
- // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
- FAST_ELEMENTS,
- receiver_map,
- r4,
- &slow);
- ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
- ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
- __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
+ &slow, kCheckMap, kDontIncrementLength,
+ value, key, receiver, receiver_map,
+ elements_map, elements);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
+ &slow, kDontCheckMap, kIncrementLength,
+ value, key, receiver, receiver_map,
+ elements_map, elements);
}
@@ -1697,7 +1734,7 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
Address cmp_instruction_address =
- address + Assembler::kCallTargetAddressOffset;
+ Assembler::return_address_from_call_start(address);
// If the instruction following the call is not a cmp rx, #yyy, nothing
// was inlined.
diff --git a/src/3rdparty/v8/src/arm/lithium-arm.cc b/src/3rdparty/v8/src/arm/lithium-arm.cc
index a679c0c..b492d48 100644
--- a/src/3rdparty/v8/src/arm/lithium-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-arm.cc
@@ -177,6 +177,7 @@ const char* LArithmeticT::Mnemonic() const {
case Token::BIT_AND: return "bit-and-t";
case Token::BIT_OR: return "bit-or-t";
case Token::BIT_XOR: return "bit-xor-t";
+ case Token::ROR: return "ror-t";
case Token::SHL: return "shl-t";
case Token::SAR: return "sar-t";
case Token::SHR: return "shr-t";
@@ -194,22 +195,22 @@ void LGoto::PrintDataTo(StringStream* stream) {
void LBranch::PrintDataTo(StringStream* stream) {
stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ left()->PrintTo(stream);
stream->Add(" %s ", Token::String(op()));
- InputAt(1)->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(kind() == kStrictEquality ? " === " : " == ");
stream->Add(nil() == kNullValue ? "null" : "undefined");
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
@@ -218,57 +219,57 @@ void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_object(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_string(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_undetectable(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if string_compare(");
- InputAt(0)->PrintTo(stream);
- InputAt(1)->PrintTo(stream);
+ left()->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_cached_array_index(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if class_of_test(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(", \"%o\") then B%d else B%d",
*hydrogen()->class_name(),
true_block_id(),
@@ -278,7 +279,7 @@ void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if typeof ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(" == \"%s\" then B%d else B%d",
*hydrogen()->type_literal()->ToCString(),
true_block_id(), false_block_id());
@@ -292,26 +293,26 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
stream->Add("/%s ", hydrogen()->OpName());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
}
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d] <- ", slot_index());
- InputAt(1)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LInvokeFunction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ function()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -340,17 +341,15 @@ void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
void LCallNew::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ constructor()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
arguments()->PrintTo(stream);
-
stream->Add(" length ");
length()->PrintTo(stream);
-
stream->Add(" index ");
index()->PrintTo(stream);
}
@@ -374,16 +373,7 @@ void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
}
-void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add("[");
- key()->PrintTo(stream);
- stream->Add("] <- ");
- value()->PrintTo(stream);
-}
-
-
-void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
+void LStoreKeyed::PrintDataTo(StringStream* stream) {
elements()->PrintTo(stream);
stream->Add("[");
key()->PrintTo(stream);
@@ -407,146 +397,26 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
}
-LChunk::LChunk(CompilationInfo* info, HGraph* graph)
- : spill_slot_count_(0),
- info_(info),
- graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) {
-}
-
-
-int LChunk::GetNextSpillIndex(bool is_double) {
+int LPlatformChunk::GetNextSpillIndex(bool is_double) {
// Skip a slot if for a double-width slot.
if (is_double) spill_slot_count_++;
return spill_slot_count_++;
}
-LOperand* LChunk::GetNextSpillSlot(bool is_double) {
+LOperand* LPlatformChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
- } else {
- return LStackSlot::Create(index);
- }
-}
-
-
-void LChunk::MarkEmptyBlocks() {
- HPhase phase("L_Mark empty blocks", this);
- for (int i = 0; i < graph()->blocks()->length(); ++i) {
- HBasicBlock* block = graph()->blocks()->at(i);
- int first = block->first_instruction_index();
- int last = block->last_instruction_index();
- LInstruction* first_instr = instructions()->at(first);
- LInstruction* last_instr = instructions()->at(last);
-
- LLabel* label = LLabel::cast(first_instr);
- if (last_instr->IsGoto()) {
- LGoto* goto_instr = LGoto::cast(last_instr);
- if (label->IsRedundant() &&
- !label->is_loop_header()) {
- bool can_eliminate = true;
- for (int i = first + 1; i < last && can_eliminate; ++i) {
- LInstruction* cur = instructions()->at(i);
- if (cur->IsGap()) {
- LGap* gap = LGap::cast(cur);
- if (!gap->IsRedundant()) {
- can_eliminate = false;
- }
- } else {
- can_eliminate = false;
- }
- }
-
- if (can_eliminate) {
- label->set_replacement(GetLabel(goto_instr->block_id()));
- }
- }
- }
- }
-}
-
-
-void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
- int index = -1;
- if (instr->IsControl()) {
- instructions_.Add(gap);
- index = instructions_.length();
- instructions_.Add(instr);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
- }
- if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
- instr->pointer_map()->set_lithium_position(index);
+ return LStackSlot::Create(index, zone());
}
}
-LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
-}
-
-
-int LChunk::GetParameterStackSlot(int index) const {
- // The receiver is at index 0, the first parameter at index 1, so we
- // shift all parameter indexes down by the number of parameters, and
- // make sure they end up negative so they are distinguishable from
- // spill slots.
- int result = index - info()->scope()->num_parameters() - 1;
- ASSERT(result < 0);
- return result;
-}
-
-// A parameter relative to ebp in the arguments stub.
-int LChunk::ParameterAt(int index) {
- ASSERT(-1 <= index); // -1 is the receiver.
- return (1 + info()->scope()->num_parameters() - index) *
- kPointerSize;
-}
-
-
-LGap* LChunk::GetGapAt(int index) const {
- return LGap::cast(instructions_[index]);
-}
-
-
-bool LChunk::IsGapAt(int index) const {
- return instructions_[index]->IsGap();
-}
-
-
-int LChunk::NearestGapPos(int index) const {
- while (!IsGapAt(index)) index--;
- return index;
-}
-
-
-void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
-}
-
-
-Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
- return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
-}
-
-
-Representation LChunk::LookupLiteralRepresentation(
- LConstantOperand* operand) const {
- return graph_->LookupValue(operand->index())->representation();
-}
-
-
-LChunk* LChunkBuilder::Build() {
+LPlatformChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new(zone()) LChunk(info(), graph());
+ chunk_ = new(zone()) LPlatformChunk(info(), graph());
HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@@ -561,17 +431,8 @@ LChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LChunk building in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LChunkBuilder::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -740,7 +601,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
@@ -762,7 +623,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -835,13 +696,16 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
// Shift operations can only deoptimize if we do a logical shift
// by 0 and the result cannot be truncated to int32.
- bool may_deopt = (op == Token::SHR && constant_value == 0);
bool does_deopt = false;
- if (may_deopt) {
- for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
- if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
- does_deopt = true;
- break;
+ if (op == Token::SHR && constant_value == 0) {
+ if (FLAG_opt_safe_uint32_operations) {
+ does_deopt = !instr->CheckFlag(HInstruction::kUint32);
+ } else {
+ for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
+ does_deopt = true;
+ break;
+ }
}
}
}
@@ -974,8 +838,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
- int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber ||
+ BailoutId ast_id = hydrogen_env->ast_id();
+ ASSERT(!ast_id.IsNone() ||
hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result = new(zone()) LEnvironment(
@@ -985,7 +849,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ hydrogen_env->entry(),
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -999,7 +865,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
} else {
op = UseAny(value);
}
- result->AddValue(op, value->representation());
+ result->AddValue(op,
+ value->representation(),
+ value->CheckFlag(HInstruction::kUint32));
}
if (hydrogen_env->frame_type() == JS_FUNCTION) {
@@ -1164,7 +1032,8 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
return DefineFixedDouble(result, d2);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
- LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
+
+ LOperand* temp = (op == kMathRound) ? FixedTemp(d3) : NULL;
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
switch (op) {
case kMathAbs:
@@ -1231,6 +1100,11 @@ LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
}
+LInstruction* LChunkBuilder::DoRor(HRor* instr) {
+ return DoShift(Token::ROR, instr);
+}
+
+
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
return DoShift(Token::SHR, instr);
}
@@ -1344,7 +1218,8 @@ HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) {
HConstant* constant_val = HConstant::cast(divisor);
int32_t int32_val = constant_val->Integer32Value();
if (LChunkBuilder::HasMagicNumberForDivisor(int32_val)) {
- return constant_val->CopyToRepresentation(Representation::Integer32());
+ return constant_val->CopyToRepresentation(Representation::Integer32(),
+ divisor->block()->zone());
}
}
return NULL;
@@ -1360,7 +1235,7 @@ LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
HConstant::cast(right)->HasInteger32Value() &&
HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value()));
return AssignEnvironment(DefineAsRegister(
- new LMathFloorOfDiv(dividend, divisor, remainder)));
+ new(zone()) LMathFloorOfDiv(dividend, divisor, remainder)));
}
@@ -1477,6 +1352,25 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
}
+LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
+ LOperand* left = NULL;
+ LOperand* right = NULL;
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ left = UseRegisterAtStart(instr->LeastConstantOperand());
+ right = UseOrConstantAtStart(instr->MostConstantOperand());
+ } else {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ left = UseRegisterAtStart(instr->left());
+ right = UseRegisterAtStart(instr->right());
+ }
+ return DefineAsRegister(new(zone()) LMathMinMax(left, right));
+}
+
+
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
ASSERT(instr->representation().IsDouble());
// We call a C function for double power. It can't trigger a GC.
@@ -1642,6 +1536,12 @@ LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
}
+LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
+ LOperand* map = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LMapEnumLength(map));
+}
+
+
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LElementsKind(object));
@@ -1657,13 +1557,14 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), r0);
- LDateField* result = new LDateField(object, FixedTemp(r1), instr->index());
- return MarkAsCall(DefineFixed(result, r0), instr);
+ LDateField* result =
+ new(zone()) LDateField(object, FixedTemp(r1), instr->index());
+ return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1706,16 +1607,14 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegisterAtStart(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
- if (!needs_check) {
- res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
+ if (instr->value()->type().IsSmi()) {
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
: NULL;
- LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(d11)
- : NULL;
+ LOperand* temp3 = FixedTemp(d11);
res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
temp1,
temp2,
@@ -1748,7 +1647,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (to.IsTagged()) {
HValue* val = instr->value();
LOperand* value = UseRegisterAtStart(val);
- if (val->HasRange() && val->range()->IsInSmiRange()) {
+ if (val->CheckFlag(HInstruction::kUint32)) {
+ LNumberTagU* result = new(zone()) LNumberTagU(value);
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ } else if (val->HasRange() && val->range()->IsInSmiRange()) {
return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
LNumberTagI* result = new(zone()) LNumberTagI(value);
@@ -1756,8 +1658,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
}
} else {
ASSERT(to.IsDouble());
- LOperand* value = Use(instr->value());
- return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
+ if (instr->value()->CheckFlag(HInstruction::kUint32)) {
+ return DefineAsRegister(
+ new(zone()) LUint32ToDouble(UseRegister(instr->value())));
+ } else {
+ return DefineAsRegister(
+ new(zone()) LInteger32ToDouble(Use(instr->value())));
+ }
}
}
UNREACHABLE();
@@ -1779,10 +1686,10 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LOperand* temp1 = TempRegister();
+ LUnallocated* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
- return AssignEnvironment(result);
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
+ return AssignEnvironment(Define(result, temp1));
}
@@ -1950,50 +1857,41 @@ LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
}
-LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
- HLoadKeyedFastElement* instr) {
- ASSERT(instr->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
- if (instr->RequiresHoleCheck()) AssignEnvironment(result);
- return DefineAsRegister(result);
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
- HLoadKeyedFastDoubleElement* instr) {
- ASSERT(instr->representation().IsDouble());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* elements = UseTempRegister(instr->elements());
+LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
+ ASSERT(instr->key()->representation().IsInteger32() ||
+ instr->key()->representation().IsTagged());
+ ElementsKind elements_kind = instr->elements_kind();
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastDoubleElement* result =
- new(zone()) LLoadKeyedFastDoubleElement(elements, key);
- return AssignEnvironment(DefineAsRegister(result));
-}
+ LLoadKeyed* result = NULL;
+ if (!instr->is_external()) {
+ LOperand* obj = NULL;
+ if (instr->representation().IsDouble()) {
+ obj = UseTempRegister(instr->elements());
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ obj = UseRegisterAtStart(instr->elements());
+ }
+ result = new(zone()) LLoadKeyed(obj, key);
+ } else {
+ ASSERT(
+ (instr->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+
+ LOperand* external_pointer = UseRegister(instr->elements());
+ result = new(zone()) LLoadKeyed(external_pointer, key);
+ }
-LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
- HLoadKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- LOperand* key = UseRegisterOrConstant(instr->key());
- LLoadKeyedSpecializedArrayElement* result =
- new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
- LInstruction* load_instr = DefineAsRegister(result);
+ DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
- return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
- AssignEnvironment(load_instr) : load_instr;
+ bool can_deoptimize = instr->RequiresHoleCheck() ||
+ (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
+ return can_deoptimize ? AssignEnvironment(result) : result;
}
@@ -2007,63 +1905,37 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
- HStoreKeyedFastElement* instr) {
- bool needs_write_barrier = instr->NeedsWriteBarrier();
- ASSERT(instr->value()->representation().IsTagged());
- ASSERT(instr->object()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* obj = UseTempRegister(instr->object());
- LOperand* val = needs_write_barrier
- ? UseTempRegister(instr->value())
- : UseRegisterAtStart(instr->value());
- LOperand* key = needs_write_barrier
- ? UseTempRegister(instr->key())
- : UseRegisterOrConstantAtStart(instr->key());
- return new(zone()) LStoreKeyedFastElement(obj, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
- HStoreKeyedFastDoubleElement* instr) {
- ASSERT(instr->value()->representation().IsDouble());
- ASSERT(instr->elements()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
+LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* val = UseTempRegister(instr->value());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-
- return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
-}
-
+ LOperand* key;
+ LOperand* val;
+ if (instr->NeedsWriteBarrier()) {
+ key = UseTempRegister(instr->key());
+ val = UseTempRegister(instr->value());
+ } else {
+ key = UseRegisterOrConstantAtStart(instr->key());
+ val = UseRegisterAtStart(instr->value());
+ }
-LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
- HStoreKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->value()->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->value()->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->external_pointer()->representation().IsExternal());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- bool val_is_temp_register =
- elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
- elements_kind == EXTERNAL_FLOAT_ELEMENTS;
- LOperand* val = val_is_temp_register
- ? UseTempRegister(instr->value())
- : UseRegister(instr->value());
- LOperand* key = UseRegisterOrConstant(instr->key());
+#ifdef DEBUG
+ if (!instr->is_external()) {
+ ASSERT(instr->elements()->representation().IsTagged());
+ } else {
+ ElementsKind elements_kind = instr->elements_kind();
+ ASSERT(
+ (instr->value()->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->value()->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ ASSERT(instr->elements()->representation().IsExternal());
+ }
+#endif
- return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
+ ASSERT(result != NULL);
+ return result;
}
@@ -2082,8 +1954,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
@@ -2104,16 +1977,28 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new(zone()) LStoreNamedField(obj, val);
+ // We need a temporary register for write barrier of the map field.
+ LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
+
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2156,7 +2041,8 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new LAllocateObject(TempRegister(), TempRegister());
+ LAllocateObject* result =
+ new(zone()) LAllocateObject(TempRegister(), TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2195,6 +2081,7 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
+ ASSERT(argument_count_ == 0);
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
return AssignEnvironment(new(zone()) LOsrEntry);
@@ -2233,12 +2120,10 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
- LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* args = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result =
- new(zone()) LAccessArgumentsAt(arguments, length, index);
- return AssignEnvironment(DefineAsRegister(result));
+ return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
@@ -2292,7 +2177,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
+ pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
@@ -2318,10 +2203,11 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->function(),
undefined,
instr->call_kind(),
- instr->is_construct());
+ instr->inlining_kind());
if (instr->arguments_var() != NULL) {
inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
+ inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2333,7 +2219,7 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
HEnvironment* env = current_block_->last_environment();
- if (instr->arguments_pushed()) {
+ if (env->entry()->arguments_pushed()) {
int argument_count = env->arguments_environment()->parameter_count();
pop = new(zone()) LDrop(argument_count);
argument_count_ -= argument_count;
@@ -2364,8 +2250,7 @@ LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
- return AssignEnvironment(DefineAsRegister(
- new(zone()) LForInCacheArray(map)));
+ return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
}
diff --git a/src/3rdparty/v8/src/arm/lithium-arm.h b/src/3rdparty/v8/src/arm/lithium-arm.h
index 7e94f88..2c289dd 100644
--- a/src/3rdparty/v8/src/arm/lithium-arm.h
+++ b/src/3rdparty/v8/src/arm/lithium-arm.h
@@ -108,6 +108,7 @@ class LCodeGen;
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
+ V(Uint32ToDouble) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
@@ -115,7 +116,6 @@ class LCodeGen;
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
- V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -125,18 +125,19 @@ class LCodeGen;
V(LoadFunctionPrototype) \
V(LoadGlobalCell) \
V(LoadGlobalGeneric) \
- V(LoadKeyedFastDoubleElement) \
- V(LoadKeyedFastElement) \
+ V(LoadKeyed) \
V(LoadKeyedGeneric) \
- V(LoadKeyedSpecializedArrayElement) \
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MapEnumLength) \
V(MathFloorOfDiv) \
+ V(MathMinMax) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
V(NumberTagI) \
+ V(NumberTagU) \
V(NumberUntagD) \
V(ObjectLiteral) \
V(OsrEntry) \
@@ -154,15 +155,14 @@ class LCodeGen;
V(StoreContextSlot) \
V(StoreGlobalCell) \
V(StoreGlobalGeneric) \
- V(StoreKeyedFastDoubleElement) \
- V(StoreKeyedFastElement) \
+ V(StoreKeyed) \
V(StoreKeyedGeneric) \
- V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
+ V(StringCompareAndBranch) \
V(StringLength) \
V(SubI) \
V(TaggedToI) \
@@ -257,11 +257,6 @@ class LInstruction: public ZoneObject {
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
- virtual int InputCount() = 0;
- virtual LOperand* InputAt(int i) = 0;
- virtual int TempCount() = 0;
- virtual LOperand* TempAt(int i) = 0;
-
LOperand* FirstInput() { return InputAt(0); }
LOperand* Output() { return HasResult() ? result() : NULL; }
@@ -270,6 +265,15 @@ class LInstruction: public ZoneObject {
#endif
private:
+ // Iterator support.
+ friend class InputIterator;
+ virtual int InputCount() = 0;
+ virtual LOperand* InputAt(int i) = 0;
+
+ friend class TempIterator;
+ virtual int TempCount() = 0;
+ virtual LOperand* TempAt(int i) = 0;
+
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
@@ -289,16 +293,17 @@ class LTemplateInstruction: public LInstruction {
void set_result(LOperand* operand) { results_[0] = operand; }
LOperand* result() { return results_[0]; }
- int InputCount() { return I; }
- LOperand* InputAt(int i) { return inputs_[i]; }
-
- int TempCount() { return T; }
- LOperand* TempAt(int i) { return temps_[i]; }
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
EmbeddedContainer<LOperand*, T> temps_;
+
+ private:
+ virtual int InputCount() { return I; }
+ virtual LOperand* InputAt(int i) { return inputs_[i]; }
+
+ virtual int TempCount() { return T; }
+ virtual LOperand* TempAt(int i) { return temps_[i]; }
};
@@ -333,8 +338,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -514,6 +521,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = elements;
}
+ LOperand* elements() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ArgumentsLength, "arguments-length")
};
@@ -540,16 +549,22 @@ class LModI: public LTemplateInstruction<1, 2, 3> {
// Used for the standard case.
LModI(LOperand* left,
LOperand* right,
- LOperand* temp1,
+ LOperand* temp,
LOperand* temp2,
LOperand* temp3) {
inputs_[0] = left;
inputs_[1] = right;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
temps_[2] = temp3;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+ LOperand* temp3() { return temps_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(ModI, "mod-i")
DECLARE_HYDROGEN_ACCESSOR(Mod)
};
@@ -562,6 +577,9 @@ class LDivI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i")
DECLARE_HYDROGEN_ACCESSOR(Div)
};
@@ -577,6 +595,10 @@ class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div")
DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv)
};
@@ -590,6 +612,10 @@ class LMulI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(MulI, "mul-i")
DECLARE_HYDROGEN_ACCESSOR(Mul)
};
@@ -602,6 +628,9 @@ class LCmpIDAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpIDAndBranch, "cmp-id-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareIDAndBranch)
@@ -621,6 +650,9 @@ class LUnaryMathOperation: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(UnaryMathOperation, "unary-math-operation")
DECLARE_HYDROGEN_ACCESSOR(UnaryMathOperation)
@@ -636,6 +668,9 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
"cmp-object-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareObjectEqAndBranch)
@@ -648,6 +683,8 @@ class LCmpConstantEqAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = left;
}
+ LOperand* left() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpConstantEqAndBranch,
"cmp-constant-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareConstantEqAndBranch)
@@ -660,6 +697,8 @@ class LIsNilAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsNilAndBranch, "is-nil-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNilAndBranch)
@@ -677,6 +716,9 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsObjectAndBranch, "is-object-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsObjectAndBranch)
@@ -691,6 +733,9 @@ class LIsStringAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
@@ -704,6 +749,8 @@ class LIsSmiAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsSmiAndBranch, "is-smi-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsSmiAndBranch)
@@ -718,6 +765,9 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsUndetectableAndBranch,
"is-undetectable-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsUndetectableAndBranch)
@@ -733,6 +783,9 @@ class LStringCompareAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
"string-compare-and-branch")
DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
@@ -749,6 +802,8 @@ class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasInstanceTypeAndBranch,
"has-instance-type-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInstanceTypeAndBranch)
@@ -763,6 +818,8 @@ class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
};
@@ -774,6 +831,8 @@ class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasCachedArrayIndexAndBranch,
"has-cached-array-index-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasCachedArrayIndexAndBranch)
@@ -789,6 +848,9 @@ class LClassOfTestAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ClassOfTestAndBranch,
"class-of-test-and-branch")
DECLARE_HYDROGEN_ACCESSOR(ClassOfTestAndBranch)
@@ -804,6 +866,9 @@ class LCmpT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpT, "cmp-t")
DECLARE_HYDROGEN_ACCESSOR(CompareGeneric)
@@ -818,6 +883,9 @@ class LInstanceOf: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
@@ -829,6 +897,9 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
@@ -857,6 +928,7 @@ class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
LOperand* length() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck, "bounds-check")
+ DECLARE_HYDROGEN_ACCESSOR(BoundsCheck)
};
@@ -867,6 +939,9 @@ class LBitI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
Token::Value op() const { return hydrogen()->op(); }
DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i")
@@ -883,7 +958,8 @@ class LShiftI: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
-
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
bool can_deopt() const { return can_deopt_; }
DECLARE_CONCRETE_INSTRUCTION(ShiftI, "shift-i")
@@ -901,6 +977,9 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(SubI, "sub-i")
DECLARE_HYDROGEN_ACCESSOR(Sub)
};
@@ -939,6 +1018,8 @@ class LBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")
DECLARE_HYDROGEN_ACCESSOR(Branch)
@@ -953,6 +1034,9 @@ class LCmpMapAndBranch: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareMap)
@@ -974,6 +1058,8 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength, "js-array-length")
DECLARE_HYDROGEN_ACCESSOR(JSArrayLength)
};
@@ -985,18 +1071,34 @@ class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
"fixed-array-base-length")
DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
+class LMapEnumLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LMapEnumLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MapEnumLength, "map-enum-length")
+};
+
+
class LElementsKind: public LTemplateInstruction<1, 1, 0> {
public:
explicit LElementsKind(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ElementsKind, "elements-kind")
DECLARE_HYDROGEN_ACCESSOR(ElementsKind)
};
@@ -1009,6 +1111,9 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "value-of")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
};
@@ -1021,40 +1126,26 @@ class LDateField: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* date() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ Smi* index() const { return index_; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
- Smi* index() const { return index_; }
private:
Smi* index_;
};
-class LSetDateField: public LTemplateInstruction<1, 2, 1> {
- public:
- LSetDateField(LOperand* date, LOperand* value, LOperand* temp, int index)
- : index_(index) {
- inputs_[0] = date;
- inputs_[1] = value;
- temps_[0] = temp;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(DateField, "date-set-field")
- DECLARE_HYDROGEN_ACCESSOR(DateField)
-
- int index() const { return index_; }
-
- private:
- int index_;
-};
-
-
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Throw, "throw")
};
@@ -1065,6 +1156,8 @@ class LBitNotI: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
};
@@ -1076,11 +1169,29 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
DECLARE_HYDROGEN_ACCESSOR(Add)
};
+class LMathMinMax: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LMathMinMax(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathMinMax, "min-max")
+ DECLARE_HYDROGEN_ACCESSOR(MathMinMax)
+};
+
+
class LPower: public LTemplateInstruction<1, 2, 0> {
public:
LPower(LOperand* left, LOperand* right) {
@@ -1088,6 +1199,9 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(Power, "power")
DECLARE_HYDROGEN_ACCESSOR(Power)
};
@@ -1099,6 +1213,8 @@ class LRandom: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Random, "random")
DECLARE_HYDROGEN_ACCESSOR(Random)
};
@@ -1113,6 +1229,8 @@ class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
virtual Opcode opcode() const { return LInstruction::kArithmeticD; }
virtual void CompileToNative(LCodeGen* generator);
@@ -1131,12 +1249,14 @@ class LArithmeticT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ Token::Value op() const { return op_; }
+
virtual Opcode opcode() const { return LInstruction::kArithmeticT; }
virtual void CompileToNative(LCodeGen* generator);
virtual const char* Mnemonic() const;
- Token::Value op() const { return op_; }
-
private:
Token::Value op_;
};
@@ -1148,6 +1268,8 @@ class LReturn: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
};
@@ -1158,6 +1280,8 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedField)
};
@@ -1169,10 +1293,10 @@ class LLoadNamedFieldPolymorphic: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field-polymorphic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedFieldPolymorphic)
-
- LOperand* object() { return inputs_[0]; }
};
@@ -1182,10 +1306,11 @@ class LLoadNamedGeneric: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric, "load-named-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedGeneric)
- LOperand* object() { return inputs_[0]; }
Handle<Object> name() const { return hydrogen()->name(); }
};
@@ -1196,10 +1321,10 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadFunctionPrototype, "load-function-prototype")
DECLARE_HYDROGEN_ACCESSOR(LoadFunctionPrototype)
-
- LOperand* function() { return inputs_[0]; }
};
@@ -1209,6 +1334,8 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
};
@@ -1219,75 +1346,47 @@ class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadExternalArrayPointer,
"load-external-array-pointer")
};
-class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyed: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
+ LLoadKeyed(LOperand* elements, LOperand* key) {
inputs_[0] = elements;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement, "load-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastElement)
-
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
- inputs_[0] = elements;
- inputs_[1] = key;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
}
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement,
- "load-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastDoubleElement)
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
+ bool is_external() const {
+ return hydrogen()->is_external();
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
- "load-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(LoadKeyed)
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
- inputs_[0] = obj;
+ LLoadKeyedGeneric(LOperand* object, LOperand* key) {
+ inputs_[0] = object;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
};
@@ -1304,10 +1403,11 @@ class LLoadGlobalGeneric: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadGlobalGeneric, "load-global-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadGlobalGeneric)
- LOperand* global_object() { return inputs_[0]; }
Handle<Object> name() const { return hydrogen()->name(); }
bool for_typeof() const { return hydrogen()->for_typeof(); }
};
@@ -1320,10 +1420,11 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
-
- LOperand* value() { return inputs_[0]; }
};
@@ -1335,12 +1436,13 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* global_object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalGeneric, "store-global-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalGeneric)
- LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
- LOperand* value() { return InputAt(1); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1351,10 +1453,11 @@ class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
- LOperand* context() { return InputAt(0); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1368,11 +1471,12 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot, "store-context-slot")
DECLARE_HYDROGEN_ACCESSOR(StoreContextSlot)
- LOperand* context() { return InputAt(0); }
- LOperand* value() { return InputAt(1); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1385,6 +1489,8 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(PushArgument, "push-argument")
};
@@ -1421,9 +1527,9 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
- DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
+ LOperand* context() { return inputs_[0]; }
- LOperand* context() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
};
@@ -1443,8 +1549,9 @@ class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
- LOperand* context() { return InputAt(0); }
+ LOperand* context() { return inputs_[0]; }
bool qml_global() { return qml_global_; }
+
private:
bool qml_global_;
};
@@ -1456,9 +1563,9 @@ class LGlobalReceiver: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
- DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
+ LOperand* global_object() { return inputs_[0]; }
- LOperand* global() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
};
@@ -1480,11 +1587,11 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
- LOperand* function() { return inputs_[0]; }
-
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1498,6 +1605,8 @@ class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = key;
}
+ LOperand* key() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call-keyed")
DECLARE_HYDROGEN_ACCESSOR(CallKeyed)
@@ -1526,10 +1635,11 @@ class LCallFunction: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- LOperand* function() { return inputs_[0]; }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1570,6 +1680,8 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = constructor;
}
+ LOperand* constructor() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
DECLARE_HYDROGEN_ACCESSOR(CallNew)
@@ -1595,28 +1707,60 @@ class LInteger32ToDouble: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Integer32ToDouble, "int32-to-double")
};
+class LUint32ToDouble: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LUint32ToDouble(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Uint32ToDouble, "uint32-to-double")
+};
+
+
class LNumberTagI: public LTemplateInstruction<1, 1, 0> {
public:
explicit LNumberTagI(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagI, "number-tag-i")
};
+class LNumberTagU: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LNumberTagU(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(NumberTagU, "number-tag-u")
+};
+
+
class LNumberTagD: public LTemplateInstruction<1, 1, 2> {
public:
- LNumberTagD(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ LNumberTagD(LOperand* value, LOperand* temp, LOperand* temp2) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagD, "number-tag-d")
};
@@ -1624,12 +1768,16 @@ class LNumberTagD: public LTemplateInstruction<1, 1, 2> {
// Sometimes truncating conversion from a tagged value to an int32.
class LDoubleToI: public LTemplateInstruction<1, 1, 2> {
public:
- LDoubleToI(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ LDoubleToI(LOperand* value, LOperand* temp, LOperand* temp2) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(DoubleToI, "double-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1641,15 +1789,20 @@ class LDoubleToI: public LTemplateInstruction<1, 1, 2> {
class LTaggedToI: public LTemplateInstruction<1, 1, 3> {
public:
LTaggedToI(LOperand* value,
- LOperand* temp1,
+ LOperand* temp,
LOperand* temp2,
LOperand* temp3) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
temps_[2] = temp3;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+ LOperand* temp3() { return temps_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(TaggedToI, "tagged-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1663,6 +1816,8 @@ class LSmiTag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(SmiTag, "smi-tag")
};
@@ -1673,6 +1828,8 @@ class LNumberUntagD: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
DECLARE_HYDROGEN_ACCESSOR(Change)
};
@@ -1685,30 +1842,33 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
-
+ LOperand* value() { return inputs_[0]; }
bool needs_check() const { return needs_check_; }
+ DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
+
private:
bool needs_check_;
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val) {
- inputs_[0] = obj;
- inputs_[1] = val;
+ LStoreNamedField(LOperand* object, LOperand* value, LOperand* temp) {
+ inputs_[0] = object;
+ inputs_[1] = value;
+ temps_[0] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedField)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
-
Handle<Object> name() const { return hydrogen()->name(); }
bool is_in_object() { return hydrogen()->is_in_object(); }
int offset() { return hydrogen()->offset(); }
@@ -1718,109 +1878,67 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
public:
- LStoreNamedGeneric(LOperand* obj, LOperand* val) {
- inputs_[0] = obj;
- inputs_[1] = val;
+ LStoreNamedGeneric(LOperand* object, LOperand* value) {
+ inputs_[0] = object;
+ inputs_[1] = value;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
-class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyed: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedFastElement(LOperand* obj, LOperand* key, LOperand* val) {
- inputs_[0] = obj;
+ LStoreKeyed(LOperand* object, LOperand* key, LOperand* value) {
+ inputs_[0] = object;
inputs_[1] = key;
- inputs_[2] = val;
+ inputs_[2] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement,
- "store-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastElement)
-
- virtual void PrintDataTo(StringStream* stream);
-
- LOperand* object() { return inputs_[0]; }
+ bool is_external() const { return hydrogen()->is_external(); }
+ LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedFastDoubleElement(LOperand* elements,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = elements;
- inputs_[1] = key;
- inputs_[2] = val;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement,
- "store-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastDoubleElement)
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyed, "store-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyed)
virtual void PrintDataTo(StringStream* stream);
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-
bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedGeneric(LOperand* obj, LOperand* key, LOperand* val) {
+ LStoreKeyedGeneric(LOperand* obj, LOperand* key, LOperand* value) {
inputs_[0] = obj;
inputs_[1] = key;
- inputs_[2] = val;
+ inputs_[2] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
- virtual void PrintDataTo(StringStream* stream);
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
-};
-class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- inputs_[2] = val;
- }
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
- "store-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)
+ virtual void PrintDataTo(StringStream* stream);
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1828,21 +1946,22 @@ class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
public:
LTransitionElementsKind(LOperand* object,
LOperand* new_map_temp,
- LOperand* temp_reg) {
+ LOperand* temp) {
inputs_[0] = object;
temps_[0] = new_map_temp;
- temps_[1] = temp_reg;
+ temps_[1] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* new_map_temp() { return temps_[0]; }
+ LOperand* temp() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
"transition-elements-kind")
DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* new_map_reg() { return temps_[0]; }
- LOperand* temp_reg() { return temps_[1]; }
Handle<Map> original_map() { return hydrogen()->original_map(); }
Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
};
@@ -1855,11 +1974,11 @@ class LStringAdd: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+ DECLARE_HYDROGEN_ACCESSOR(StringAdd)
};
@@ -1871,11 +1990,11 @@ class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
- DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
-
LOperand* string() { return inputs_[0]; }
LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
+ DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
};
@@ -1885,10 +2004,10 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = char_code;
}
+ LOperand* char_code() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
-
- LOperand* char_code() { return inputs_[0]; }
};
@@ -1898,10 +2017,10 @@ class LStringLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = string;
}
+ LOperand* string() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
DECLARE_HYDROGEN_ACCESSOR(StringLength)
-
- LOperand* string() { return inputs_[0]; }
};
@@ -1911,7 +2030,7 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
- LOperand* value() { return InputAt(0); }
+ LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
@@ -1924,6 +2043,8 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckInstanceType, "check-instance-type")
DECLARE_HYDROGEN_ACCESSOR(CheckInstanceType)
};
@@ -1935,18 +2056,23 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
+class LCheckPrototypeMaps: public LTemplateInstruction<1, 0, 2> {
public:
- LCheckPrototypeMaps(LOperand* temp1, LOperand* temp2) {
- temps_[0] = temp1;
+ LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
@@ -1961,6 +2087,8 @@ class LCheckSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckSmi, "check-smi")
};
@@ -1971,18 +2099,21 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check-non-smi")
};
class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
public:
- LClampDToUint8(LOperand* value, LOperand* temp) {
- inputs_[0] = value;
+ LClampDToUint8(LOperand* unclamped, LOperand* temp) {
+ inputs_[0] = unclamped;
temps_[0] = temp;
}
LOperand* unclamped() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(ClampDToUint8, "clamp-d-to-uint8")
};
@@ -1990,8 +2121,8 @@ class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LClampIToUint8(LOperand* value) {
- inputs_[0] = value;
+ explicit LClampIToUint8(LOperand* unclamped) {
+ inputs_[0] = unclamped;
}
LOperand* unclamped() { return inputs_[0]; }
@@ -2002,12 +2133,13 @@ class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
public:
- LClampTToUint8(LOperand* value, LOperand* temp) {
- inputs_[0] = value;
+ LClampTToUint8(LOperand* unclamped, LOperand* temp) {
+ inputs_[0] = unclamped;
temps_[0] = temp;
}
LOperand* unclamped() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(ClampTToUint8, "clamp-t-to-uint8")
};
@@ -2015,11 +2147,14 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
public:
- LAllocateObject(LOperand* temp1, LOperand* temp2) {
- temps_[0] = temp1;
+ LAllocateObject(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
};
@@ -2068,6 +2203,8 @@ class LToFastProperties: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ToFastProperties, "to-fast-properties")
DECLARE_HYDROGEN_ACCESSOR(ToFastProperties)
};
@@ -2079,6 +2216,8 @@ class LTypeof: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Typeof, "typeof")
};
@@ -2089,6 +2228,8 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TypeofIsAndBranch, "typeof-is-and-branch")
DECLARE_HYDROGEN_ACCESSOR(TypeofIsAndBranch)
@@ -2104,6 +2245,8 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
"is-construct-call-and-branch")
};
@@ -2111,15 +2254,15 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
class LDeleteProperty: public LTemplateInstruction<1, 2, 0> {
public:
- LDeleteProperty(LOperand* obj, LOperand* key) {
- inputs_[0] = obj;
+ LDeleteProperty(LOperand* object, LOperand* key) {
+ inputs_[0] = object;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
};
@@ -2229,63 +2372,13 @@ class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
class LChunkBuilder;
-class LChunk: public ZoneObject {
+class LPlatformChunk: public LChunk {
public:
- explicit LChunk(CompilationInfo* info, HGraph* graph);
-
- void AddInstruction(LInstruction* instruction, HBasicBlock* block);
- LConstantOperand* DefineConstantOperand(HConstant* constant);
- Handle<Object> LookupLiteral(LConstantOperand* operand) const;
- Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
+ LPlatformChunk(CompilationInfo* info, HGraph* graph)
+ : LChunk(info, graph) { }
int GetNextSpillIndex(bool is_double);
LOperand* GetNextSpillSlot(bool is_double);
-
- int ParameterAt(int index);
- int GetParameterStackSlot(int index) const;
- int spill_slot_count() const { return spill_slot_count_; }
- CompilationInfo* info() const { return info_; }
- HGraph* graph() const { return graph_; }
- const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
- void AddGapMove(int index, LOperand* from, LOperand* to);
- LGap* GetGapAt(int index) const;
- bool IsGapAt(int index) const;
- int NearestGapPos(int index) const;
- void MarkEmptyBlocks();
- const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
- LLabel* GetLabel(int block_id) const {
- HBasicBlock* block = graph_->blocks()->at(block_id);
- int first_instruction = block->first_instruction_index();
- return LLabel::cast(instructions_[first_instruction]);
- }
- int LookupDestination(int block_id) const {
- LLabel* cur = GetLabel(block_id);
- while (cur->replacement() != NULL) {
- cur = cur->replacement();
- }
- return cur->block_id();
- }
- Label* GetAssemblyLabel(int block_id) const {
- LLabel* label = GetLabel(block_id);
- ASSERT(!label->HasReplacement());
- return label->label();
- }
-
- const ZoneList<Handle<JSFunction> >* inlined_closures() const {
- return &inlined_closures_;
- }
-
- void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
- }
-
- private:
- int spill_slot_count_;
- CompilationInfo* info_;
- HGraph* const graph_;
- ZoneList<LInstruction*> instructions_;
- ZoneList<LPointerMap*> pointer_maps_;
- ZoneList<Handle<JSFunction> > inlined_closures_;
};
@@ -2295,7 +2388,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2304,10 +2397,10 @@ class LChunkBuilder BASE_EMBEDDED {
allocator_(allocator),
position_(RelocInfo::kNoPosition),
instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(AstNode::kNoNumber) { }
+ pending_deoptimization_ast_id_(BailoutId::None()) { }
// Build the sequence for the graph.
- LChunk* Build();
+ LPlatformChunk* Build();
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) LInstruction* Do##type(H##type* node);
@@ -2326,7 +2419,7 @@ class LChunkBuilder BASE_EMBEDDED {
ABORTED
};
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
Zone* zone() const { return zone_; }
@@ -2336,7 +2429,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
@@ -2426,7 +2519,7 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* DoArithmeticT(Token::Value op,
HArithmeticBinaryOperation* instr);
- LChunk* chunk_;
+ LPlatformChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Zone* zone_;
@@ -2438,7 +2531,7 @@ class LChunkBuilder BASE_EMBEDDED {
LAllocator* allocator_;
int position_;
LInstruction* instruction_pending_deoptimization_environment_;
- int pending_deoptimization_ast_id_;
+ BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc b/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
index 90060a9..67773ee 100644
--- a/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
@@ -91,17 +91,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LCodeGen::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -127,6 +118,8 @@ void LCodeGen::Comment(const char* format, ...) {
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -145,15 +138,23 @@ bool LCodeGen::GeneratePrologue() {
// function calls.
if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
+ Label begin;
+ __ bind(&begin);
__ cmp(r5, Operand(0));
__ b(eq, &ok);
int receiver_offset = scope()->num_parameters() * kPointerSize;
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ str(r2, MemOperand(sp, receiver_offset));
__ bind(&ok);
+ //ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
}
+ // The following three instructions must remain together and unmodified for
+ // code aging to work properly.
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+ // Add unused load of ip to ensure prologue sequence is identical for
+ // full-codegen and lithium-codegen.
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
// Reserve space for the stack slots needed by the code.
@@ -323,7 +324,8 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
return ToRegister(op->index());
} else if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk_->LookupConstant(const_op);
+ Handle<Object> literal = constant->handle();
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
ASSERT(literal->IsNumber());
@@ -361,7 +363,8 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
return ToDoubleRegister(op->index());
} else if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk_->LookupConstant(const_op);
+ Handle<Object> literal = constant->handle();
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
ASSERT(literal->IsNumber());
@@ -387,9 +390,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
- Handle<Object> literal = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
- return literal;
+ return constant->handle();
}
@@ -399,33 +402,33 @@ bool LCodeGen::IsInteger32(LConstantOperand* op) const {
int LCodeGen::ToInteger32(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
- value->Number());
- return static_cast<int32_t>(value->Number());
+ ASSERT(constant->HasInteger32Value());
+ return constant->Integer32Value();
}
double LCodeGen::ToDouble(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
- return value->Number();
+ HConstant* constant = chunk_->LookupConstant(op);
+ ASSERT(constant->HasDoubleValue());
+ return constant->DoubleValue();
}
Operand LCodeGen::ToOperand(LOperand* op) {
if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk()->LookupConstant(const_op);
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
- ASSERT(literal->IsNumber());
- return Operand(static_cast<int32_t>(literal->Number()));
+ ASSERT(constant->HasInteger32Value());
+ return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
Abort("ToOperand Unsupported double immediate.");
}
ASSERT(r.IsTagged());
- return Operand(literal);
+ return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
@@ -470,7 +473,9 @@ MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
void LCodeGen::WriteTranslation(LEnvironment* environment,
- Translation* translation) {
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count) {
if (environment == NULL) return;
// The translation includes one command per value in the environment.
@@ -478,8 +483,21 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
// The output frame height does not include the parameters.
int height = translation_size - environment->parameter_count();
- WriteTranslation(environment->outer(), translation);
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
+ // Function parameters are arguments to the outermost environment. The
+ // arguments index points to the first element of a sequence of tagged
+ // values on the stack that represent the arguments. This needs to be
+ // kept in sync with the LArgumentsElements implementation.
+ *arguments_index = -environment->parameter_count();
+ *arguments_count = environment->parameter_count();
+
+ WriteTranslation(environment->outer(),
+ translation,
+ arguments_index,
+ arguments_count);
+ int closure_id = *info()->closure() != *environment->closure()
+ ? DefineDeoptimizationLiteral(environment->closure())
+ : Translation::kSelfLiteralId;
+
switch (environment->frame_type()) {
case JS_FUNCTION:
translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -487,12 +505,31 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
case JS_CONSTRUCT:
translation->BeginConstructStubFrame(closure_id, translation_size);
break;
+ case JS_GETTER:
+ ASSERT(translation_size == 1);
+ ASSERT(height == 0);
+ translation->BeginGetterStubFrame(closure_id);
+ break;
+ case JS_SETTER:
+ ASSERT(translation_size == 2);
+ ASSERT(height == 0);
+ translation->BeginSetterStubFrame(closure_id);
+ break;
case ARGUMENTS_ADAPTOR:
translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
break;
- default:
- UNREACHABLE();
}
+
+ // Inlined frames which push their arguments cause the index to be
+ // bumped and a new stack area to be used for materialization.
+ if (environment->entry() != NULL &&
+ environment->entry()->arguments_pushed()) {
+ *arguments_index = *arguments_index < 0
+ ? GetStackSlotCount()
+ : *arguments_index + *arguments_count;
+ *arguments_count = environment->entry()->arguments_count() + 1;
+ }
+
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -503,7 +540,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
translation->MarkDuplicate();
AddToTranslation(translation,
environment->spilled_registers()[value->index()],
- environment->HasTaggedValueAt(i));
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
} else if (
value->IsDoubleRegister() &&
environment->spilled_double_registers()[value->index()] != NULL) {
@@ -511,26 +551,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
AddToTranslation(
translation,
environment->spilled_double_registers()[value->index()],
- false);
+ false,
+ false,
+ *arguments_index,
+ *arguments_count);
}
}
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
+ AddToTranslation(translation,
+ value,
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
}
}
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged) {
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count) {
if (op == NULL) {
// TODO(twuerthinger): Introduce marker operands to indicate that this value
// is not present and must be reconstructed from the deoptimizer. Currently
// this is only used for the arguments object.
- translation->StoreArgumentsObject();
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
} else if (op->IsStackSlot()) {
if (is_tagged) {
translation->StoreStackSlot(op->index());
+ } else if (is_uint32) {
+ translation->StoreUint32StackSlot(op->index());
} else {
translation->StoreInt32StackSlot(op->index());
}
@@ -544,6 +597,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
Register reg = ToRegister(op);
if (is_tagged) {
translation->StoreRegister(reg);
+ } else if (is_uint32) {
+ translation->StoreUint32Register(reg);
} else {
translation->StoreInt32Register(reg);
}
@@ -551,8 +606,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
DoubleRegister reg = ToDoubleRegister(op);
translation->StoreDoubleRegister(reg);
} else if (op->IsConstantOperand()) {
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
- int src_index = DefineDeoptimizationLiteral(literal);
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
translation->StoreLiteral(src_index);
} else {
UNREACHABLE();
@@ -562,19 +617,24 @@ void LCodeGen::AddToTranslation(Translation* translation,
void LCodeGen::CallCode(Handle<Code> code,
RelocInfo::Mode mode,
- LInstruction* instr) {
- CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
+ LInstruction* instr,
+ TargetAddressStorageMode storage_mode) {
+ CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode);
}
void LCodeGen::CallCodeGeneric(Handle<Code> code,
RelocInfo::Mode mode,
LInstruction* instr,
- SafepointMode safepoint_mode) {
+ SafepointMode safepoint_mode,
+ TargetAddressStorageMode storage_mode) {
ASSERT(instr != NULL);
+ // Block literal pool emission to ensure nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- __ Call(code, mode);
+ __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
RecordSafepointWithLazyDeopt(instr, safepoint_mode);
// Signal that we don't inline smi code before these stubs in the
@@ -626,20 +686,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int frame_count = 0;
int jsframe_count = 0;
+ int args_index = 0;
+ int args_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
- WriteTranslation(environment, &translation);
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
+ WriteTranslation(environment, &translation, &args_index, &args_count);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -671,7 +733,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
(deopt_jump_table_.last().address != entry)) {
- deopt_jump_table_.Add(JumpTableEntry(entry));
+ deopt_jump_table_.Add(JumpTableEntry(entry), zone());
}
__ b(cc, &deopt_jump_table_.last().label);
}
@@ -695,13 +757,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
}
data->SetLiteralArray(*literals);
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
// Populate the deoptimization entries.
for (int i = 0; i < length; i++) {
LEnvironment* env = deoptimizations_[i];
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
+ data->SetAstId(i, env->ast_id());
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
@@ -716,7 +778,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -762,14 +824,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register cp always contains a pointer to the context.
- safepoint.DefinePointerRegister(cp);
+ safepoint.DefinePointerRegister(cp, zone());
}
}
@@ -781,7 +843,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -900,7 +962,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
void LCodeGen::DoModI(LModI* instr) {
if (instr->hydrogen()->HasPowerOf2Divisor()) {
- Register dividend = ToRegister(instr->InputAt(0));
+ Register dividend = ToRegister(instr->left());
Register result = ToRegister(instr->result());
int32_t divisor =
@@ -925,112 +987,135 @@ void LCodeGen::DoModI(LModI* instr) {
}
// These registers hold untagged 32 bit values.
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Register left = ToRegister(instr->left());
+ Register right = ToRegister(instr->right());
Register result = ToRegister(instr->result());
+ Label done;
- Register scratch = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
- DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
- DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
- DwVfpRegister quotient = double_scratch0();
-
- ASSERT(!dividend.is(divisor));
- ASSERT(!dividend.is(quotient));
- ASSERT(!divisor.is(quotient));
- ASSERT(!scratch.is(left));
- ASSERT(!scratch.is(right));
- ASSERT(!scratch.is(result));
+ if (CpuFeatures::IsSupported(SUDIV)) {
+ CpuFeatures::Scope scope(SUDIV);
+ // Check for x % 0.
+ if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
+ __ cmp(right, Operand(0));
+ DeoptimizeIf(eq, instr->environment());
+ }
- Label done, vfp_modulo, both_positive, right_negative;
+ // For r3 = r1 % r2; we can have the following ARM code
+ // sdiv r3, r1, r2
+ // mls r3, r3, r2, r1
- // Check for x % 0.
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
- __ cmp(right, Operand(0));
- DeoptimizeIf(eq, instr->environment());
- }
+ __ sdiv(result, left, right);
+ __ mls(result, result, right, left);
+ __ cmp(result, Operand(0));
+ __ b(ne, &done);
- __ Move(result, left);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ cmp(left, Operand(0));
+ DeoptimizeIf(lt, instr->environment());
+ }
+ } else {
+ Register scratch = scratch0();
+ Register scratch2 = ToRegister(instr->temp());
+ DwVfpRegister dividend = ToDoubleRegister(instr->temp2());
+ DwVfpRegister divisor = ToDoubleRegister(instr->temp3());
+ DwVfpRegister quotient = double_scratch0();
+
+ ASSERT(!dividend.is(divisor));
+ ASSERT(!dividend.is(quotient));
+ ASSERT(!divisor.is(quotient));
+ ASSERT(!scratch.is(left));
+ ASSERT(!scratch.is(right));
+ ASSERT(!scratch.is(result));
+
+ Label vfp_modulo, both_positive, right_negative;
+
+ // Check for x % 0.
+ if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
+ __ cmp(right, Operand(0));
+ DeoptimizeIf(eq, instr->environment());
+ }
- // (0 % x) must yield 0 (if x is finite, which is the case here).
- __ cmp(left, Operand(0));
- __ b(eq, &done);
- // Preload right in a vfp register.
- __ vmov(divisor.low(), right);
- __ b(lt, &vfp_modulo);
+ __ Move(result, left);
- __ cmp(left, Operand(right));
- __ b(lt, &done);
-
- // Check for (positive) power of two on the right hand side.
- __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
- scratch,
- &right_negative,
- &both_positive);
- // Perform modulo operation (scratch contains right - 1).
- __ and_(result, scratch, Operand(left));
- __ b(&done);
+ // (0 % x) must yield 0 (if x is finite, which is the case here).
+ __ cmp(left, Operand(0));
+ __ b(eq, &done);
+ // Preload right in a vfp register.
+ __ vmov(divisor.low(), right);
+ __ b(lt, &vfp_modulo);
- __ bind(&right_negative);
- // Negate right. The sign of the divisor does not matter.
- __ rsb(right, right, Operand(0));
-
- __ bind(&both_positive);
- const int kUnfolds = 3;
- // If the right hand side is smaller than the (nonnegative)
- // left hand side, the left hand side is the result.
- // Else try a few subtractions of the left hand side.
- __ mov(scratch, left);
- for (int i = 0; i < kUnfolds; i++) {
- // Check if the left hand side is less or equal than the
- // the right hand side.
- __ cmp(scratch, Operand(right));
- __ mov(result, scratch, LeaveCC, lt);
+ __ cmp(left, Operand(right));
__ b(lt, &done);
- // If not, reduce the left hand side by the right hand
- // side and check again.
- if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
- }
-
- __ bind(&vfp_modulo);
- // Load the arguments in VFP registers.
- // The divisor value is preloaded before. Be careful that 'right' is only live
- // on entry.
- __ vmov(dividend.low(), left);
- // From here on don't use right as it may have been reallocated (for example
- // to scratch2).
- right = no_reg;
-
- __ vcvt_f64_s32(dividend, dividend.low());
- __ vcvt_f64_s32(divisor, divisor.low());
-
- // We do not care about the sign of the divisor.
- __ vabs(divisor, divisor);
- // Compute the quotient and round it to a 32bit integer.
- __ vdiv(quotient, dividend, divisor);
- __ vcvt_s32_f64(quotient.low(), quotient);
- __ vcvt_f64_s32(quotient, quotient.low());
-
- // Compute the remainder in result.
- DwVfpRegister double_scratch = dividend;
- __ vmul(double_scratch, divisor, quotient);
- __ vcvt_s32_f64(double_scratch.low(), double_scratch);
- __ vmov(scratch, double_scratch.low());
-
- if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- __ sub(result, left, scratch);
- } else {
- Label ok;
- // Check for -0.
- __ sub(scratch2, left, scratch, SetCC);
- __ b(ne, &ok);
- __ cmp(left, Operand(0));
- DeoptimizeIf(mi, instr->environment());
- __ bind(&ok);
- // Load the result and we are done.
- __ mov(result, scratch2);
- }
+ // Check for (positive) power of two on the right hand side.
+ __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
+ scratch,
+ &right_negative,
+ &both_positive);
+ // Perform modulo operation (scratch contains right - 1).
+ __ and_(result, scratch, Operand(left));
+ __ b(&done);
+
+ __ bind(&right_negative);
+ // Negate right. The sign of the divisor does not matter.
+ __ rsb(right, right, Operand(0));
+
+ __ bind(&both_positive);
+ const int kUnfolds = 3;
+ // If the right hand side is smaller than the (nonnegative)
+ // left hand side, the left hand side is the result.
+ // Else try a few subtractions of the left hand side.
+ __ mov(scratch, left);
+ for (int i = 0; i < kUnfolds; i++) {
+ // Check if the left hand side is less or equal than the
+ // the right hand side.
+ __ cmp(scratch, Operand(right));
+ __ mov(result, scratch, LeaveCC, lt);
+ __ b(lt, &done);
+ // If not, reduce the left hand side by the right hand
+ // side and check again.
+ if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
+ }
+
+ __ bind(&vfp_modulo);
+ // Load the arguments in VFP registers.
+ // The divisor value is preloaded before. Be careful that 'right'
+ // is only live on entry.
+ __ vmov(dividend.low(), left);
+ // From here on don't use right as it may have been reallocated
+ // (for example to scratch2).
+ right = no_reg;
+
+ __ vcvt_f64_s32(dividend, dividend.low());
+ __ vcvt_f64_s32(divisor, divisor.low());
+
+ // We do not care about the sign of the divisor.
+ __ vabs(divisor, divisor);
+ // Compute the quotient and round it to a 32bit integer.
+ __ vdiv(quotient, dividend, divisor);
+ __ vcvt_s32_f64(quotient.low(), quotient);
+ __ vcvt_f64_s32(quotient, quotient.low());
+
+ // Compute the remainder in result.
+ DwVfpRegister double_scratch = dividend;
+ __ vmul(double_scratch, divisor, quotient);
+ __ vcvt_s32_f64(double_scratch.low(), double_scratch);
+ __ vmov(scratch, double_scratch.low());
+
+ if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ sub(result, left, scratch);
+ } else {
+ Label ok;
+ // Check for -0.
+ __ sub(scratch2, left, scratch, SetCC);
+ __ b(ne, &ok);
+ __ cmp(left, Operand(0));
+ DeoptimizeIf(mi, instr->environment());
+ __ bind(&ok);
+ // Load the result and we are done.
+ __ mov(result, scratch2);
+ }
+ }
__ bind(&done);
}
@@ -1135,15 +1220,18 @@ void LCodeGen::DoDivI(LDivI* instr) {
DeferredDivI(LCodeGen* codegen, LDivI* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
- codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
+ codegen()->DoDeferredBinaryOpStub(instr_->pointer_map(),
+ instr_->left(),
+ instr_->right(),
+ Token::DIV);
}
virtual LInstruction* instr() { return instr_; }
private:
LDivI* instr_;
};
- const Register left = ToRegister(instr->InputAt(0));
- const Register right = ToRegister(instr->InputAt(1));
+ const Register left = ToRegister(instr->left());
+ const Register right = ToRegister(instr->right());
const Register scratch = scratch0();
const Register result = ToRegister(instr->result());
@@ -1191,7 +1279,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
// Call the stub. The numbers in r0 and r1 have
// to be tagged to Smis. If that is not possible, deoptimize.
- DeferredDivI* deferred = new DeferredDivI(this, instr);
+ DeferredDivI* deferred = new(zone()) DeferredDivI(this, instr);
__ TrySmiTag(left, &deoptimize, scratch);
__ TrySmiTag(right, &deoptimize, scratch);
@@ -1212,15 +1300,15 @@ void LCodeGen::DoDivI(LDivI* instr) {
void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
const Register result = ToRegister(instr->result());
- const Register left = ToRegister(instr->InputAt(0));
- const Register remainder = ToRegister(instr->TempAt(0));
+ const Register left = ToRegister(instr->left());
+ const Register remainder = ToRegister(instr->temp());
const Register scratch = scratch0();
// We only optimize this for division by constants, because the standard
// integer division routine is usually slower than transitionning to VFP.
// This could be optimized on processors with SDIV available.
- ASSERT(instr->InputAt(1)->IsConstantOperand());
- int32_t divisor = ToInteger32(LConstantOperand::cast(instr->InputAt(1)));
+ ASSERT(instr->right()->IsConstantOperand());
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->right()));
if (divisor < 0) {
__ cmp(left, Operand(0));
DeoptimizeIf(eq, instr->environment());
@@ -1238,11 +1326,12 @@ void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
}
-template<int T>
-void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
+void LCodeGen::DoDeferredBinaryOpStub(LPointerMap* pointer_map,
+ LOperand* left_argument,
+ LOperand* right_argument,
Token::Value op) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Register left = ToRegister(left_argument);
+ Register right = ToRegister(right_argument);
PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
// Move left to r1 and right to r0 for the stub call.
@@ -1261,7 +1350,7 @@ void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
}
BinaryOpStub stub(op, OVERWRITE_LEFT);
__ CallStub(&stub);
- RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
+ RecordSafepointWithRegistersAndDoubles(pointer_map,
0,
Safepoint::kNoLazyDeopt);
// Overwrite the stored value of r0 with the result of the stub.
@@ -1273,8 +1362,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
Register scratch = scratch0();
Register result = ToRegister(instr->result());
// Note that result may alias left.
- Register left = ToRegister(instr->InputAt(0));
- LOperand* right_op = instr->InputAt(1);
+ Register left = ToRegister(instr->left());
+ LOperand* right_op = instr->right();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
bool bailout_on_minus_zero =
@@ -1341,7 +1430,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
} else {
Register right = EmitLoadRegister(right_op, scratch);
if (bailout_on_minus_zero) {
- __ orr(ToRegister(instr->TempAt(0)), left, right);
+ __ orr(ToRegister(instr->temp()), left, right);
}
if (can_overflow) {
@@ -1358,7 +1447,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
Label done;
__ cmp(result, Operand(0));
__ b(ne, &done);
- __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
+ __ cmp(ToRegister(instr->temp()), Operand(0));
DeoptimizeIf(mi, instr->environment());
__ bind(&done);
}
@@ -1367,8 +1456,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
void LCodeGen::DoBitI(LBitI* instr) {
- LOperand* left_op = instr->InputAt(0);
- LOperand* right_op = instr->InputAt(1);
+ LOperand* left_op = instr->left();
+ LOperand* right_op = instr->right();
ASSERT(left_op->IsRegister());
Register left = ToRegister(left_op);
Register result = ToRegister(instr->result());
@@ -1401,14 +1490,17 @@ void LCodeGen::DoBitI(LBitI* instr) {
void LCodeGen::DoShiftI(LShiftI* instr) {
// Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
// result may alias either of them.
- LOperand* right_op = instr->InputAt(1);
- Register left = ToRegister(instr->InputAt(0));
+ LOperand* right_op = instr->right();
+ Register left = ToRegister(instr->left());
Register result = ToRegister(instr->result());
Register scratch = scratch0();
if (right_op->IsRegister()) {
// Mask the right_op operand.
__ and_(scratch, ToRegister(right_op), Operand(0x1F));
switch (instr->op()) {
+ case Token::ROR:
+ __ mov(result, Operand(left, ROR, scratch));
+ break;
case Token::SAR:
__ mov(result, Operand(left, ASR, scratch));
break;
@@ -1432,6 +1524,13 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
int value = ToInteger32(LConstantOperand::cast(right_op));
uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
switch (instr->op()) {
+ case Token::ROR:
+ if (shift_count != 0) {
+ __ mov(result, Operand(left, ROR, shift_count));
+ } else {
+ __ Move(result, left);
+ }
+ break;
case Token::SAR:
if (shift_count != 0) {
__ mov(result, Operand(left, ASR, shift_count));
@@ -1466,8 +1565,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
void LCodeGen::DoSubI(LSubI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
LOperand* result = instr->result();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
SBit set_cond = can_overflow ? SetCC : LeaveCC;
@@ -1496,7 +1595,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
ASSERT(instr->result()->IsDoubleRegister());
DwVfpRegister result = ToDoubleRegister(instr->result());
double v = instr->value();
- __ Vmov(result, v);
+ __ Vmov(result, v, scratch0());
}
@@ -1513,21 +1612,28 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}
void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}
+void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register map = ToRegister(instr->value());
+ __ EnumLength(result, map);
+}
+
+
void LCodeGen::DoElementsKind(LElementsKind* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
// Load map into |result|.
__ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
@@ -1540,9 +1646,9 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
void LCodeGen::DoValueOf(LValueOf* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- Register map = ToRegister(instr->TempAt(0));
+ Register map = ToRegister(instr->temp());
Label done;
// If the object is a smi return the object.
@@ -1561,9 +1667,9 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
void LCodeGen::DoDateField(LDateField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->date());
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
Smi* index = instr->index();
Label runtime, done;
ASSERT(object.is(result));
@@ -1571,11 +1677,10 @@ void LCodeGen::DoDateField(LDateField* instr) {
ASSERT(!scratch.is(scratch0()));
ASSERT(!scratch.is(object));
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ tst(object, Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
__ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
- __ Assert(eq, "Trying to get date field from non-date.");
-#endif
+ DeoptimizeIf(ne, instr->environment());
if (index->value() == 0) {
__ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
@@ -1601,14 +1706,14 @@ void LCodeGen::DoDateField(LDateField* instr) {
void LCodeGen::DoBitNotI(LBitNotI* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
__ mvn(result, Operand(input));
}
void LCodeGen::DoThrow(LThrow* instr) {
- Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
+ Register input_reg = EmitLoadRegister(instr->value(), ip);
__ push(input_reg);
CallRuntime(Runtime::kThrow, 1, instr);
@@ -1619,8 +1724,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
void LCodeGen::DoAddI(LAddI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
LOperand* result = instr->result();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
SBit set_cond = can_overflow ? SetCC : LeaveCC;
@@ -1639,9 +1744,71 @@ void LCodeGen::DoAddI(LAddI* instr) {
}
+void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
+ Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge;
+ if (instr->hydrogen()->representation().IsInteger32()) {
+ Register left_reg = ToRegister(left);
+ Operand right_op = (right->IsRegister() || right->IsConstantOperand())
+ ? ToOperand(right)
+ : Operand(EmitLoadRegister(right, ip));
+ Register result_reg = ToRegister(instr->result());
+ __ cmp(left_reg, right_op);
+ if (!result_reg.is(left_reg)) {
+ __ mov(result_reg, left_reg, LeaveCC, condition);
+ }
+ __ mov(result_reg, right_op, LeaveCC, NegateCondition(condition));
+ } else {
+ ASSERT(instr->hydrogen()->representation().IsDouble());
+ DoubleRegister left_reg = ToDoubleRegister(left);
+ DoubleRegister right_reg = ToDoubleRegister(right);
+ DoubleRegister result_reg = ToDoubleRegister(instr->result());
+ Label check_nan_left, check_zero, return_left, return_right, done;
+ __ VFPCompareAndSetFlags(left_reg, right_reg);
+ __ b(vs, &check_nan_left);
+ __ b(eq, &check_zero);
+ __ b(condition, &return_left);
+ __ b(al, &return_right);
+
+ __ bind(&check_zero);
+ __ VFPCompareAndSetFlags(left_reg, 0.0);
+ __ b(ne, &return_left); // left == right != 0.
+ // At this point, both left and right are either 0 or -0.
+ if (operation == HMathMinMax::kMathMin) {
+ // We could use a single 'vorr' instruction here if we had NEON support.
+ __ vneg(left_reg, left_reg);
+ __ vsub(result_reg, left_reg, right_reg);
+ __ vneg(result_reg, result_reg);
+ } else {
+ // Since we operate on +0 and/or -0, vadd and vand have the same effect;
+ // the decision for vadd is easy because vand is a NEON instruction.
+ __ vadd(result_reg, left_reg, right_reg);
+ }
+ __ b(al, &done);
+
+ __ bind(&check_nan_left);
+ __ VFPCompareAndSetFlags(left_reg, left_reg);
+ __ b(vs, &return_left); // left == NaN.
+ __ bind(&return_right);
+ if (!right_reg.is(result_reg)) {
+ __ vmov(result_reg, right_reg);
+ }
+ __ b(al, &done);
+
+ __ bind(&return_left);
+ if (!left_reg.is(result_reg)) {
+ __ vmov(result_reg, left_reg);
+ }
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
- DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
- DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
+ DoubleRegister left = ToDoubleRegister(instr->left());
+ DoubleRegister right = ToDoubleRegister(instr->right());
DoubleRegister result = ToDoubleRegister(instr->result());
switch (instr->op()) {
case Token::ADD:
@@ -1680,11 +1847,14 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(r1));
- ASSERT(ToRegister(instr->InputAt(1)).is(r0));
+ ASSERT(ToRegister(instr->left()).is(r1));
+ ASSERT(ToRegister(instr->right()).is(r0));
ASSERT(ToRegister(instr->result()).is(r0));
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
+ // Block literal pool emission to ensure nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -1723,11 +1893,11 @@ void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32()) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
__ cmp(reg, Operand(0));
EmitBranch(true_block, false_block, ne);
} else if (r.IsDouble()) {
- DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister reg = ToDoubleRegister(instr->value());
Register scratch = scratch0();
// Test the double value. Zero and NaN are false.
@@ -1736,7 +1906,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
EmitBranch(true_block, false_block, eq);
} else {
ASSERT(r.IsTagged());
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
@@ -1875,8 +2045,8 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
Condition cond = TokenToCondition(instr->op(), false);
@@ -1916,8 +2086,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Register left = ToRegister(instr->left());
+ Register right = ToRegister(instr->right());
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1927,7 +2097,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
+ Register left = ToRegister(instr->left());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1938,7 +2108,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
Register scratch = scratch0();
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int false_block = chunk_->LookupDestination(instr->false_block_id());
// If the expression is known to be untagged or a smi, then it's definitely
@@ -2006,8 +2176,8 @@ Condition LCodeGen::EmitIsObject(Register input,
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp1 = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -2032,8 +2202,8 @@ Condition LCodeGen::EmitIsString(Register input,
void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp1 = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -2050,15 +2220,15 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
- Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
+ Register input_reg = EmitLoadRegister(instr->value(), ip);
__ tst(input_reg, Operand(kSmiTagMask));
EmitBranch(true_block, false_block, eq);
}
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -2128,7 +2298,7 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Register scratch = scratch0();
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -2143,12 +2313,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- if (FLAG_debug_code) {
- __ AbortIfNotString(input);
- }
+ __ AssertString(input);
__ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
__ IndexFromHash(result, result);
@@ -2157,7 +2325,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register scratch = scratch0();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -2238,9 +2406,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register temp = scratch0();
- Register temp2 = ToRegister(instr->TempAt(0));
+ Register temp2 = ToRegister(instr->temp());
Handle<String> class_name = instr->hydrogen()->class_name();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -2256,8 +2424,8 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = instr->true_block_id();
int false_block = instr->false_block_id();
@@ -2268,8 +2436,8 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0.
- ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1.
+ ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
+ ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -2297,11 +2465,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
- Register object = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register object = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
ASSERT(object.is(r0));
@@ -2316,20 +2484,26 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Label cache_miss;
Register map = temp;
__ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
- __ bind(deferred->map_check()); // Label for calculating code patching.
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch with
- // the cached map.
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
- __ mov(ip, Operand(Handle<Object>(cell)));
- __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
- __ cmp(map, Operand(ip));
- __ b(ne, &cache_miss);
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch
- // with true or false.
- __ mov(result, Operand(factory()->the_hole_value()));
+ {
+ // Block constant pool emission to ensure the positions of instructions are
+ // as expected by the patcher. See InstanceofStub::Generate().
+ Assembler::BlockConstPoolScope block_const_pool(masm());
+ __ bind(deferred->map_check()); // Label for calculating code patching.
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch with
+ // the cached map.
+ PredictableCodeSizeScope predictable(masm_);
+ Handle<JSGlobalPropertyCell> cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
+ __ cmp(map, Operand(ip));
+ __ b(ne, &cache_miss);
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch
+ // with true or false.
+ __ mov(result, Operand(factory()->the_hole_value()));
+ }
__ b(&done);
// The inlined call site cache did not match. Check null and string before
@@ -2376,15 +2550,24 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// Get the temp register reserved by the instruction. This needs to be r4 as
// its slot of the pushing of safepoint registers is used to communicate the
// offset to the location of the map check.
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
ASSERT(temp.is(r4));
__ LoadHeapObject(InstanceofStub::right(), instr->function());
- static const int kAdditionalDelta = 4;
+ static const int kAdditionalDelta = 5;
+  // Make sure that code size is predictable, since we use specific constant
+  // offsets in the code to find embedded values.
+ PredictableCodeSizeScope predictable(masm_);
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
__ BlockConstPoolFor(kAdditionalDelta);
__ mov(temp, Operand(delta * kPointerSize));
+ // The mov above can generate one or two instructions. The delta was computed
+ // for two instructions, so we need to pad here in case of one instruction.
+ if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) {
+ ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta));
+ __ nop();
+ }
__ StoreToSafepointRegisterSlot(temp, temp);
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
@@ -2467,7 +2650,7 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
// it as no longer deleted.
if (instr->hydrogen()->RequiresHoleCheck()) {
// We use a temp to check the payload (CompareRoot might clobber ip).
- Register payload = ToRegister(instr->TempAt(0));
+ Register payload = ToRegister(instr->temp());
__ ldr(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
__ CompareRoot(payload, Heap::kTheHoleValueRootIndex);
DeoptimizeIf(eq, instr->environment());
@@ -2546,7 +2729,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
if (instr->hydrogen()->is_in_object()) {
__ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
@@ -2560,12 +2743,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
- type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ type->LookupDescriptor(NULL, *name, &lookup);
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2577,9 +2760,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
+ Heap* heap = type->GetHeap();
+ while (*current != heap->null_value()) {
+ __ LoadHeapObject(result, current);
+ __ ldr(result, FieldMemOperand(result, HeapObject::kMapOffset));
+ __ cmp(result, Operand(Handle<Map>(current->map())));
+ DeoptimizeIf(ne, env);
+ current =
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
@@ -2587,7 +2784,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
- Register scratch = scratch0();
+ Register object_map = scratch0();
int map_count = instr->hydrogen()->types()->length();
bool need_generic = instr->hydrogen()->need_generic();
@@ -2598,18 +2795,24 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
}
Handle<String> name = instr->hydrogen()->name();
Label done;
- __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
for (int i = 0; i < map_count; ++i) {
bool last = (i == map_count - 1);
Handle<Map> map = instr->hydrogen()->types()->at(i);
- __ cmp(scratch, Operand(map));
+ Label check_passed;
+ __ CompareMap(
+ object_map, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
if (last && !need_generic) {
DeoptimizeIf(ne, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
} else {
Label next;
__ b(ne, &next);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
__ b(&done);
__ bind(&next);
}
@@ -2617,7 +2820,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
if (need_generic) {
__ mov(r2, Operand(name));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
__ bind(&done);
}
@@ -2630,7 +2833,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
// Name is always in r2.
__ mov(r2, Operand(instr->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
@@ -2680,7 +2883,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
Register scratch = scratch0();
__ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
@@ -2697,8 +2900,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
__ ubfx(scratch, scratch, Map::kElementsKindShift,
Map::kElementsKindBitCount);
- __ cmp(scratch, Operand(FAST_ELEMENTS));
- __ b(eq, &done);
+ __ cmp(scratch, Operand(GetInitialFastElementsKind()));
+ __ b(lt, &fail);
+ __ cmp(scratch, Operand(TERMINAL_FAST_ELEMENTS_KIND));
+ __ b(le, &done);
__ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ b(lt, &fail);
__ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
@@ -2713,7 +2918,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
void LCodeGen::DoLoadExternalArrayPointer(
LLoadExternalArrayPointer* instr) {
Register to_reg = ToRegister(instr->result());
- Register from_reg = ToRegister(instr->InputAt(0));
+ Register from_reg = ToRegister(instr->object());
__ ldr(to_reg, FieldMemOperand(from_reg,
ExternalArray::kExternalPointerOffset));
}
@@ -2724,82 +2929,16 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register length = ToRegister(instr->length());
Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
-
- // Bailout index is not a valid argument index. Use unsigned check to get
- // negative check for free.
- __ sub(length, length, index, SetCC);
- DeoptimizeIf(ls, instr->environment());
-
// There are two words between the frame pointer and the last argument.
// Subtracting from length accounts for one of them add one more.
+ __ sub(length, length, index);
__ add(length, length, Operand(1));
__ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
- Register elements = ToRegister(instr->elements());
- Register key = EmitLoadRegister(instr->key(), scratch0());
- Register result = ToRegister(instr->result());
- Register scratch = scratch0();
-
- // Load the result.
- __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
- uint32_t offset = FixedArray::kHeaderSize +
- (instr->additional_index() << kPointerSizeLog2);
- __ ldr(result, FieldMemOperand(scratch, offset));
-
- // Check for the hole value.
- if (instr->hydrogen()->RequiresHoleCheck()) {
- __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- __ cmp(result, scratch);
- DeoptimizeIf(eq, instr->environment());
- }
-}
-
-
-void LCodeGen::DoLoadKeyedFastDoubleElement(
- LLoadKeyedFastDoubleElement* instr) {
- Register elements = ToRegister(instr->elements());
- bool key_is_constant = instr->key()->IsConstantOperand();
- Register key = no_reg;
- DwVfpRegister result = ToDoubleRegister(instr->result());
- Register scratch = scratch0();
-
- int shift_size =
- ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
- int constant_key = 0;
- if (key_is_constant) {
- constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
- if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
- }
- } else {
- key = ToRegister(instr->key());
- }
-
- Operand operand = key_is_constant
- ? Operand(((constant_key + instr->additional_index()) << shift_size) +
- FixedDoubleArray::kHeaderSize - kHeapObjectTag)
- : Operand(key, LSL, shift_size);
- __ add(elements, elements, operand);
- if (!key_is_constant) {
- __ add(elements, elements,
- Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
- (instr->additional_index() << shift_size)));
- }
-
- __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
- __ cmp(scratch, Operand(kHoleNanUpper32));
- DeoptimizeIf(eq, instr->environment());
-
- __ vldr(result, elements, 0);
-}
-
-
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
- LLoadKeyedSpecializedArrayElement* instr) {
- Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
+ Register external_pointer = ToRegister(instr->elements());
Register key = no_reg;
ElementsKind elements_kind = instr->elements_kind();
bool key_is_constant = instr->key()->IsConstantOperand();
@@ -2812,15 +2951,17 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
} else {
key = ToRegister(instr->key());
}
- int shift_size = ElementsKindToShiftSize(elements_kind);
- int additional_offset = instr->additional_index() << shift_size;
+ int element_size_shift = ElementsKindToShiftSize(elements_kind);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int additional_offset = instr->additional_index() << element_size_shift;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
CpuFeatures::Scope scope(VFP3);
DwVfpRegister result = ToDoubleRegister(instr->result());
Operand operand = key_is_constant
- ? Operand(constant_key << shift_size)
+ ? Operand(constant_key << element_size_shift)
: Operand(key, LSL, shift_size);
__ add(scratch0(), external_pointer, operand);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
@@ -2831,15 +2972,10 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
}
} else {
Register result = ToRegister(instr->result());
- if (instr->additional_index() != 0 && !key_is_constant) {
- __ add(scratch0(), key, Operand(instr->additional_index()));
- }
- MemOperand mem_operand(key_is_constant
- ? MemOperand(external_pointer,
- (constant_key << shift_size) + additional_offset)
- : (instr->additional_index() == 0
- ? MemOperand(external_pointer, key, LSL, shift_size)
- : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
+ MemOperand mem_operand = PrepareKeyedOperand(
+ key, external_pointer, key_is_constant, constant_key,
+ element_size_shift, shift_size,
+ instr->additional_index(), additional_offset);
switch (elements_kind) {
case EXTERNAL_BYTE_ELEMENTS:
__ ldrsb(result, mem_operand);
@@ -2859,17 +2995,19 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
break;
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
__ ldr(result, mem_operand);
- __ cmp(result, Operand(0x80000000));
- // TODO(danno): we could be more clever here, perhaps having a special
- // version of the stub that detects if the overflow case actually
- // happens, and generate code that returns a double rather than int.
- DeoptimizeIf(cs, instr->environment());
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
+ __ cmp(result, Operand(0x80000000));
+ DeoptimizeIf(cs, instr->environment());
+ }
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2879,12 +3017,142 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
}
+void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
+ Register elements = ToRegister(instr->elements());
+ bool key_is_constant = instr->key()->IsConstantOperand();
+ Register key = no_reg;
+ DwVfpRegister result = ToDoubleRegister(instr->result());
+ Register scratch = scratch0();
+
+ int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int constant_key = 0;
+ if (key_is_constant) {
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+ if (constant_key & 0xF0000000) {
+ Abort("array index constant value too big.");
+ }
+ } else {
+ key = ToRegister(instr->key());
+ }
+
+ Operand operand = key_is_constant
+ ? Operand(((constant_key + instr->additional_index()) <<
+ element_size_shift) +
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag)
+ : Operand(key, LSL, shift_size);
+ __ add(elements, elements, operand);
+ if (!key_is_constant) {
+ __ add(elements, elements,
+ Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+ (instr->additional_index() << element_size_shift)));
+ }
+
+ __ vldr(result, elements, 0);
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+ __ cmp(scratch, Operand(kHoleNanUpper32));
+ DeoptimizeIf(eq, instr->environment());
+ }
+}
+
+
+void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
+ Register elements = ToRegister(instr->elements());
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+ Register store_base = scratch;
+ int offset = 0;
+
+ if (instr->key()->IsConstantOperand()) {
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+ offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+ instr->additional_index());
+ store_base = elements;
+ } else {
+ Register key = EmitLoadRegister(instr->key(), scratch0());
+ // Even though the HLoadKeyed instruction forces the input
+ // representation for the key to be an integer, the input gets replaced
+ // during bound check elimination with the index argument to the bounds
+ // check, which can be tagged, so that case must be handled here, too.
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
+ __ add(scratch, elements,
+ Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ } else {
+ __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+ }
+ offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+ }
+ __ ldr(result, FieldMemOperand(store_base, offset));
+
+ // Check for the hole value.
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ tst(result, Operand(kSmiTagMask));
+ DeoptimizeIf(ne, instr->environment());
+ } else {
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ __ cmp(result, scratch);
+ DeoptimizeIf(eq, instr->environment());
+ }
+ }
+}
+
+
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
+ if (instr->is_external()) {
+ DoLoadKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->representation().IsDouble()) {
+ DoLoadKeyedFixedDoubleArray(instr);
+ } else {
+ DoLoadKeyedFixedArray(instr);
+ }
+}
+
+
+MemOperand LCodeGen::PrepareKeyedOperand(Register key,
+ Register base,
+ bool key_is_constant,
+ int constant_key,
+ int element_size,
+ int shift_size,
+ int additional_index,
+ int additional_offset) {
+ if (additional_index != 0 && !key_is_constant) {
+ additional_index *= 1 << (element_size - shift_size);
+ __ add(scratch0(), key, Operand(additional_index));
+ }
+
+ if (key_is_constant) {
+ return MemOperand(base,
+ (constant_key << element_size) + additional_offset);
+ }
+
+ if (additional_index == 0) {
+ if (shift_size >= 0) {
+ return MemOperand(base, key, LSL, shift_size);
+ } else {
+ ASSERT_EQ(-1, shift_size);
+ return MemOperand(base, key, LSR, 1);
+ }
+ }
+
+ if (shift_size >= 0) {
+ return MemOperand(base, scratch0(), LSL, shift_size);
+ } else {
+ ASSERT_EQ(-1, shift_size);
+ return MemOperand(base, scratch0(), LSR, 1);
+ }
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(r1));
ASSERT(ToRegister(instr->key()).is(r0));
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
@@ -2910,7 +3178,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
- Register elem = ToRegister(instr->InputAt(0));
+ Register elem = ToRegister(instr->elements());
Register result = ToRegister(instr->result());
Label done;
@@ -3029,7 +3297,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
- LOperand* argument = instr->InputAt(0);
+ LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
Abort("DoPushArgument not implemented for double type.");
} else {
@@ -3046,7 +3314,7 @@ void LCodeGen::DoDrop(LDrop* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- __ LoadHeapObject(result, instr->hydrogen()->closure());
+ __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
@@ -3076,12 +3344,14 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register result = ToRegister(instr->result());
- __ ldr(result, ContextOperand(cp, instr->qml_global()?Context::QML_GLOBAL_INDEX:Context::GLOBAL_INDEX));
+ __ ldr(result, ContextOperand(cp, instr->qml_global()
+ ? Context::QML_GLOBAL_OBJECT_INDEX
+ : Context::GLOBAL_OBJECT_INDEX));
}
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
- Register global = ToRegister(instr->global());
+ Register global = ToRegister(instr->global_object());
Register result = ToRegister(instr->result());
__ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
@@ -3103,14 +3373,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ LoadHeapObject(r1, function);
}
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
- }
+ // Change context.
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Set r0 to arguments count if adaption is not needed. Assumes that r0
// is available to write to at this point.
@@ -3147,7 +3411,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
Register scratch = scratch0();
@@ -3213,7 +3477,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
__ cmp(input, Operand(0));
__ Move(result, input, pl);
@@ -3243,7 +3507,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsDouble()) {
- DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
+ DwVfpRegister input = ToDoubleRegister(instr->value());
DwVfpRegister result = ToDoubleRegister(instr->result());
__ vabs(result, input);
} else if (r.IsInteger32()) {
@@ -3251,8 +3515,8 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
} else {
// Representation is tagged.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
- Register input = ToRegister(instr->InputAt(0));
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
+ Register input = ToRegister(instr->value());
// Smi check.
__ JumpIfNotSmi(input, deferred->entry());
// If smi, handle it directly.
@@ -3263,29 +3527,24 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
Register result = ToRegister(instr->result());
- SwVfpRegister single_scratch = double_scratch0().low();
- Register scratch1 = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
+ Register scratch = scratch0();
__ EmitVFPTruncate(kRoundToMinusInf,
- single_scratch,
+ result,
input,
- scratch1,
- scratch2);
+ scratch,
+ double_scratch0());
DeoptimizeIf(ne, instr->environment());
- // Move the result back to general purpose register r0.
- __ vmov(result, single_scratch);
-
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// Test for -0.
Label done;
__ cmp(result, Operand(0));
__ b(ne, &done);
- __ vmov(scratch1, input.high());
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ __ vmov(scratch, input.high());
+ __ tst(scratch, Operand(HeapNumber::kSignMask));
DeoptimizeIf(ne, instr->environment());
__ bind(&done);
}
@@ -3293,8 +3552,9 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
Register result = ToRegister(instr->result());
+ DwVfpRegister double_scratch1 = ToDoubleRegister(instr->temp());
Register scratch = scratch0();
Label done, check_sign_on_zero;
@@ -3319,12 +3579,12 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
DeoptimizeIf(ge, instr->environment());
+ __ Vmov(double_scratch0(), 0.5, scratch);
+ __ vadd(double_scratch0(), input, double_scratch0());
+
// Save the original sign for later comparison.
__ and_(scratch, result, Operand(HeapNumber::kSignMask));
- __ Vmov(double_scratch0(), 0.5);
- __ vadd(double_scratch0(), input, double_scratch0());
-
// Check sign of the result: if the sign changed, the input
// value was in ]0.5, 0[ and the result should be -0.
__ vmov(result, double_scratch0().high());
@@ -3337,12 +3597,11 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
}
__ EmitVFPTruncate(kRoundToMinusInf,
- double_scratch0().low(),
- double_scratch0(),
result,
- scratch);
+ double_scratch0(),
+ scratch,
+ double_scratch1);
DeoptimizeIf(ne, instr->environment());
- __ vmov(result, double_scratch0().low());
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// Test for -0.
@@ -3358,22 +3617,22 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
DoubleRegister result = ToDoubleRegister(instr->result());
__ vsqrt(result, input);
}
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
DoubleRegister result = ToDoubleRegister(instr->result());
- DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp = ToDoubleRegister(instr->temp());
// Note that according to ECMA-262 15.8.2.13:
// Math.pow(-Infinity, 0.5) == Infinity
// Math.sqrt(-Infinity) == NaN
Label done;
- __ vmov(temp, -V8_INFINITY);
+ __ vmov(temp, -V8_INFINITY, scratch0());
__ VFPCompareAndSetFlags(input, temp);
__ vneg(result, temp, eq);
__ b(&done, eq);
@@ -3389,11 +3648,11 @@ void LCodeGen::DoPower(LPower* instr) {
Representation exponent_type = instr->hydrogen()->right()->representation();
// Having marked this as a call, we can use any registers.
// Just make sure that the input/output registers are the expected ones.
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
- ToDoubleRegister(instr->InputAt(1)).is(d2));
- ASSERT(!instr->InputAt(1)->IsRegister() ||
- ToRegister(instr->InputAt(1)).is(r2));
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(d1));
+ ASSERT(!instr->right()->IsDoubleRegister() ||
+ ToDoubleRegister(instr->right()).is(d2));
+ ASSERT(!instr->right()->IsRegister() ||
+ ToRegister(instr->right()).is(r2));
+ ASSERT(ToDoubleRegister(instr->left()).is(d1));
ASSERT(ToDoubleRegister(instr->result()).is(d3));
if (exponent_type.IsTagged()) {
@@ -3428,21 +3687,21 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(d7));
- ASSERT(ToRegister(instr->InputAt(0)).is(r0));
+ ASSERT(ToRegister(instr->global_object()).is(r0));
static const int kSeedSize = sizeof(uint32_t);
STATIC_ASSERT(kPointerSize == kSeedSize);
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
static const int kRandomSeedOffset =
FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
__ ldr(r2, FieldMemOperand(r0, kRandomSeedOffset));
- // r2: FixedArray of the global context's random seeds
+ // r2: FixedArray of the native context's random seeds
// Load state[0].
__ ldr(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
@@ -3590,7 +3849,7 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
int arity = instr->arity();
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -3603,7 +3862,7 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
__ mov(r2, Operand(instr->name()));
- CallCode(ic, mode, instr);
+ CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -3628,7 +3887,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
__ mov(r2, Operand(instr->name()));
- CallCode(ic, mode, instr);
+ CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -3644,7 +3903,7 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
void LCodeGen::DoCallNew(LCallNew* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(r1));
+ ASSERT(ToRegister(instr->constructor()).is(r1));
ASSERT(ToRegister(instr->result()).is(r0));
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
@@ -3669,6 +3928,18 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (!instr->transition().is_null()) {
__ mov(scratch, Operand(instr->transition()));
__ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ Register temp = ToRegister(instr->temp());
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ scratch,
+ temp,
+ kLRHasBeenSaved,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3716,104 +3987,50 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
-void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
- DeoptimizeIf(hs, instr->environment());
-}
-
-
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
- Register value = ToRegister(instr->value());
- Register elements = ToRegister(instr->object());
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- Register scratch = scratch0();
-
- // Do the store.
- if (instr->key()->IsConstantOperand()) {
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
- int offset =
- (ToInteger32(const_operand) + instr->additional_index()) * kPointerSize
- + FixedArray::kHeaderSize;
- __ str(value, FieldMemOperand(elements, offset));
- } else {
- __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
- if (instr->additional_index() != 0) {
- __ add(scratch,
- scratch,
- Operand(instr->additional_index() << kPointerSizeLog2));
+void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand) {
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
+ if (operand->IsRegister()) {
+ __ tst(ToRegister(operand), Operand(kSmiTagMask));
+ } else {
+ __ mov(ip, ToOperand(operand));
+ __ tst(ip, Operand(kSmiTagMask));
}
- __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
- }
-
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- HType type = instr->hydrogen()->value()->type();
- SmiCheck check_needed =
- type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- // Compute address of modified element and store it into key register.
- __ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ RecordWrite(elements,
- key,
- value,
- kLRHasBeenSaved,
- kSaveFPRegs,
- EMIT_REMEMBERED_SET,
- check_needed);
+ DeoptimizeIf(ne, environment);
}
}
-void LCodeGen::DoStoreKeyedFastDoubleElement(
- LStoreKeyedFastDoubleElement* instr) {
- DwVfpRegister value = ToDoubleRegister(instr->value());
- Register elements = ToRegister(instr->elements());
- Register key = no_reg;
- Register scratch = scratch0();
- bool key_is_constant = instr->key()->IsConstantOperand();
- int constant_key = 0;
-
- // Calculate the effective address of the slot in the array to store the
- // double value.
- if (key_is_constant) {
- constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
- if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->length(),
+ instr->length());
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->index(),
+ instr->index());
+ if (instr->index()->IsConstantOperand()) {
+ int constant_index =
+ ToInteger32(LConstantOperand::cast(instr->index()));
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
+ __ mov(ip, Operand(Smi::FromInt(constant_index)));
+ } else {
+ __ mov(ip, Operand(constant_index));
}
+ __ cmp(ip, ToRegister(instr->length()));
} else {
- key = ToRegister(instr->key());
+ __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
}
- int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
- Operand operand = key_is_constant
- ? Operand((constant_key << shift_size) +
- FixedDoubleArray::kHeaderSize - kHeapObjectTag)
- : Operand(key, LSL, shift_size);
- __ add(scratch, elements, operand);
- if (!key_is_constant) {
- __ add(scratch, scratch,
- Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
- }
-
- if (instr->NeedsCanonicalization()) {
- // Check for NaN. All NaNs must be canonicalized.
- __ VFPCompareAndSetFlags(value, value);
- // Only load canonical NaN if the comparison above set the overflow.
- __ Vmov(value,
- FixedDoubleArray::canonical_not_the_hole_nan_as_double(),
- vs);
- }
-
- __ vstr(value, scratch, instr->additional_index() << shift_size);
+ DeoptimizeIf(hs, instr->environment());
}
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
- LStoreKeyedSpecializedArrayElement* instr) {
-
- Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
+ Register external_pointer = ToRegister(instr->elements());
Register key = no_reg;
ElementsKind elements_kind = instr->elements_kind();
bool key_is_constant = instr->key()->IsConstantOperand();
@@ -3826,15 +4043,18 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
} else {
key = ToRegister(instr->key());
}
- int shift_size = ElementsKindToShiftSize(elements_kind);
- int additional_offset = instr->additional_index() << shift_size;
+ int element_size_shift = ElementsKindToShiftSize(elements_kind);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int additional_offset = instr->additional_index() << element_size_shift;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
CpuFeatures::Scope scope(VFP3);
DwVfpRegister value(ToDoubleRegister(instr->value()));
- Operand operand(key_is_constant ? Operand(constant_key << shift_size)
- : Operand(key, LSL, shift_size));
+ Operand operand(key_is_constant
+ ? Operand(constant_key << element_size_shift)
+ : Operand(key, LSL, shift_size));
__ add(scratch0(), external_pointer, operand);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ vcvt_f32_f64(double_scratch0().low(), value);
@@ -3844,16 +4064,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
}
} else {
Register value(ToRegister(instr->value()));
- if (instr->additional_index() != 0 && !key_is_constant) {
- __ add(scratch0(), key, Operand(instr->additional_index()));
- }
- MemOperand mem_operand(key_is_constant
- ? MemOperand(external_pointer,
- ((constant_key + instr->additional_index())
- << shift_size))
- : (instr->additional_index() == 0
- ? MemOperand(external_pointer, key, LSL, shift_size)
- : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
+ MemOperand mem_operand = PrepareKeyedOperand(
+ key, external_pointer, key_is_constant, constant_key,
+ element_size_shift, shift_size,
+ instr->additional_index(), additional_offset);
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -3872,7 +4086,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3882,6 +4099,110 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
}
+void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
+ DwVfpRegister value = ToDoubleRegister(instr->value());
+ Register elements = ToRegister(instr->elements());
+ Register key = no_reg;
+ Register scratch = scratch0();
+ bool key_is_constant = instr->key()->IsConstantOperand();
+ int constant_key = 0;
+
+ // Calculate the effective address of the slot in the array to store the
+ // double value.
+ if (key_is_constant) {
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+ if (constant_key & 0xF0000000) {
+ Abort("array index constant value too big.");
+ }
+ } else {
+ key = ToRegister(instr->key());
+ }
+ int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ Operand operand = key_is_constant
+ ? Operand((constant_key << element_size_shift) +
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag)
+ : Operand(key, LSL, shift_size);
+ __ add(scratch, elements, operand);
+ if (!key_is_constant) {
+ __ add(scratch, scratch,
+ Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ }
+
+ if (instr->NeedsCanonicalization()) {
+ // Check for NaN. All NaNs must be canonicalized.
+ __ VFPCompareAndSetFlags(value, value);
+ // Only load canonical NaN if the comparison above set the overflow.
+ __ Vmov(value,
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double(),
+ no_reg, vs);
+ }
+
+ __ vstr(value, scratch, instr->additional_index() << element_size_shift);
+}
+
+
+void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
+ Register value = ToRegister(instr->value());
+ Register elements = ToRegister(instr->elements());
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
+ : no_reg;
+ Register scratch = scratch0();
+ Register store_base = scratch;
+ int offset = 0;
+
+ // Do the store.
+ if (instr->key()->IsConstantOperand()) {
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+ offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+ instr->additional_index());
+ store_base = elements;
+ } else {
+ // Even though the HLoadKeyed instruction forces the input
+ // representation for the key to be an integer, the input gets replaced
+ // during bound check elimination with the index argument to the bounds
+ // check, which can be tagged, so that case must be handled here, too.
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
+ __ add(scratch, elements,
+ Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ } else {
+ __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+ }
+ offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+ }
+ __ str(value, FieldMemOperand(store_base, offset));
+
+ if (instr->hydrogen()->NeedsWriteBarrier()) {
+ HType type = instr->hydrogen()->value()->type();
+ SmiCheck check_needed =
+ type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+ // Compute address of modified element and store it into key register.
+ __ add(key, store_base, Operand(offset - kHeapObjectTag));
+ __ RecordWrite(elements,
+ key,
+ value,
+ kLRHasBeenSaved,
+ kSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ check_needed);
+ }
+}
+
+
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+ // By cases: external, fast double
+ if (instr->is_external()) {
+ DoStoreKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+ DoStoreKeyedFixedDoubleArray(instr);
+ } else {
+ DoStoreKeyedFixedArray(instr);
+ }
+}
+
+
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(r2));
ASSERT(ToRegister(instr->key()).is(r1));
@@ -3890,13 +4211,13 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
Register object_reg = ToRegister(instr->object());
- Register new_map_reg = ToRegister(instr->new_map_reg());
+ Register new_map_reg = ToRegister(instr->new_map_temp());
Register scratch = scratch0();
Handle<Map> from_map = instr->original_map();
@@ -3909,21 +4230,23 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ cmp(scratch, Operand(from_map));
__ b(ne, &not_applicable);
__ mov(new_map_reg, Operand(to_map));
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
__ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
// Write barrier.
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
scratch, kLRHasBeenSaved, kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(r2));
ASSERT(new_map_reg.is(r3));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(r2));
ASSERT(new_map_reg.is(r3));
__ mov(fixed_object_reg, object_reg);
@@ -3956,7 +4279,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
@@ -3991,9 +4314,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
__ push(index);
}
CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
- if (FLAG_debug_code) {
- __ AbortIfNotSmi(r0);
- }
+ __ AssertSmi(r0);
__ SmiUntag(r0);
__ StoreToSafepointRegisterSlot(r0, result);
}
@@ -4011,7 +4332,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -4048,14 +4369,14 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->InputAt(0));
+ Register string = ToRegister(instr->string());
Register result = ToRegister(instr->result());
__ ldr(result, FieldMemOperand(string, String::kLengthOffset));
}
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
LOperand* output = instr->result();
ASSERT(output->IsDoubleRegister());
@@ -4071,30 +4392,73 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
}
+void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
+ LOperand* input = instr->value();
+ LOperand* output = instr->result();
+
+ SwVfpRegister flt_scratch = double_scratch0().low();
+ __ vmov(flt_scratch, ToRegister(input));
+ __ vcvt_f64_u32(ToDoubleRegister(output), flt_scratch);
+}
+
+
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
class DeferredNumberTagI: public LDeferredCode {
public:
DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
: LDeferredCode(codegen), instr_(instr) { }
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_,
+ instr_->value(),
+ SIGNED_INT32);
+ }
virtual LInstruction* instr() { return instr_; }
private:
LNumberTagI* instr_;
};
- Register src = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(instr->value());
Register dst = ToRegister(instr->result());
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTag(dst, src, SetCC);
__ b(vs, deferred->entry());
__ bind(deferred->exit());
}
-void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
+void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
+ class DeferredNumberTagU: public LDeferredCode {
+ public:
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_,
+ instr_->value(),
+ UNSIGNED_INT32);
+ }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LNumberTagU* instr_;
+ };
+
+ LOperand* input = instr->value();
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register reg = ToRegister(input);
+
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
+ __ cmp(reg, Operand(Smi::kMaxValue));
+ __ b(hi, deferred->entry());
+ __ SmiTag(reg, reg);
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness) {
Label slow;
- Register src = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(value);
Register dst = ToRegister(instr->result());
DoubleRegister dbl_scratch = double_scratch0();
SwVfpRegister flt_scratch = dbl_scratch.low();
@@ -4102,19 +4466,25 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// Preserve the value of all registers.
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- // There was overflow, so bits 30 and 31 of the original integer
- // disagree. Try to allocate a heap number in new space and store
- // the value in there. If that fails, call the runtime system.
Label done;
- if (dst.is(src)) {
- __ SmiUntag(src, dst);
- __ eor(src, src, Operand(0x80000000));
+ if (signedness == SIGNED_INT32) {
+ // There was overflow, so bits 30 and 31 of the original integer
+ // disagree. Try to allocate a heap number in new space and store
+ // the value in there. If that fails, call the runtime system.
+ if (dst.is(src)) {
+ __ SmiUntag(src, dst);
+ __ eor(src, src, Operand(0x80000000));
+ }
+ __ vmov(flt_scratch, src);
+ __ vcvt_f64_s32(dbl_scratch, flt_scratch);
+ } else {
+ __ vmov(flt_scratch, src);
+ __ vcvt_f64_u32(dbl_scratch, flt_scratch);
}
- __ vmov(flt_scratch, src);
- __ vcvt_f64_s32(dbl_scratch, flt_scratch);
+
if (FLAG_inline_new) {
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
+ __ AllocateHeapNumber(r5, r3, r4, r6, &slow, DONT_TAG_RESULT);
__ Move(dst, r5);
__ b(&done);
}
@@ -4129,12 +4499,13 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
__ StoreToSafepointRegisterSlot(ip, dst);
CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
__ Move(dst, r0);
+ __ sub(dst, dst, Operand(kHeapObjectTag));
// Done. Put the value in dbl_scratch into the value of the allocated heap
// number.
__ bind(&done);
- __ sub(ip, dst, Operand(kHeapObjectTag));
- __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
+ __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset);
+ __ add(dst, dst, Operand(kHeapObjectTag));
__ StoreToSafepointRegisterSlot(dst, dst);
}
@@ -4150,22 +4521,25 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
LNumberTagD* instr_;
};
- DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input_reg = ToDoubleRegister(instr->value());
Register scratch = scratch0();
Register reg = ToRegister(instr->result());
- Register temp1 = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ Register temp1 = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
+ // We want the untagged address first for performance
+ __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(),
+ DONT_TAG_RESULT);
} else {
__ jmp(deferred->entry());
}
__ bind(deferred->exit());
- __ sub(ip, reg, Operand(kHeapObjectTag));
- __ vstr(input_reg, ip, HeapNumber::kValueOffset);
+ __ vstr(input_reg, reg, HeapNumber::kValueOffset);
+ // Now that we have finished with the object's real address tag it
+ __ add(reg, reg, Operand(kHeapObjectTag));
}
@@ -4178,18 +4552,19 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
+ __ sub(r0, r0, Operand(kHeapObjectTag));
__ StoreToSafepointRegisterSlot(r0, reg);
}
void LCodeGen::DoSmiTag(LSmiTag* instr) {
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
- __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
+ __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value()));
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
if (instr->needs_check()) {
STATIC_ASSERT(kHeapObjectTag == 1);
@@ -4261,11 +4636,11 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
Register scratch1 = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->temp());
DwVfpRegister double_scratch = double_scratch0();
- SwVfpRegister single_scratch = double_scratch.low();
+ DwVfpRegister double_scratch2 = ToDoubleRegister(instr->temp3());
ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
@@ -4284,8 +4659,8 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
__ cmp(scratch1, Operand(ip));
if (instr->truncating()) {
- Register scratch3 = ToRegister(instr->TempAt(1));
- DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
+ Register scratch3 = ToRegister(instr->temp2());
+ SwVfpRegister single_scratch = double_scratch.low();
ASSERT(!scratch3.is(input_reg) &&
!scratch3.is(scratch1) &&
!scratch3.is(scratch2));
@@ -4320,14 +4695,12 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
__ sub(ip, input_reg, Operand(kHeapObjectTag));
__ vldr(double_scratch, ip, HeapNumber::kValueOffset);
__ EmitVFPTruncate(kRoundToZero,
- single_scratch,
+ input_reg,
double_scratch,
scratch1,
- scratch2,
+ double_scratch2,
kCheckForInexactConversion);
DeoptimizeIf(ne, instr->environment());
- // Load the result.
- __ vmov(input_reg, single_scratch);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
__ cmp(input_reg, Operand(0));
@@ -4352,13 +4725,13 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
LTaggedToI* instr_;
};
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
ASSERT(input->Equals(instr->result()));
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Optimistically untag the input.
// If the input is a HeapObject, SmiUntag will set the carry flag.
@@ -4371,7 +4744,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
LOperand* result = instr->result();
ASSERT(result->IsDoubleRegister());
@@ -4389,14 +4762,14 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Register result_reg = ToRegister(instr->result());
Register scratch1 = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
- DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
- SwVfpRegister single_scratch = double_scratch0().low();
+ Register scratch2 = ToRegister(instr->temp());
+ DwVfpRegister double_input = ToDoubleRegister(instr->value());
Label done;
if (instr->truncating()) {
- Register scratch3 = ToRegister(instr->TempAt(1));
+ Register scratch3 = ToRegister(instr->temp2());
+ SwVfpRegister single_scratch = double_scratch0().low();
__ EmitECMATruncate(result_reg,
double_input,
single_scratch,
@@ -4404,39 +4777,38 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
scratch2,
scratch3);
} else {
- VFPRoundingMode rounding_mode = kRoundToMinusInf;
- __ EmitVFPTruncate(rounding_mode,
- single_scratch,
+ DwVfpRegister double_scratch = double_scratch0();
+ __ EmitVFPTruncate(kRoundToMinusInf,
+ result_reg,
double_input,
scratch1,
- scratch2,
+ double_scratch,
kCheckForInexactConversion);
+
// Deoptimize if we had a vfp invalid exception,
// including inexact operation.
DeoptimizeIf(ne, instr->environment());
- // Retrieve the result.
- __ vmov(result_reg, single_scratch);
}
__ bind(&done);
}
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ tst(ToRegister(input), Operand(kSmiTagMask));
DeoptimizeIf(ne, instr->environment());
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ tst(ToRegister(input), Operand(kSmiTagMask));
DeoptimizeIf(eq, instr->environment());
}
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register scratch = scratch0();
__ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
@@ -4509,7 +4881,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
@@ -4529,7 +4901,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
Register result_reg = ToRegister(instr->result());
- DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp_reg = ToDoubleRegister(instr->temp());
__ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}
@@ -4545,7 +4917,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
Register scratch = scratch0();
Register input_reg = ToRegister(instr->unclamped());
Register result_reg = ToRegister(instr->result());
- DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp_reg = ToDoubleRegister(instr->temp());
Label is_smi, done, heap_number;
// Both smi and heap number cases are handled.
@@ -4579,8 +4951,9 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- Register temp1 = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ ASSERT(instr->temp()->Equals(instr->result()));
+ Register temp1 = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
Handle<JSObject> holder = instr->holder();
Handle<JSObject> current_prototype = instr->prototype();
@@ -4603,7 +4976,6 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
DoCheckMapCommon(temp1, temp2,
Handle<Map>(current_prototype->map()),
ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
- DeoptimizeIf(ne, instr->environment());
}
@@ -4618,11 +4990,12 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
- Register scratch2 = ToRegister(instr->TempAt(1));
+ Register scratch = ToRegister(instr->temp());
+ Register scratch2 = ToRegister(instr->temp2());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
Handle<Map> initial_map(constructor->initial_map());
int instance_size = initial_map->instance_size();
@@ -4690,14 +5063,15 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Heap* heap = isolate()->heap();
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(r1, instr->hydrogen()->boilerplate_object());
// Load map into r2.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -4709,12 +5083,12 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
DeoptimizeIf(ne, instr->environment());
}
- __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+ // Set up the parameters to the stub/runtime call.
+ __ LoadHeapObject(r3, literals);
__ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
- __ mov(r1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
+ __ mov(r1, Operand(isolate()->factory()->empty_fixed_array()));
__ Push(r3, r2, r1);
// Pick the right runtime function or stub to call.
@@ -4808,8 +5182,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
for (int i = 0; i < elements_length; i++) {
int64_t value = double_array->get_representation(i);
// We only support little endian mode...
- int32_t value_low = value & 0xFFFFFFFF;
- int32_t value_high = value >> 32;
+ int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
+ int32_t value_high = static_cast<int32_t>(value >> 32);
int total_offset =
elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
__ mov(r2, Operand(value_low));
@@ -4848,10 +5222,11 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate()->GetElementsKind();
- // Deopt if the literal boilerplate ElementsKind is of a type different than
- // the expected one. The check isn't necessary if the boilerplate has already
- // been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
// Load map into r2.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -4912,7 +5287,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(r0));
+ ASSERT(ToRegister(instr->value()).is(r0));
__ push(r0);
CallRuntime(Runtime::kToFastProperties, 1, instr);
}
@@ -4921,15 +5296,13 @@ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Label materialized;
// Registers will be used as follows:
- // r3 = JS function.
// r7 = literals array.
// r1 = regexp literal.
// r0 = regexp literal clone.
// r2 and r4-r6 are used as temporaries.
- __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
- int literal_offset = FixedArray::kHeaderSize +
- instr->hydrogen()->literal_index() * kPointerSize;
+ int literal_offset =
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
+ __ LoadHeapObject(r7, instr->hydrogen()->literals());
__ ldr(r1, FieldMemOperand(r7, literal_offset));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r1, ip);
@@ -4995,14 +5368,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
void LCodeGen::DoTypeof(LTypeof* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
__ push(input);
CallRuntime(Runtime::kTypeof, 1, instr);
}
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -5092,7 +5465,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register temp1 = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -5126,6 +5499,8 @@ void LCodeGen::EnsureSpaceForLazyDeopt() {
int current_pc = masm()->pc_offset();
int patch_size = Deoptimizer::patch_size();
if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ // Block literal pool emission for duration of padding.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
while (padding_size > 0) {
@@ -5211,6 +5586,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ cmp(sp, Operand(ip));
__ b(hs, &done);
StackCheckStub stub;
+ PredictableCodeSizeScope predictable(masm_);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
EnsureSpaceForLazyDeopt();
__ bind(&done);
@@ -5220,7 +5596,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(lo, deferred_stack_check->entry());
@@ -5291,13 +5667,23 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
+ Label load_cache, done;
+ __ EnumLength(result, map);
+ __ cmp(result, Operand(Smi::FromInt(0)));
+ __ b(ne, &load_cache);
+ __ mov(result, Operand(isolate()->factory()->empty_fixed_array()));
+ __ jmp(&done);
+
+ __ bind(&load_cache);
__ LoadInstanceDescriptors(map, result);
__ ldr(result,
- FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
__ ldr(result,
FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
__ cmp(result, Operand(0));
DeoptimizeIf(eq, instr->environment());
+
+ __ bind(&done);
}
diff --git a/src/3rdparty/v8/src/arm/lithium-codegen-arm.h b/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
index c6a3af7..921285b 100644
--- a/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
+++ b/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
@@ -44,21 +44,24 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
- : chunk_(chunk),
+ : zone_(info->zone()),
+ chunk_(static_cast<LPlatformChunk*>(chunk)),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deopt_jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, info->zone()),
+ deopt_jump_table_(4, info->zone()),
+ deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(info->zone()),
+ deferred_(8, info->zone()),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(info->zone()),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -71,6 +74,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
// LOperand must be a register.
@@ -106,11 +110,17 @@ class LCodeGen BASE_EMBEDDED {
void FinishCode(Handle<Code> code);
// Deferred code support.
- template<int T>
- void DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
+ void DoDeferredBinaryOpStub(LPointerMap* pointer_map,
+ LOperand* left_argument,
+ LOperand* right_argument,
Token::Value op);
void DoDeferredNumberTagD(LNumberTagD* instr);
- void DoDeferredNumberTagI(LNumberTagI* instr);
+
+ enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
+ void DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness);
+
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
@@ -128,8 +138,20 @@ class LCodeGen BASE_EMBEDDED {
void DoParallelMove(LParallelMove* move);
void DoGap(LGap* instr);
+ MemOperand PrepareKeyedOperand(Register key,
+ Register base,
+ bool key_is_constant,
+ int constant_key,
+ int element_size,
+ int shift_size,
+ int additional_index,
+ int additional_offset);
+
// Emit frame translation commands for an environment.
- void WriteTranslation(LEnvironment* environment, Translation* translation);
+ void WriteTranslation(LEnvironment* environment,
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count);
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
@@ -153,7 +175,7 @@ class LCodeGen BASE_EMBEDDED {
return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -173,10 +195,10 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
int GetParameterCount() const { return scope()->num_parameters(); }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -191,14 +213,18 @@ class LCodeGen BASE_EMBEDDED {
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
};
- void CallCode(Handle<Code> code,
- RelocInfo::Mode mode,
- LInstruction* instr);
+ void CallCode(
+ Handle<Code> code,
+ RelocInfo::Mode mode,
+ LInstruction* instr,
+ TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
- void CallCodeGeneric(Handle<Code> code,
- RelocInfo::Mode mode,
- LInstruction* instr,
- SafepointMode safepoint_mode);
+ void CallCodeGeneric(
+ Handle<Code> code,
+ RelocInfo::Mode mode,
+ LInstruction* instr,
+ SafepointMode safepoint_mode,
+ TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
void CallRuntime(const Runtime::Function* function,
int num_arguments,
@@ -239,7 +265,10 @@ class LCodeGen BASE_EMBEDDED {
void AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged);
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
@@ -284,6 +313,10 @@ class LCodeGen BASE_EMBEDDED {
bool deoptimize_on_minus_zero,
LEnvironment* env);
+ void DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand);
+
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
// true and false label should be made, to optimize fallthrough.
@@ -314,7 +347,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -343,8 +377,15 @@ class LCodeGen BASE_EMBEDDED {
};
void EnsureSpaceForLazyDeopt();
-
- LChunk* const chunk_;
+ void DoLoadKeyedExternalArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedArray(LLoadKeyed* instr);
+ void DoStoreKeyedExternalArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedArray(LStoreKeyed* instr);
+
+ Zone* zone_;
+ LPlatformChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc b/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
index cefca47..c100720 100644
--- a/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
@@ -36,7 +36,7 @@ namespace internal {
static const Register kSavedValueRegister = { 9 };
LGapResolver::LGapResolver(LCodeGen* owner)
- : cgen_(owner), moves_(32), root_index_(0), in_cycle_(false),
+ : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
saved_destination_(NULL) { }
@@ -79,7 +79,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
diff --git a/src/3rdparty/v8/src/arm/macro-assembler-arm.cc b/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
index 4da2fec..dcc7149 100644
--- a/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
@@ -108,7 +108,7 @@ void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
int MacroAssembler::CallSize(Register target, Condition cond) {
-#if USE_BLX
+#ifdef USE_BLX
return kInstrSize;
#else
return 2 * kInstrSize;
@@ -121,7 +121,7 @@ void MacroAssembler::Call(Register target, Condition cond) {
BlockConstPoolScope block_const_pool(this);
Label start;
bind(&start);
-#if USE_BLX
+#ifdef USE_BLX
blx(target, cond);
#else
// set lr for return at current pc + 8
@@ -137,7 +137,19 @@ int MacroAssembler::CallSize(
int size = 2 * kInstrSize;
Instr mov_instr = cond | MOV | LeaveCC;
intptr_t immediate = reinterpret_cast<intptr_t>(target);
- if (!Operand(immediate, rmode).is_single_instruction(mov_instr)) {
+ if (!Operand(immediate, rmode).is_single_instruction(this, mov_instr)) {
+ size += kInstrSize;
+ }
+ return size;
+}
+
+
+int MacroAssembler::CallSizeNotPredictableCodeSize(
+ Address target, RelocInfo::Mode rmode, Condition cond) {
+ int size = 2 * kInstrSize;
+ Instr mov_instr = cond | MOV | LeaveCC;
+ intptr_t immediate = reinterpret_cast<intptr_t>(target);
+ if (!Operand(immediate, rmode).is_single_instruction(NULL, mov_instr)) {
size += kInstrSize;
}
return size;
@@ -146,15 +158,29 @@ int MacroAssembler::CallSize(
void MacroAssembler::Call(Address target,
RelocInfo::Mode rmode,
- Condition cond) {
+ Condition cond,
+ TargetAddressStorageMode mode) {
// Block constant pool for the call instruction sequence.
BlockConstPoolScope block_const_pool(this);
Label start;
bind(&start);
-#if USE_BLX
- // On ARMv5 and after the recommended call sequence is:
- // ldr ip, [pc, #...]
- // blx ip
+
+ bool old_predictable_code_size = predictable_code_size();
+ if (mode == NEVER_INLINE_TARGET_ADDRESS) {
+ set_predictable_code_size(true);
+ }
+
+#ifdef USE_BLX
+ // Call sequence on V7 or later may be :
+ // movw ip, #... @ call address low 16
+ // movt ip, #... @ call address high 16
+ // blx ip
+ // @ return address
+ // Or for pre-V7 or values that may be back-patched
+ // to avoid ICache flushes:
+ // ldr ip, [pc, #...] @ call address
+ // blx ip
+ // @ return address
// Statement positions are expected to be recorded when the target
// address is loaded. The mov method will automatically record
@@ -165,21 +191,22 @@ void MacroAssembler::Call(Address target,
mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
blx(ip, cond);
- ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
#else
// Set lr for return at current pc + 8.
mov(lr, Operand(pc), LeaveCC, cond);
// Emit a ldr<cond> pc, [pc + offset of target in constant pool].
mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond);
- ASSERT(kCallTargetAddressOffset == kInstrSize);
#endif
ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start));
+ if (mode == NEVER_INLINE_TARGET_ADDRESS) {
+ set_predictable_code_size(old_predictable_code_size);
+ }
}
int MacroAssembler::CallSize(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id,
+ TypeFeedbackId ast_id,
Condition cond) {
return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}
@@ -187,19 +214,18 @@ int MacroAssembler::CallSize(Handle<Code> code,
void MacroAssembler::Call(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id,
- Condition cond) {
+ TypeFeedbackId ast_id,
+ Condition cond,
+ TargetAddressStorageMode mode) {
Label start;
bind(&start);
ASSERT(RelocInfo::IsCodeTarget(rmode));
- if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
+ if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
SetRecordedAstId(ast_id);
rmode = RelocInfo::CODE_TARGET_WITH_ID;
}
// 'code' is always generated ARM code, never THUMB code
- Call(reinterpret_cast<Address>(code.location()), rmode, cond);
- ASSERT_EQ(CallSize(code, rmode, ast_id, cond),
- SizeOfCodeGeneratedSince(&start));
+ Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}
@@ -265,8 +291,8 @@ void MacroAssembler::Move(Register dst, Register src, Condition cond) {
void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) {
- ASSERT(CpuFeatures::IsSupported(VFP3));
- CpuFeatures::Scope scope(VFP3);
+ ASSERT(CpuFeatures::IsSupported(VFP2));
+ CpuFeatures::Scope scope(VFP2);
if (!dst.is(src)) {
vmov(dst, src);
}
@@ -276,17 +302,15 @@ void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) {
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
Condition cond) {
if (!src2.is_reg() &&
- !src2.must_use_constant_pool() &&
+ !src2.must_output_reloc_info(this) &&
src2.immediate() == 0) {
mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond);
-
- } else if (!src2.is_single_instruction() &&
- !src2.must_use_constant_pool() &&
+ } else if (!src2.is_single_instruction(this) &&
+ !src2.must_output_reloc_info(this) &&
CpuFeatures::IsSupported(ARMv7) &&
IsPowerOf2(src2.immediate() + 1)) {
ubfx(dst, src1, 0,
WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
-
} else {
and_(dst, src1, src2, LeaveCC, cond);
}
@@ -296,7 +320,7 @@ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
Condition cond) {
ASSERT(lsb < 32);
- if (!CpuFeatures::IsSupported(ARMv7)) {
+ if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
and_(dst, src1, Operand(mask), LeaveCC, cond);
if (lsb != 0) {
@@ -311,7 +335,7 @@ void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
Condition cond) {
ASSERT(lsb < 32);
- if (!CpuFeatures::IsSupported(ARMv7)) {
+ if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
and_(dst, src1, Operand(mask), LeaveCC, cond);
int shift_up = 32 - lsb - width;
@@ -339,7 +363,7 @@ void MacroAssembler::Bfi(Register dst,
ASSERT(lsb + width < 32);
ASSERT(!scratch.is(dst));
if (width == 0) return;
- if (!CpuFeatures::IsSupported(ARMv7)) {
+ if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
bic(dst, dst, Operand(mask));
and_(scratch, src, Operand((1 << width) - 1));
@@ -351,12 +375,14 @@ void MacroAssembler::Bfi(Register dst,
}
-void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
+void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
+ Condition cond) {
ASSERT(lsb < 32);
- if (!CpuFeatures::IsSupported(ARMv7)) {
+ if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
- bic(dst, dst, Operand(mask));
+ bic(dst, src, Operand(mask));
} else {
+ Move(dst, src, cond);
bfc(dst, lsb, width, cond);
}
}
@@ -364,7 +390,7 @@ void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
Condition cond) {
- if (!CpuFeatures::IsSupported(ARMv7)) {
+ if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
ASSERT(!dst.is(pc) && !src.rm().is(pc));
ASSERT((satpos >= 0) && (satpos <= 31));
@@ -396,6 +422,16 @@ void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond) {
+ if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
+ !Heap::RootCanBeWrittenAfterInitialization(index)) {
+ Handle<Object> root(isolate()->heap()->roots_array_start()[index]);
+ if (!isolate()->heap()->InNewSpace(*root)) {
+ // The CPU supports fast immediate values, and this root will never
+ // change. We will load it as a relocatable immediate value.
+ mov(destination, Operand(root), LeaveCC, cond);
+ return;
+ }
+ }
ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}
@@ -672,7 +708,7 @@ void MacroAssembler::Ldrd(Register dst1, Register dst2,
ASSERT((src.am() != PreIndex) && (src.am() != NegPreIndex));
// Generate two ldr instructions if ldrd is not available.
- if (CpuFeatures::IsSupported(ARMv7)) {
+ if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
CpuFeatures::Scope scope(ARMv7);
ldrd(dst1, dst2, src, cond);
} else {
@@ -714,7 +750,7 @@ void MacroAssembler::Strd(Register src1, Register src2,
ASSERT((dst.am() != PreIndex) && (dst.am() != NegPreIndex));
// Generate two str instructions if strd is not available.
- if (CpuFeatures::IsSupported(ARMv7)) {
+ if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
CpuFeatures::Scope scope(ARMv7);
strd(src1, src2, dst, cond);
} else {
@@ -777,8 +813,9 @@ void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
void MacroAssembler::Vmov(const DwVfpRegister dst,
const double imm,
+ const Register scratch,
const Condition cond) {
- ASSERT(CpuFeatures::IsEnabled(VFP3));
+ ASSERT(CpuFeatures::IsEnabled(VFP2));
static const DoubleRepresentation minus_zero(-0.0);
static const DoubleRepresentation zero(0.0);
DoubleRepresentation value(imm);
@@ -788,7 +825,7 @@ void MacroAssembler::Vmov(const DwVfpRegister dst,
} else if (value.bits == minus_zero.bits) {
vneg(dst, kDoubleRegZero, cond);
} else {
- vmov(dst, imm, cond);
+ vmov(dst, imm, scratch, cond);
}
}
@@ -930,6 +967,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles,
}
void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) {
+ ASSERT(CpuFeatures::IsSupported(VFP2));
if (use_eabi_hardfloat()) {
Move(dst, d0);
} else {
@@ -1338,31 +1376,32 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
Check(ne, "we should not have an empty lexical context");
#endif
- // Load the global context of the current context.
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ // Load the native context of the current context.
+ int offset =
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
ldr(scratch, FieldMemOperand(scratch, offset));
- ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
// TODO(119): avoid push(holder_reg)/pop(holder_reg)
// Cannot use ip as a temporary in this verification code. Due to the fact
// that ip is clobbered as part of cmp with an object Operand.
push(holder_reg); // Temporarily save holder on the stack.
- // Read the first word and compare to the global_context_map.
+ // Read the first word and compare to the native_context_map.
ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
- LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, "JSGlobalObject::global_context should be a global context.");
+ Check(eq, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg); // Restore holder.
}
// Check if both contexts are the same.
- ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
cmp(scratch, Operand(ip));
b(eq, &same_contexts);
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
// TODO(119): avoid push(holder_reg)/pop(holder_reg)
// Cannot use ip as a temporary in this verification code. Due to the fact
@@ -1374,13 +1413,13 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
Check(ne, "JSGlobalProxy::context() should not be null.");
ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
- LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, "JSGlobalObject::global_context should be a global context.");
+ Check(eq, "JSGlobalObject::native_context should be a native context.");
// Restore ip is not needed. ip is reloaded below.
pop(holder_reg); // Restore holder.
// Restore ip to holder's context.
- ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
}
// Check that the security token in the calling global object is
@@ -1553,7 +1592,11 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
Register topaddr = scratch1;
Register obj_size_reg = scratch2;
mov(topaddr, Operand(new_space_allocation_top));
- mov(obj_size_reg, Operand(object_size));
+ Operand obj_size_operand = Operand(object_size);
+ if (!obj_size_operand.is_single_instruction(this)) {
+ // We are about to steal IP, so we need to load this value first
+ mov(obj_size_reg, obj_size_operand);
+ }
// This code stores a temporary value in ip. This is OK, as the code below
// does not need ip for implicit literal generation.
@@ -1575,7 +1618,13 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
// Calculate new top and bail out if new space is exhausted. Use result
// to calculate the new top.
- add(scratch2, result, Operand(obj_size_reg), SetCC);
+ if (obj_size_operand.is_single_instruction(this)) {
+ // We can add the size as an immediate
+ add(scratch2, result, obj_size_operand, SetCC);
+ } else {
+ // Doesn't fit in an immediate, we have to use the register
+ add(scratch2, result, obj_size_reg, SetCC);
+ }
b(cs, gc_required);
cmp(scratch2, Operand(ip));
b(hi, gc_required);
@@ -1868,10 +1917,12 @@ void MacroAssembler::CompareRoot(Register obj,
void MacroAssembler::CheckFastElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
b(hi, fail);
}
@@ -1879,22 +1930,25 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
b(ls, fail);
- cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
b(hi, fail);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
b(hi, fail);
}
@@ -1962,13 +2016,13 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
// scratch1 is now effective address of the double element
FloatingPointHelper::Destination destination;
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
destination = FloatingPointHelper::kVFPRegisters;
} else {
destination = FloatingPointHelper::kCoreRegisters;
}
- Register untagged_value = receiver_reg;
+ Register untagged_value = elements_reg;
SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(this,
untagged_value,
@@ -1979,7 +2033,7 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
scratch4,
s2);
if (destination == FloatingPointHelper::kVFPRegisters) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
vstr(d0, scratch1, 0);
} else {
str(mantissa_reg, MemOperand(scratch1, 0));
@@ -1995,24 +2049,27 @@ void MacroAssembler::CompareMap(Register obj,
Label* early_success,
CompareMapMode mode) {
ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
- cmp(scratch, Operand(map));
- if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- b(eq, early_success);
- cmp(scratch, Operand(Handle<Map>(transitioned_fast_element_map)));
- }
+ CompareMap(scratch, map, early_success, mode);
+}
+
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- b(eq, early_success);
- cmp(scratch, Operand(Handle<Map>(transitioned_double_map)));
+void MacroAssembler::CompareMap(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode) {
+ cmp(obj_map, Operand(map));
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ b(eq, early_success);
+ cmp(obj_map, Operand(Handle<Map>(current_map)));
+ }
}
}
}
@@ -2127,7 +2184,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
- Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond);
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, TypeFeedbackId::None(), cond);
}
@@ -2323,8 +2380,8 @@ void MacroAssembler::ConvertToInt32(Register source,
Register scratch2,
DwVfpRegister double_scratch,
Label *not_int32) {
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
sub(scratch, source, Operand(kHeapObjectTag));
vldr(double_scratch, scratch, HeapNumber::kValueOffset);
vcvt_s32_f64(double_scratch.low(), double_scratch);
@@ -2414,16 +2471,27 @@ void MacroAssembler::ConvertToInt32(Register source,
void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
- SwVfpRegister result,
+ Register result,
DwVfpRegister double_input,
- Register scratch1,
- Register scratch2,
+ Register scratch,
+ DwVfpRegister double_scratch,
CheckForInexactConversion check_inexact) {
- ASSERT(CpuFeatures::IsSupported(VFP3));
- CpuFeatures::Scope scope(VFP3);
- Register prev_fpscr = scratch1;
- Register scratch = scratch2;
+ ASSERT(!result.is(scratch));
+ ASSERT(!double_input.is(double_scratch));
+
+ ASSERT(CpuFeatures::IsSupported(VFP2));
+ CpuFeatures::Scope scope(VFP2);
+ Register prev_fpscr = result;
+ Label done;
+ // Test for values that can be exactly represented as a signed 32-bit integer.
+ vcvt_s32_f64(double_scratch.low(), double_input);
+ vmov(result, double_scratch.low());
+ vcvt_f64_s32(double_scratch, double_scratch.low());
+ VFPCompareAndSetFlags(double_input, double_scratch);
+ b(eq, &done);
+
+ // Convert to integer, respecting rounding mode.
int32_t check_inexact_conversion =
(check_inexact == kCheckForInexactConversion) ? kVFPInexactExceptionBit : 0;
@@ -2445,7 +2513,7 @@ void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
vmsr(scratch);
// Convert the argument to an integer.
- vcvt_s32_f64(result,
+ vcvt_s32_f64(double_scratch.low(),
double_input,
(rounding_mode == kRoundToZero) ? kDefaultRoundToZero
: kFPSCRRounding);
@@ -2454,8 +2522,12 @@ void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
vmrs(scratch);
// Restore FPSCR.
vmsr(prev_fpscr);
+ // Move the converted value into the result register.
+ vmov(result, double_scratch.low());
// Check for vfp exceptions.
tst(scratch, Operand(kVFPExceptionMask | check_inexact_conversion));
+
+ bind(&done);
}
@@ -2538,7 +2610,7 @@ void MacroAssembler::EmitECMATruncate(Register result,
Register scratch,
Register input_high,
Register input_low) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
ASSERT(!input_high.is(result));
ASSERT(!input_low.is(result));
ASSERT(!input_low.is(input_high));
@@ -2577,7 +2649,7 @@ void MacroAssembler::EmitECMATruncate(Register result,
void MacroAssembler::GetLeastBitsFromSmi(Register dst,
Register src,
int num_least_bits) {
- if (CpuFeatures::IsSupported(ARMv7)) {
+ if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
ubfx(dst, src, kSmiTagSize, num_least_bits);
} else {
mov(dst, Operand(src, ASR, kSmiTagSize));
@@ -2695,7 +2767,8 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the builtins object into target register.
- ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ ldr(target,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
// Load the JavaScript builtin function from the builtins object.
ldr(target, FieldMemOperand(target,
@@ -2861,32 +2934,44 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
- ldr(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+ ldr(scratch,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- ldr(ip, MemOperand(scratch, Context::SlotOffset(expected_index)));
+ ldr(scratch,
+ MemOperand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ ldr(ip, FieldMemOperand(scratch, offset));
cmp(map_in_out, ip);
b(ne, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- ldr(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ ldr(map_in_out, FieldMemOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
ldr(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -2897,11 +2982,12 @@ void MacroAssembler::LoadInitialArrayMap(
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
- ldr(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
+ ldr(function,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
ldr(function, FieldMemOperand(function,
- GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
ldr(function, MemOperand(function, Context::SlotOffset(index)));
}
@@ -2981,38 +3067,46 @@ void MacroAssembler::JumpIfEitherSmi(Register reg1,
}
-void MacroAssembler::AbortIfSmi(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- tst(object, Operand(kSmiTagMask));
- Assert(ne, "Operand is a smi");
+void MacroAssembler::AssertNotSmi(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Check(ne, "Operand is a smi");
+ }
}
-void MacroAssembler::AbortIfNotSmi(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- tst(object, Operand(kSmiTagMask));
- Assert(eq, "Operand is not smi");
+void MacroAssembler::AssertSmi(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Check(eq, "Operand is not smi");
+ }
}
-void MacroAssembler::AbortIfNotString(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- tst(object, Operand(kSmiTagMask));
- Assert(ne, "Operand is not a string");
- push(object);
- ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
- CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
- pop(object);
- Assert(lo, "Operand is not a string");
+void MacroAssembler::AssertString(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Check(ne, "Operand is a smi and not a string");
+ push(object);
+ ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
+ CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Check(lo, "Operand is not a string");
+ }
}
-void MacroAssembler::AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message) {
- CompareRoot(src, root_value_index);
- Assert(eq, message);
+void MacroAssembler::AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message) {
+ if (emit_debug_code()) {
+ CompareRoot(src, root_value_index);
+ Check(eq, message);
+ }
}
@@ -3070,7 +3164,8 @@ void MacroAssembler::AllocateHeapNumber(Register result,
Register scratch1,
Register scratch2,
Register heap_number_map,
- Label* gc_required) {
+ Label* gc_required,
+ TaggingMode tagging_mode) {
// Allocate an object in the heap for the heap number and tag it as a heap
// object.
AllocateInNewSpace(HeapNumber::kSize,
@@ -3078,11 +3173,16 @@ void MacroAssembler::AllocateHeapNumber(Register result,
scratch1,
scratch2,
gc_required,
- TAG_OBJECT);
+ tagging_mode == TAG_RESULT ? TAG_OBJECT :
+ NO_ALLOCATION_FLAGS);
// Store heap number map in the allocated object.
AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
- str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
+ if (tagging_mode == TAG_RESULT) {
+ str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
+ } else {
+ str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
+ }
}
@@ -3153,17 +3253,17 @@ void MacroAssembler::CopyBytes(Register src,
cmp(length, Operand(kPointerSize));
b(lt, &byte_loop);
ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
-#if CAN_USE_UNALIGNED_ACCESSES
- str(scratch, MemOperand(dst, kPointerSize, PostIndex));
-#else
- strb(scratch, MemOperand(dst, 1, PostIndex));
- mov(scratch, Operand(scratch, LSR, 8));
- strb(scratch, MemOperand(dst, 1, PostIndex));
- mov(scratch, Operand(scratch, LSR, 8));
- strb(scratch, MemOperand(dst, 1, PostIndex));
- mov(scratch, Operand(scratch, LSR, 8));
- strb(scratch, MemOperand(dst, 1, PostIndex));
-#endif
+ if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
+ str(scratch, MemOperand(dst, kPointerSize, PostIndex));
+ } else {
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ }
sub(length, length, Operand(kPointerSize));
b(&word_loop);
@@ -3313,6 +3413,7 @@ void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) {
+ ASSERT(CpuFeatures::IsSupported(VFP2));
if (use_eabi_hardfloat()) {
Move(d0, dreg);
} else {
@@ -3323,6 +3424,7 @@ void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) {
void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1,
DoubleRegister dreg2) {
+ ASSERT(CpuFeatures::IsSupported(VFP2));
if (use_eabi_hardfloat()) {
if (dreg2.is(d0)) {
ASSERT(!dreg1.is(d1));
@@ -3341,6 +3443,7 @@ void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1,
void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg,
Register reg) {
+ ASSERT(CpuFeatures::IsSupported(VFP2));
if (use_eabi_hardfloat()) {
Move(d0, dreg);
Move(r0, reg);
@@ -3442,7 +3545,7 @@ void MacroAssembler::CheckPageFlag(
int mask,
Condition cc,
Label* condition_met) {
- and_(scratch, object, Operand(~Page::kPageAlignmentMask));
+ Bfc(scratch, object, 0, kPageSizeBits);
ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
tst(scratch, Operand(mask));
b(cc, condition_met);
@@ -3591,7 +3694,7 @@ void MacroAssembler::EnsureNotWhite(
// For ASCII (char-size of 1) we shift the smi tag away to get the length.
// For UC16 (char-size of 2) we just leave the smi tag in place, thereby
// getting the length multiplied by 2.
- ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4);
+ ASSERT(kOneByteStringTag == 4 && kStringEncodingMask == 4);
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
ldr(ip, FieldMemOperand(value, String::kLengthOffset));
tst(instance_type, Operand(kStringEncodingMask));
@@ -3637,7 +3740,7 @@ void MacroAssembler::ClampDoubleToUint8(Register result_reg,
// Double value is >= 255, return 255.
bind(&above_zero);
- Vmov(temp_double_reg, 255.0);
+ Vmov(temp_double_reg, 255.0, result_reg);
VFPCompareAndSetFlags(input_reg, temp_double_reg);
b(le, &in_bounds);
mov(result_reg, Operand(255));
@@ -3645,67 +3748,72 @@ void MacroAssembler::ClampDoubleToUint8(Register result_reg,
// In 0-255 range, round and truncate.
bind(&in_bounds);
- Vmov(temp_double_reg, 0.5);
- vadd(temp_double_reg, input_reg, temp_double_reg);
- vcvt_u32_f64(temp_double_reg.low(), temp_double_reg);
- vmov(result_reg, temp_double_reg.low());
+ // Save FPSCR.
+ vmrs(ip);
+ // Set rounding mode to round to the nearest integer by clearing bits[23:22].
+ bic(result_reg, ip, Operand(kVFPRoundingModeMask));
+ vmsr(result_reg);
+ vcvt_s32_f64(input_reg.low(), input_reg, kFPSCRRounding);
+ vmov(result_reg, input_reg.low());
+ // Restore FPSCR.
+ vmsr(ip);
bind(&done);
}
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- ldr(descriptors,
- FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
- Label not_smi;
- JumpIfNotSmi(descriptors, &not_smi);
- mov(descriptors, Operand(FACTORY->empty_descriptor_array()));
- bind(&not_smi);
+ ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
+}
+
+
+void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
+ ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
+ DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
+}
+
+
+void MacroAssembler::EnumLength(Register dst, Register map) {
+ STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
+ ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
+ and_(dst, dst, Operand(Smi::FromInt(Map::EnumLengthBits::kMask)));
}
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
- Label next;
- // Preload a couple of values used in the loop.
Register empty_fixed_array_value = r6;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = r7;
- LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- mov(r1, r0);
- bind(&next);
+ Label next, start;
+ mov(r2, r0);
- // Check that there are no elements. Register r1 contains the
- // current JS object we've reached through the prototype chain.
- ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
- cmp(r2, empty_fixed_array_value);
- b(ne, call_runtime);
+ // Check if the enum length field is properly initialized, indicating that
+ // there is an enum cache.
+ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in r2 for the subsequent
- // prototype load.
- ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
- JumpIfSmi(r3, call_runtime);
+ EnumLength(r3, r1);
+ cmp(r3, Operand(Smi::FromInt(Map::kInvalidEnumCache)));
+ b(eq, call_runtime);
- // Check that there is an enum cache in the non-empty instance
- // descriptors (r3). This is the case if the next enumeration
- // index field does not contain a smi.
- ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
- JumpIfSmi(r3, call_runtime);
+ jmp(&start);
+
+ bind(&next);
+ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
// For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- cmp(r1, r0);
- b(eq, &check_prototype);
- ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
- cmp(r3, empty_fixed_array_value);
+ EnumLength(r3, r1);
+ cmp(r3, Operand(Smi::FromInt(0)));
+ b(ne, call_runtime);
+
+ bind(&start);
+
+ // Check that there are no elements. Register r2 contains the current JS
+ // object we've reached through the prototype chain.
+ ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
+ cmp(r2, empty_fixed_array_value);
b(ne, call_runtime);
- // Load the prototype from the map and loop if non-null.
- bind(&check_prototype);
- ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
- cmp(r1, null_value);
+ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
+ cmp(r2, null_value);
b(ne, &next);
}
diff --git a/src/3rdparty/v8/src/arm/macro-assembler-arm.h b/src/3rdparty/v8/src/arm/macro-assembler-arm.h
index 5a5469f..0ff8579 100644
--- a/src/3rdparty/v8/src/arm/macro-assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/macro-assembler-arm.h
@@ -68,6 +68,13 @@ enum AllocationFlags {
SIZE_IN_WORDS = 1 << 2
};
+// Flags used for AllocateHeapNumber
+enum TaggingMode {
+ // Tag the result.
+ TAG_RESULT,
+ // Don't tag
+ DONT_TAG_RESULT
+};
// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
@@ -95,6 +102,11 @@ bool AreAliased(Register reg1,
#endif
+enum TargetAddressStorageMode {
+ CAN_INLINE_TARGET_ADDRESS,
+ NEVER_INLINE_TARGET_ADDRESS
+};
+
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
public:
@@ -110,18 +122,22 @@ class MacroAssembler: public Assembler {
void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
static int CallSize(Register target, Condition cond = al);
void Call(Register target, Condition cond = al);
- static int CallSize(Address target,
- RelocInfo::Mode rmode,
- Condition cond = al);
- void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
- static int CallSize(Handle<Code> code,
- RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId,
- Condition cond = al);
+ int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al);
+ static int CallSizeNotPredictableCodeSize(Address target,
+ RelocInfo::Mode rmode,
+ Condition cond = al);
+ void Call(Address target, RelocInfo::Mode rmode,
+ Condition cond = al,
+ TargetAddressStorageMode mode = CAN_INLINE_TARGET_ADDRESS);
+ int CallSize(Handle<Code> code,
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
+ TypeFeedbackId ast_id = TypeFeedbackId::None(),
+ Condition cond = al);
void Call(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId,
- Condition cond = al);
+ TypeFeedbackId ast_id = TypeFeedbackId::None(),
+ Condition cond = al,
+ TargetAddressStorageMode mode = CAN_INLINE_TARGET_ADDRESS);
void Ret(Condition cond = al);
// Emit code to discard a non-negative number of pointer-sized elements
@@ -153,7 +169,7 @@ class MacroAssembler: public Assembler {
int lsb,
int width,
Condition cond = al);
- void Bfc(Register dst, int lsb, int width, Condition cond = al);
+ void Bfc(Register dst, Register src, int lsb, int width, Condition cond = al);
void Usat(Register dst, int satpos, const Operand& src,
Condition cond = al);
@@ -482,6 +498,7 @@ class MacroAssembler: public Assembler {
void Vmov(const DwVfpRegister dst,
const double imm,
+ const Register scratch = no_reg,
const Condition cond = al);
// Enter exit frame.
@@ -499,8 +516,8 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
// Conditionally load the cached Array transitioned map of type
- // transitioned_kind from the global context if the map in register
- // map_in_out is the cached Array map in the global context of
+ // transitioned_kind from the native context if the map in register
+ // map_in_out is the cached Array map in the native context of
// expected_kind.
void LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
@@ -512,7 +529,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
void LoadGlobalFunction(int index, Register function);
@@ -728,7 +746,8 @@ class MacroAssembler: public Assembler {
Register scratch1,
Register scratch2,
Register heap_number_map,
- Label* gc_required);
+ Label* gc_required,
+ TaggingMode tagging_mode = TAG_RESULT);
void AllocateHeapNumberWithValue(Register result,
DwVfpRegister value,
Register scratch1,
@@ -802,9 +821,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail);
+ void CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
@@ -813,6 +832,7 @@ class MacroAssembler: public Assembler {
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,
+ // All regs below here overwritten.
Register elements_reg,
Register scratch1,
Register scratch2,
@@ -830,6 +850,13 @@ class MacroAssembler: public Assembler {
Label* early_success,
CompareMapMode mode = REQUIRE_EXACT_MAP);
+ // As above, but the map of the object is already loaded into the register
+ // which is preserved by the code generated.
+ void CompareMap(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
// Check if the map of an object is equal to a specified map and branch to
// label if not. Skip the smi check if not required (object is known to be a
// heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
@@ -866,12 +893,15 @@ class MacroAssembler: public Assembler {
// Load and check the instance type of an object for being a string.
// Loads the type into the second argument register.
- // Returns a condition that will be enabled if the object was a string.
+ // Returns a condition that will be enabled if the object was a string
+ // and the passed-in condition passed. If the passed-in condition failed
+ // then flags remain unchanged.
Condition IsObjectStringType(Register obj,
- Register type) {
- ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
- ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
- tst(type, Operand(kIsNotStringMask));
+ Register type,
+ Condition cond = al) {
+ ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset), cond);
+ ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset), cond);
+ tst(type, Operand(kIsNotStringMask), cond);
ASSERT_EQ(0, kStringTag);
return eq;
}
@@ -928,21 +958,22 @@ class MacroAssembler: public Assembler {
DwVfpRegister double_scratch,
Label *not_int32);
- // Truncates a double using a specific rounding mode.
+ // Truncates a double using a specific rounding mode, and writes the value
+ // to the result register.
// Clears the z flag (ne condition) if an overflow occurs.
- // If exact_conversion is true, the z flag is also cleared if the conversion
- // was inexact, i.e. if the double value could not be converted exactly
- // to a 32bit integer.
+ // If kCheckForInexactConversion is passed, the z flag is also cleared if the
+ // conversion was inexact, i.e. if the double value could not be converted
+ // exactly to a 32-bit integer.
void EmitVFPTruncate(VFPRoundingMode rounding_mode,
- SwVfpRegister result,
+ Register result,
DwVfpRegister double_input,
- Register scratch1,
- Register scratch2,
+ Register scratch,
+ DwVfpRegister double_scratch,
CheckForInexactConversion check
= kDontCheckForInexactConversion);
// Helper for EmitECMATruncate.
- // This will truncate a floating-point value outside of the singed 32bit
+ // This will truncate a floating-point value outside of the signed 32bit
// integer range to a 32bit signed integer.
// Expects the double value loaded in input_high and input_low.
// Exits with the answer in 'result'.
@@ -1174,7 +1205,7 @@ class MacroAssembler: public Assembler {
// Souce and destination can be the same register.
void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);
- // Jump the register contains a smi.
+ // Jump if the register contains a smi.
inline void JumpIfSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
b(eq, smi_label);
@@ -1189,17 +1220,18 @@ class MacroAssembler: public Assembler {
// Jump if either of the registers contain a smi.
void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);
- // Abort execution if argument is a smi. Used in debug code.
- void AbortIfSmi(Register object);
- void AbortIfNotSmi(Register object);
+ // Abort execution if argument is a smi, enabled via --debug-code.
+ void AssertNotSmi(Register object);
+ void AssertSmi(Register object);
- // Abort execution if argument is a string. Used in debug code.
- void AbortIfNotString(Register object);
+ // Abort execution if argument is a string, enabled via --debug-code.
+ void AssertString(Register object);
- // Abort execution if argument is not the root value with the given index.
- void AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message);
+ // Abort execution if argument is not the root value with the given index,
+ // enabled via --debug-code.
+ void AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message);
// ---------------------------------------------------------------------------
// HeapNumber utilities
@@ -1261,6 +1293,16 @@ class MacroAssembler: public Assembler {
void LoadInstanceDescriptors(Register map, Register descriptors);
+ void EnumLength(Register dst, Register map);
+ void NumberOfOwnDescriptors(Register dst, Register map);
+
+ template<typename Field>
+ void DecodeField(Register reg) {
+ static const int shift = Field::kShift;
+ static const int mask = (Field::kMask >> shift) << kSmiTagSize;
+ mov(reg, Operand(reg, LSR, shift));
+ and_(reg, reg, Operand(mask));
+ }
// Activation support.
void EnterFrame(StackFrame::Type type);
@@ -1368,12 +1410,12 @@ inline MemOperand ContextOperand(Register context, int index) {
inline MemOperand GlobalObjectOperand() {
- return ContextOperand(cp, Context::GLOBAL_INDEX);
+ return ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX);
}
static inline MemOperand QmlGlobalObjectOperand() {
- return ContextOperand(cp, Context::QML_GLOBAL_INDEX);
+ return ContextOperand(cp, Context::QML_GLOBAL_OBJECT_INDEX);
}
diff --git a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
index a833624..17b8677 100644
--- a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,45 +43,49 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
+ * - r4 : Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
* - r5 : Pointer to current code object (Code*) including heap object tag.
* - r6 : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - r7 : Currently loaded character. Must be loaded using
* LoadCurrentCharacter before using any of the dispatch methods.
- * - r8 : points to tip of backtrack stack
+ * - r8 : Points to tip of backtrack stack
* - r9 : Unused, might be used by C code and expected unchanged.
* - r10 : End of input (points to byte after last character in input).
* - r11 : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
* - r12 : IP register, used by assembler. Very volatile.
- * - r13/sp : points to tip of C stack.
+ * - r13/sp : Points to tip of C stack.
*
* The remaining registers are free for computations.
* Each call to a public method should retain this convention.
*
* The stack will have the following structure:
- * - fp[52] Isolate* isolate (Address of the current isolate)
- * - fp[48] direct_call (if 1, direct call from JavaScript code,
- * if 0, call through the runtime system).
- * - fp[44] stack_area_base (High end of the memory area to use as
- * backtracking stack).
+ * - fp[56] Isolate* isolate (address of the current isolate)
+ * - fp[52] direct_call (if 1, direct call from JavaScript code,
+ * if 0, call through the runtime system).
+ * - fp[48] stack_area_base (high end of the memory area to use as
+ * backtracking stack).
+ * - fp[44] capture array size (may fit multiple sets of matches)
* - fp[40] int* capture_array (int[num_saved_registers_], for output).
* - fp[36] secondary link/return address used by native call.
* --- sp when called ---
- * - fp[32] return address (lr).
- * - fp[28] old frame pointer (r11).
+ * - fp[32] return address (lr).
+ * - fp[28] old frame pointer (r11).
* - fp[0..24] backup of registers r4..r10.
* --- frame pointer ----
- * - fp[-4] end of input (Address of end of string).
- * - fp[-8] start of input (Address of first character in string).
+ * - fp[-4] end of input (address of end of string).
+ * - fp[-8] start of input (address of first character in string).
* - fp[-12] start index (character index of start).
* - fp[-16] void* input_string (location of a handle containing the string).
- * - fp[-20] Offset of location before start of input (effectively character
+ * - fp[-20] success counter (only for global regexps to count matches).
+ * - fp[-24] Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a
* non-position.
- * - fp[-24] At start (if 1, we are starting at the start of the
+ * - fp[-28] At start (if 1, we are starting at the start of the
* string, otherwise 0)
- * - fp[-28] register 0 (Only positions must be stored in the first
+ * - fp[-32] register 0 (Only positions must be stored in the first
* - register 1 num_saved_registers_ registers)
* - ...
* - register num_registers-1
@@ -115,8 +119,10 @@ namespace internal {
RegExpMacroAssemblerARM::RegExpMacroAssemblerARM(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -197,9 +203,9 @@ void RegExpMacroAssemblerARM::CheckCharacterGT(uc16 limit, Label* on_greater) {
void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
Label not_at_start;
// Did we start the match at the start of the string at all?
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
+ __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
__ cmp(r0, Operand(0, RelocInfo::NONE));
- BranchOrBacktrack(eq, &not_at_start);
+ BranchOrBacktrack(ne, &not_at_start);
// If we did, are we still at the start of the input?
__ ldr(r1, MemOperand(frame_pointer(), kInputStart));
@@ -212,9 +218,9 @@ void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
void RegExpMacroAssemblerARM::CheckNotAtStart(Label* on_not_at_start) {
// Did we start the match at the start of the string at all?
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
+ __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
__ cmp(r0, Operand(0, RelocInfo::NONE));
- BranchOrBacktrack(eq, on_not_at_start);
+ BranchOrBacktrack(ne, on_not_at_start);
// If we did, are we still at the start of the input?
__ ldr(r1, MemOperand(frame_pointer(), kInputStart));
__ add(r0, end_of_input_address(), Operand(current_input_offset()));
@@ -432,16 +438,6 @@ void RegExpMacroAssemblerARM::CheckNotBackReference(
}
-void RegExpMacroAssemblerARM::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ ldr(r0, register_location(reg1));
- __ ldr(r1, register_location(reg2));
- __ cmp(r0, r1);
- BranchOrBacktrack(ne, on_not_equal);
-}
-
-
void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c,
Label* on_not_equal) {
__ cmp(current_character(), Operand(c));
@@ -655,6 +651,7 @@ void RegExpMacroAssemblerARM::Fail() {
Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
+ Label return_r0;
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -678,8 +675,9 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Set frame pointer in space for it if this is not a direct call
// from generated code.
__ add(frame_pointer(), sp, Operand(4 * kPointerSize));
+ __ mov(r0, Operand(0, RelocInfo::NONE));
+ __ push(r0); // Make room for success counter and initialize it to 0.
__ push(r0); // Make room for "position - 1" constant (value is irrelevant).
- __ push(r0); // Make room for "at start" constant (value is irrelevant).
// Check if we have space on the stack for registers.
Label stack_limit_hit;
Label stack_ok;
@@ -698,13 +696,13 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ mov(r0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_r0);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(r0);
__ cmp(r0, Operand(0, RelocInfo::NONE));
// If returned value is non-zero, we exit with the returned value as result.
- __ b(ne, &exit_label_);
+ __ b(ne, &return_r0);
__ bind(&stack_ok);
@@ -725,41 +723,45 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// position registers.
__ str(r0, MemOperand(frame_pointer(), kInputStartMinusOne));
- // Determine whether the start index is zero, that is at the start of the
- // string, and store that value in a local variable.
- __ cmp(r1, Operand(0));
- __ mov(r1, Operand(1), LeaveCC, eq);
- __ mov(r1, Operand(0, RelocInfo::NONE), LeaveCC, ne);
- __ str(r1, MemOperand(frame_pointer(), kAtStart));
+ // Initialize code pointer register
+ __ mov(code_pointer(), Operand(masm_->CodeObject()));
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmp(r1, Operand(0, RelocInfo::NONE));
+ __ b(ne, &load_char_start_regexp);
+ __ mov(current_character(), Operand('\n'), LeaveCC, eq);
+ __ jmp(&start_regexp);
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+
+ // Initialize on-stack registers.
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1
-
- // Address of register 0.
- __ add(r1, frame_pointer(), Operand(kRegisterZero));
- __ mov(r2, Operand(num_saved_registers_));
- Label init_loop;
- __ bind(&init_loop);
- __ str(r0, MemOperand(r1, kPointerSize, NegPostIndex));
- __ sub(r2, r2, Operand(1), SetCC);
- __ b(ne, &init_loop);
+ if (num_saved_registers_ > 8) {
+ // Address of register 0.
+ __ add(r1, frame_pointer(), Operand(kRegisterZero));
+ __ mov(r2, Operand(num_saved_registers_));
+ Label init_loop;
+ __ bind(&init_loop);
+ __ str(r0, MemOperand(r1, kPointerSize, NegPostIndex));
+ __ sub(r2, r2, Operand(1), SetCC);
+ __ b(ne, &init_loop);
+ } else {
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ str(r0, register_location(i));
+ }
+ }
}
// Initialize backtrack stack pointer.
__ ldr(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackHighEnd));
- // Initialize code pointer register
- __ mov(code_pointer(), Operand(masm_->CodeObject()));
- // Load previous char as initial value of current character register.
- Label at_start;
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
- __ cmp(r0, Operand(0, RelocInfo::NONE));
- __ b(ne, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ mov(current_character(), Operand('\n'));
- __ jmp(&start_label_);
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -786,6 +788,10 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
for (int i = 0; i < num_saved_registers_; i += 2) {
__ ldr(r2, register_location(i));
__ ldr(r3, register_location(i + 1));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in r4 for the zero-length check later.
+ __ mov(r4, r2);
+ }
if (mode_ == UC16) {
__ add(r2, r1, Operand(r2, ASR, 1));
__ add(r3, r1, Operand(r3, ASR, 1));
@@ -797,10 +803,58 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ str(r3, MemOperand(r0, kPointerSize, PostIndex));
}
}
- __ mov(r0, Operand(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ __ ldr(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ __ ldr(r1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ __ ldr(r2, MemOperand(frame_pointer(), kRegisterOutput));
+ // Increment success counter.
+ __ add(r0, r0, Operand(1));
+ __ str(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ sub(r1, r1, Operand(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmp(r1, Operand(num_saved_registers_));
+ __ b(lt, &return_r0);
+
+ __ str(r1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ // Advance the location for output.
+ __ add(r2, r2, Operand(num_saved_registers_ * kPointerSize));
+ __ str(r2, MemOperand(frame_pointer(), kRegisterOutput));
+
+ // Prepare r0 to initialize registers with its value in the next run.
+ __ ldr(r0, MemOperand(frame_pointer(), kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // r4: capture start index
+ __ cmp(current_input_offset(), r4);
+ // Not a zero-length match, restart.
+ __ b(ne, &load_char_start_regexp);
+ // Offset from the end is zero if we already reached the end.
+ __ cmp(current_input_offset(), Operand(0));
+ __ b(eq, &exit_label_);
+ // Advance current position after a zero-length match.
+ __ add(current_input_offset(),
+ current_input_offset(),
+ Operand((mode_ == UC16) ? 2 : 1));
+ }
+
+ __ b(&load_char_start_regexp);
+ } else {
+ __ mov(r0, Operand(SUCCESS));
+ }
}
+
// Exit and return r0
__ bind(&exit_label_);
+ if (global()) {
+ __ ldr(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ }
+
+ __ bind(&return_r0);
// Skip sp past regexp registers and local variables..
__ mov(sp, frame_pointer());
// Restore registers r4..r11 and return (restoring lr to pc).
@@ -822,7 +876,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ cmp(r0, Operand(0, RelocInfo::NONE));
// If returning non-zero, we should end execution with the given
// result as return value.
- __ b(ne, &exit_label_);
+ __ b(ne, &return_r0);
// String might have moved: Reload end of string from frame.
__ ldr(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
@@ -859,7 +913,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ mov(r0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_r0);
}
CodeDesc code_desc;
@@ -1014,8 +1068,9 @@ void RegExpMacroAssemblerARM::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerARM::Succeed() {
+bool RegExpMacroAssemblerARM::Succeed() {
__ jmp(&success_label_);
+ return global();
}
@@ -1303,20 +1358,26 @@ void RegExpMacroAssemblerARM::CallCFunctionUsingStub(
}
+bool RegExpMacroAssemblerARM::CanReadUnaligned() {
+ return CpuFeatures::IsSupported(UNALIGNED_ACCESSES) && !slow_safe();
+}
+
+
void RegExpMacroAssemblerARM::LoadCurrentCharacterUnchecked(int cp_offset,
int characters) {
Register offset = current_input_offset();
if (cp_offset != 0) {
- __ add(r0, current_input_offset(), Operand(cp_offset * char_size()));
- offset = r0;
+ // r4 is not being used to store the capture start index at this point.
+ __ add(r4, current_input_offset(), Operand(cp_offset * char_size()));
+ offset = r4;
}
// The ldr, str, ldrh, strh instructions can do unaligned accesses, if the CPU
// and the operating system running on the target allow it.
// If unaligned load/stores are not supported then this function must only
// be used to load a single character at a time.
-#if !V8_TARGET_CAN_READ_UNALIGNED
- ASSERT(characters == 1);
-#endif
+ if (!CanReadUnaligned()) {
+ ASSERT(characters == 1);
+ }
if (mode_ == ASCII) {
if (characters == 4) {
diff --git a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
index 14f984f..c45669a 100644
--- a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,17 +35,10 @@ namespace v8 {
namespace internal {
-#ifdef V8_INTERPRETED_REGEXP
-class RegExpMacroAssemblerARM: public RegExpMacroAssembler {
- public:
- RegExpMacroAssemblerARM();
- virtual ~RegExpMacroAssemblerARM();
-};
-
-#else // V8_INTERPRETED_REGEXP
+#ifndef V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerARM(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerARM(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerARM();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -70,7 +63,6 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(unsigned c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned mask,
@@ -113,10 +105,11 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
+ virtual bool CanReadUnaligned();
// Called from RegExp if the stack-guard is triggered.
// If the code object is relocated, the return address is fixed before
@@ -137,7 +130,8 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kSecondaryReturnAddress = kReturnAddress + kPointerSize;
// Stack parameters placed by caller.
static const int kRegisterOutput = kSecondaryReturnAddress + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -149,10 +143,10 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kInputString = kStartIndex - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne = kInputString - kPointerSize;
- static const int kAtStart = kInputStartMinusOne - kPointerSize;
+ static const int kSuccessfulCaptures = kInputString - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
- static const int kRegisterZero = kAtStart - kPointerSize;
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
// Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024;
diff --git a/src/3rdparty/v8/src/arm/simulator-arm.cc b/src/3rdparty/v8/src/arm/simulator-arm.cc
index 629c209..bd7f1bd 100644
--- a/src/3rdparty/v8/src/arm/simulator-arm.cc
+++ b/src/3rdparty/v8/src/arm/simulator-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -276,7 +276,7 @@ void ArmDebugger::Debug() {
// make them invisible to all commands.
UndoBreakpoints();
- while (!done) {
+ while (!done && !sim_->has_bad_pc()) {
if (last_pc != sim_->get_pc()) {
disasm::NameConverter converter;
disasm::Disassembler dasm(converter);
@@ -945,73 +945,31 @@ unsigned int Simulator::get_s_register(int sreg) const {
}
-void Simulator::set_s_register_from_float(int sreg, const float flt) {
- ASSERT((sreg >= 0) && (sreg < num_s_registers));
- // Read the bits from the single precision floating point value
- // into the unsigned integer element of vfp_register[] given by index=sreg.
- char buffer[sizeof(vfp_register[0])];
- memcpy(buffer, &flt, sizeof(vfp_register[0]));
- memcpy(&vfp_register[sreg], buffer, sizeof(vfp_register[0]));
-}
-
-
-void Simulator::set_s_register_from_sinteger(int sreg, const int sint) {
- ASSERT((sreg >= 0) && (sreg < num_s_registers));
- // Read the bits from the integer value into the unsigned integer element of
- // vfp_register[] given by index=sreg.
- char buffer[sizeof(vfp_register[0])];
- memcpy(buffer, &sint, sizeof(vfp_register[0]));
- memcpy(&vfp_register[sreg], buffer, sizeof(vfp_register[0]));
-}
-
-
-void Simulator::set_d_register_from_double(int dreg, const double& dbl) {
- ASSERT((dreg >= 0) && (dreg < num_d_registers));
- // Read the bits from the double precision floating point value into the two
- // consecutive unsigned integer elements of vfp_register[] given by index
- // 2*sreg and 2*sreg+1.
- char buffer[2 * sizeof(vfp_register[0])];
- memcpy(buffer, &dbl, 2 * sizeof(vfp_register[0]));
- memcpy(&vfp_register[dreg * 2], buffer, 2 * sizeof(vfp_register[0]));
-}
+template<class InputType, int register_size>
+void Simulator::SetVFPRegister(int reg_index, const InputType& value) {
+ ASSERT(reg_index >= 0);
+ if (register_size == 1) ASSERT(reg_index < num_s_registers);
+ if (register_size == 2) ASSERT(reg_index < num_d_registers);
-
-float Simulator::get_float_from_s_register(int sreg) {
- ASSERT((sreg >= 0) && (sreg < num_s_registers));
-
- float sm_val = 0.0;
- // Read the bits from the unsigned integer vfp_register[] array
- // into the single precision floating point value and return it.
- char buffer[sizeof(vfp_register[0])];
- memcpy(buffer, &vfp_register[sreg], sizeof(vfp_register[0]));
- memcpy(&sm_val, buffer, sizeof(vfp_register[0]));
- return(sm_val);
+ char buffer[register_size * sizeof(vfp_register[0])];
+ memcpy(buffer, &value, register_size * sizeof(vfp_register[0]));
+ memcpy(&vfp_register[reg_index * register_size], buffer,
+ register_size * sizeof(vfp_register[0]));
}
-int Simulator::get_sinteger_from_s_register(int sreg) {
- ASSERT((sreg >= 0) && (sreg < num_s_registers));
-
- int sm_val = 0;
- // Read the bits from the unsigned integer vfp_register[] array
- // into the single precision floating point value and return it.
- char buffer[sizeof(vfp_register[0])];
- memcpy(buffer, &vfp_register[sreg], sizeof(vfp_register[0]));
- memcpy(&sm_val, buffer, sizeof(vfp_register[0]));
- return(sm_val);
-}
+template<class ReturnType, int register_size>
+ReturnType Simulator::GetFromVFPRegister(int reg_index) {
+ ASSERT(reg_index >= 0);
+ if (register_size == 1) ASSERT(reg_index < num_s_registers);
+ if (register_size == 2) ASSERT(reg_index < num_d_registers);
-
-double Simulator::get_double_from_d_register(int dreg) {
- ASSERT((dreg >= 0) && (dreg < num_d_registers));
-
- double dm_val = 0.0;
- // Read the bits from the unsigned integer vfp_register[] array
- // into the double precision floating point value and return it.
- char buffer[2 * sizeof(vfp_register[0])];
- memcpy(buffer, &vfp_register[2 * dreg], 2 * sizeof(vfp_register[0]));
- memcpy(&dm_val, buffer, 2 * sizeof(vfp_register[0]));
- return(dm_val);
+ ReturnType value = 0;
+ char buffer[register_size * sizeof(vfp_register[0])];
+ memcpy(buffer, &vfp_register[register_size * reg_index],
+ register_size * sizeof(vfp_register[0]));
+ memcpy(&value, buffer, register_size * sizeof(vfp_register[0]));
+ return value;
}
@@ -1108,111 +1066,83 @@ void Simulator::TrashCallerSaveRegisters() {
int Simulator::ReadW(int32_t addr, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
- return *ptr;
-#else
- if ((addr & 3) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 3) == 0) {
intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
return *ptr;
+ } else {
+ PrintF("Unaligned read at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
+ UNIMPLEMENTED();
+ return 0;
}
- PrintF("Unaligned read at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
- addr,
- reinterpret_cast<intptr_t>(instr));
- UNIMPLEMENTED();
- return 0;
-#endif
}
void Simulator::WriteW(int32_t addr, int value, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
- *ptr = value;
- return;
-#else
- if ((addr & 3) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 3) == 0) {
intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
*ptr = value;
- return;
+ } else {
+ PrintF("Unaligned write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
+ UNIMPLEMENTED();
}
- PrintF("Unaligned write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
- addr,
- reinterpret_cast<intptr_t>(instr));
- UNIMPLEMENTED();
-#endif
}
uint16_t Simulator::ReadHU(int32_t addr, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
- return *ptr;
-#else
- if ((addr & 1) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 1) == 0) {
uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
return *ptr;
+ } else {
+ PrintF("Unaligned unsigned halfword read at 0x%08x, pc=0x%08"
+ V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
+ UNIMPLEMENTED();
+ return 0;
}
- PrintF("Unaligned unsigned halfword read at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
- addr,
- reinterpret_cast<intptr_t>(instr));
- UNIMPLEMENTED();
- return 0;
-#endif
}
int16_t Simulator::ReadH(int32_t addr, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- int16_t* ptr = reinterpret_cast<int16_t*>(addr);
- return *ptr;
-#else
- if ((addr & 1) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 1) == 0) {
int16_t* ptr = reinterpret_cast<int16_t*>(addr);
return *ptr;
+ } else {
+ PrintF("Unaligned signed halfword read at 0x%08x\n", addr);
+ UNIMPLEMENTED();
+ return 0;
}
- PrintF("Unaligned signed halfword read at 0x%08x\n", addr);
- UNIMPLEMENTED();
- return 0;
-#endif
}
void Simulator::WriteH(int32_t addr, uint16_t value, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
- *ptr = value;
- return;
-#else
- if ((addr & 1) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 1) == 0) {
uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
*ptr = value;
- return;
+ } else {
+ PrintF("Unaligned unsigned halfword write at 0x%08x, pc=0x%08"
+ V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
+ UNIMPLEMENTED();
}
- PrintF("Unaligned unsigned halfword write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
- addr,
- reinterpret_cast<intptr_t>(instr));
- UNIMPLEMENTED();
-#endif
}
void Simulator::WriteH(int32_t addr, int16_t value, Instruction* instr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- int16_t* ptr = reinterpret_cast<int16_t*>(addr);
- *ptr = value;
- return;
-#else
- if ((addr & 1) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 1) == 0) {
int16_t* ptr = reinterpret_cast<int16_t*>(addr);
*ptr = value;
- return;
+ } else {
+ PrintF("Unaligned halfword write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
+ UNIMPLEMENTED();
}
- PrintF("Unaligned halfword write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
- addr,
- reinterpret_cast<intptr_t>(instr));
- UNIMPLEMENTED();
-#endif
}
@@ -1241,37 +1171,26 @@ void Simulator::WriteB(int32_t addr, int8_t value) {
int32_t* Simulator::ReadDW(int32_t addr) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- int32_t* ptr = reinterpret_cast<int32_t*>(addr);
- return ptr;
-#else
- if ((addr & 3) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 3) == 0) {
int32_t* ptr = reinterpret_cast<int32_t*>(addr);
return ptr;
+ } else {
+ PrintF("Unaligned read at 0x%08x\n", addr);
+ UNIMPLEMENTED();
+ return 0;
}
- PrintF("Unaligned read at 0x%08x\n", addr);
- UNIMPLEMENTED();
- return 0;
-#endif
}
void Simulator::WriteDW(int32_t addr, int32_t value1, int32_t value2) {
-#if V8_TARGET_CAN_READ_UNALIGNED
- int32_t* ptr = reinterpret_cast<int32_t*>(addr);
- *ptr++ = value1;
- *ptr = value2;
- return;
-#else
- if ((addr & 3) == 0) {
+ if (FLAG_enable_unaligned_accesses || (addr & 3) == 0) {
int32_t* ptr = reinterpret_cast<int32_t*>(addr);
*ptr++ = value1;
*ptr = value2;
- return;
+ } else {
+ PrintF("Unaligned write at 0x%08x\n", addr);
+ UNIMPLEMENTED();
}
- PrintF("Unaligned write at 0x%08x\n", addr);
- UNIMPLEMENTED();
-#endif
}
@@ -1468,7 +1387,14 @@ int32_t Simulator::GetShiftRm(Instruction* instr, bool* carry_out) {
}
case ROR: {
- UNIMPLEMENTED();
+ if (shift_amount == 0) {
+ *carry_out = c_flag_;
+ } else {
+ uint32_t left = static_cast<uint32_t>(result) >> shift_amount;
+ uint32_t right = static_cast<uint32_t>(result) << (32 - shift_amount);
+ result = right | left;
+ *carry_out = (static_cast<uint32_t>(result) >> 31) != 0;
+ }
break;
}
@@ -1540,7 +1466,14 @@ int32_t Simulator::GetShiftRm(Instruction* instr, bool* carry_out) {
}
case ROR: {
- UNIMPLEMENTED();
+ if (shift_amount == 0) {
+ *carry_out = c_flag_;
+ } else {
+ uint32_t left = static_cast<uint32_t>(result) >> shift_amount;
+ uint32_t right = static_cast<uint32_t>(result) << (32 - shift_amount);
+ result = right | left;
+ *carry_out = (static_cast<uint32_t>(result) >> 31) != 0;
+ }
break;
}
@@ -2028,11 +1961,23 @@ void Simulator::DecodeType01(Instruction* instr) {
SetNZFlags(alu_out);
}
} else {
- // The MLA instruction description (A 4.1.28) refers to the order
- // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
- // Rn field to encode the Rd register and the Rd field to encode
- // the Rn register.
- Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
+ int rd = instr->RdValue();
+ int32_t acc_value = get_register(rd);
+ if (instr->Bit(22) == 0) {
+ // The MLA instruction description (A 4.1.28) refers to the order
+ // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
+ // Rn field to encode the Rd register and the Rd field to encode
+ // the Rn register.
+ // Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
+ int32_t mul_out = rm_val * rs_val;
+ int32_t result = acc_value + mul_out;
+ set_register(rn, result);
+ } else {
+ // Format(instr, "mls'cond's 'rn, 'rm, 'rs, 'rd");
+ int32_t mul_out = rm_val * rs_val;
+ int32_t result = acc_value - mul_out;
+ set_register(rn, result);
+ }
}
} else {
// The signed/long multiply instructions use the terms RdHi and RdLo
@@ -2252,6 +2197,8 @@ void Simulator::DecodeType01(Instruction* instr) {
PrintF("%08x\n", instr->InstructionBits());
UNIMPLEMENTED();
}
+ } else if ((type == 1) && instr->IsNopType1()) {
+ // NOP.
} else {
int rd = instr->RdValue();
int rn = instr->RnValue();
@@ -2408,7 +2355,7 @@ void Simulator::DecodeType01(Instruction* instr) {
// Format(instr, "cmn'cond 'rn, 'imm");
alu_out = rn_val + shifter_operand;
SetNZFlags(alu_out);
- SetCFlag(!CarryFrom(rn_val, shifter_operand));
+ SetCFlag(CarryFrom(rn_val, shifter_operand));
SetVFlag(OverflowFrom(alu_out, rn_val, shifter_operand, true));
} else {
// Other instructions matching this pattern are handled in the
@@ -2588,6 +2535,25 @@ void Simulator::DecodeType3(Instruction* instr) {
break;
}
case db_x: {
+ if (FLAG_enable_sudiv) {
+ if (!instr->HasW()) {
+ if (instr->Bits(5, 4) == 0x1) {
+ if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
+ // sdiv (in V8 notation matching ARM ISA format) rn = rm/rs
+ // Format(instr, "'sdiv'cond'b 'rn, 'rm, 'rs);
+ int rm = instr->RmValue();
+ int32_t rm_val = get_register(rm);
+ int rs = instr->RsValue();
+ int32_t rs_val = get_register(rs);
+ int32_t ret_val = 0;
+ ASSERT(rs_val != 0);
+ ret_val = rm_val/rs_val;
+ set_register(rn, ret_val);
+ return;
+ }
+ }
+ }
+ }
// Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
addr = rn_val - shifter_operand;
if (instr->HasW()) {
diff --git a/src/3rdparty/v8/src/arm/simulator-arm.h b/src/3rdparty/v8/src/arm/simulator-arm.h
index 585f1e0..abc91bb 100644
--- a/src/3rdparty/v8/src/arm/simulator-arm.h
+++ b/src/3rdparty/v8/src/arm/simulator-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,16 +49,16 @@ namespace internal {
(entry(p0, p1, p2, p3, p4))
typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
- void*, int*, Address, int, Isolate*);
+ void*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address
// should act as a function matching the type arm_regexp_matcher.
// The fifth argument is a dummy that reserves the space used for
// the return address added by the ExitFrame in native calls.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
(FUNCTION_CAST<arm_regexp_matcher>(entry)( \
- p0, p1, p2, p3, NULL, p4, p5, p6, p7))
+ p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address)
@@ -163,12 +163,30 @@ class Simulator {
// Support for VFP.
void set_s_register(int reg, unsigned int value);
unsigned int get_s_register(int reg) const;
- void set_d_register_from_double(int dreg, const double& dbl);
- double get_double_from_d_register(int dreg);
- void set_s_register_from_float(int sreg, const float dbl);
- float get_float_from_s_register(int sreg);
- void set_s_register_from_sinteger(int reg, const int value);
- int get_sinteger_from_s_register(int reg);
+
+ void set_d_register_from_double(int dreg, const double& dbl) {
+ SetVFPRegister<double, 2>(dreg, dbl);
+ }
+
+ double get_double_from_d_register(int dreg) {
+ return GetFromVFPRegister<double, 2>(dreg);
+ }
+
+ void set_s_register_from_float(int sreg, const float flt) {
+ SetVFPRegister<float, 1>(sreg, flt);
+ }
+
+ float get_float_from_s_register(int sreg) {
+ return GetFromVFPRegister<float, 1>(sreg);
+ }
+
+ void set_s_register_from_sinteger(int sreg, const int sint) {
+ SetVFPRegister<int, 1>(sreg, sint);
+ }
+
+ int get_sinteger_from_s_register(int sreg) {
+ return GetFromVFPRegister<int, 1>(sreg);
+ }
// Special case of set_register and get_register to access the raw PC value.
void set_pc(int32_t value);
@@ -332,6 +350,12 @@ class Simulator {
void SetFpResult(const double& result);
void TrashCallerSaveRegisters();
+ template<class ReturnType, int register_size>
+ ReturnType GetFromVFPRegister(int reg_index);
+
+ template<class InputType, int register_size>
+ void SetVFPRegister(int reg_index, const InputType& value);
+
// Architecture state.
// Saturating instructions require a Q flag to indicate saturation.
// There is currently no way to read the CPSR directly, and thus read the Q
@@ -401,9 +425,9 @@ class Simulator {
reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
Simulator::current(Isolate::Current())->Call( \
- entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
+ entry, 10, p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == NULL ? \
diff --git a/src/3rdparty/v8/src/arm/stub-cache-arm.cc b/src/3rdparty/v8/src/arm/stub-cache-arm.cc
index 49c0982..9fc39d4 100644
--- a/src/3rdparty/v8/src/arm/stub-cache-arm.cc
+++ b/src/3rdparty/v8/src/arm/stub-cache-arm.cc
@@ -283,11 +283,12 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
int index,
Register prototype) {
// Load the global or builtins object from the current context.
- __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
__ ldr(prototype,
- FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
+ __ ldr(prototype,
+ FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
__ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
// Load the initial map. The global functions all have initial maps.
__ ldr(prototype,
@@ -304,13 +305,14 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
Label* miss) {
Isolate* isolate = masm->isolate();
// Check we're still in the same context.
- __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ Move(ip, isolate->global());
+ __ ldr(prototype,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ Move(ip, isolate->global_object());
__ cmp(prototype, ip);
__ b(ne, miss);
// Get the global function with the given index.
Handle<JSFunction> function(
- JSFunction::cast(isolate->global_context()->get(index)));
+ JSFunction::cast(isolate->native_context()->get(index)));
// Load its initial map. The global functions all have initial maps.
__ Move(prototype, Handle<Map>(function->initial_map()));
// Load the prototype from the initial map.
@@ -435,22 +437,59 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
// r0 : value
Label exit;
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
- __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
DO_SMI_CHECK, mode);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register, push
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -473,10 +512,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ mov(ip, Operand(transition));
- __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ // Update the map of the object.
+ __ mov(scratch1, Operand(transition));
+ __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -498,15 +547,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kLRHasNotBeenSaved,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array
- __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ str(r0, FieldMemOperand(scratch, offset));
+ __ ldr(scratch1,
+ FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ str(r0, FieldMemOperand(scratch1, offset));
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(r0, &exit);
@@ -514,7 +564,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Update the write barrier for the array address.
// Ok to clobber receiver_reg and name_reg, since we return.
__ mov(name_reg, r0);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -938,8 +988,8 @@ static void StoreIntAsFloat(MacroAssembler* masm,
Register fval,
Register scratch1,
Register scratch2) {
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ vmov(s0, ival);
__ add(scratch1, dst, Operand(wordoffset, LSL, 2));
__ vcvt_f32_s32(s0, s0);
@@ -1182,6 +1232,45 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
}
+void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss) {
+ ASSERT(!receiver.is(scratch1));
+ ASSERT(!receiver.is(scratch2));
+ ASSERT(!receiver.is(scratch3));
+
+ // Load the properties dictionary.
+ Register dictionary = scratch1;
+ __ ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+ // Probe the dictionary.
+ Label probe_done;
+ StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
+ miss,
+ &probe_done,
+ dictionary,
+ name_reg,
+ scratch2,
+ scratch3);
+ __ bind(&probe_done);
+
+ // If probing finds an entry in the dictionary, scratch3 contains the
+ // pointer into the dictionary. Check that the value is the callback.
+ Register pointer = scratch3;
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ ldr(scratch2, FieldMemOperand(pointer, kValueOffset));
+ __ cmp(scratch2, Operand(callback));
+ __ b(ne, miss);
+}
+
+
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Handle<JSObject> holder,
Register receiver,
@@ -1189,6 +1278,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
+ Register scratch4,
Handle<AccessorInfo> callback,
Handle<String> name,
Label* miss) {
@@ -1199,6 +1289,11 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register reg = CheckPrototypes(object, receiver, holder, scratch1,
scratch2, scratch3, name, miss);
+ if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
+ GenerateDictionaryLoadCallback(
+ reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
+ }
+
// Build AccessorInfo::args_ list on the stack and push property name below
// the exit frame to make GC aware of them and store pointers to them.
__ push(receiver);
@@ -1255,12 +1350,13 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1328,7 +1424,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), r0, holder_reg,
@@ -1477,7 +1573,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -1581,16 +1677,29 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+ __ CheckFastSmiElements(r3, r7, &call_builtin);
// edx: receiver
// r3: map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r3,
r7,
+ &try_holey_map);
+ __ mov(r2, receiver);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ r3,
+ r7,
&call_builtin);
__ mov(r2, receiver);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(r3, r3, &call_builtin);
@@ -2009,7 +2118,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2027,11 +2136,11 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
// -- sp[argc * 4] : receiver
// -----------------------------------
- if (!CpuFeatures::IsSupported(VFP3)) {
+ if (!CpuFeatures::IsSupported(VFP2)) {
return Handle<Code>::null();
}
- CpuFeatures::Scope scope_vfp3(VFP3);
+ CpuFeatures::Scope scope_vfp2(VFP2);
const int argc = arguments().immediate();
// If the object is not a JSObject or we got an unexpected number of
// arguments, bail out to the regular call.
@@ -2155,7 +2264,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2254,7 +2363,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2471,7 +2580,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2529,7 +2638,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
GenerateMissBranch();
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2545,20 +2654,29 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// -----------------------------------
Label miss;
- GenerateStoreField(masm(), object, index, transition, r1, r2, r3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ r1, r2, r3, r4,
+ &miss);
__ bind(&miss);
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<JSObject> object,
- Handle<AccessorInfo> callback,
- Handle<String> name) {
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : receiver
@@ -2566,19 +2684,12 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -- lr : return address
// -----------------------------------
Label miss;
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(r1, &miss);
+ CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);
- // Check that the map of the object hasn't changed.
- __ CheckMap(r1, r3, Handle<Map>(object->map()), &miss,
- DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(r1, r3, &miss);
- }
-
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+ // Stub never generated for non-global objects that require access checks.
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
__ push(r1); // receiver
__ mov(ip, Operand(callback)); // callback info
@@ -2596,7 +2707,80 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void StoreStubCompiler::GenerateStoreViaSetter(
+ MacroAssembler* masm,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(r0);
+
+ if (!setter.is_null()) {
+ // Call the JavaScript setter with receiver and value on the stack.
+ __ Push(r1, r0);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(r0);
+
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(r1, &miss);
+ CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);
+
+ GenerateStoreViaSetter(masm(), setter);
+
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2641,7 +2825,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2687,7 +2871,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2722,7 +2906,7 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NONEXISTENT, factory()->empty_string());
+ return GetCode(Code::NONEXISTENT, factory()->empty_string());
}
@@ -2742,7 +2926,7 @@ Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2757,13 +2941,76 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
// -- lr : return address
// -----------------------------------
Label miss;
- GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4, callback, name,
+ GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4, r5, callback, name,
&miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ if (!getter.is_null()) {
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(r0);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(r0, &miss);
+ CheckPrototypes(receiver, r0, holder, r3, r4, r1, name, &miss);
+
+ GenerateLoadViaGetter(masm(), getter);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2783,7 +3030,7 @@ Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2805,7 +3052,7 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2847,7 +3094,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2870,7 +3117,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2890,12 +3137,12 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
__ cmp(r0, Operand(name));
__ b(ne, &miss);
- GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4, callback, name,
+ GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4, r5, callback, name,
&miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2920,7 +3167,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2946,7 +3193,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2967,7 +3214,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2993,7 +3240,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3018,7 +3265,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
__ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3038,7 +3285,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -3066,7 +3313,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
__ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -3091,7 +3338,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// r3 is used as scratch register. r1 and r2 keep their values if a jump to
// the miss label is generated.
- GenerateStoreField(masm(), object, index, transition, r2, r1, r3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ r2, r1, r3, r4,
+ &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
@@ -3099,7 +3352,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
@@ -3123,7 +3378,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -3162,7 +3417,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
__ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -3212,7 +3467,13 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// r1: constructor function
// r2: initial map
// r7: undefined
+ ASSERT(function->has_initial_map());
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+#ifdef DEBUG
+ int instance_size = function->initial_map()->instance_size();
+ __ cmp(r3, Operand(instance_size >> kPointerSizeLog2));
+ __ Check(eq, "Instance size of initial map changed.");
+#endif
__ AllocateInNewSpace(r3, r4, r5, r6, &generic_stub_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to initial
@@ -3270,7 +3531,6 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
}
// Fill the unused in-object property fields with undefined.
- ASSERT(function->has_initial_map());
for (int i = shared->this_property_assignments_count();
i < function->initial_map()->inobject_properties();
i++) {
@@ -3372,8 +3632,11 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3388,9 +3651,10 @@ static void GenerateSmiKeyCheck(MacroAssembler* masm,
Register scratch0,
Register scratch1,
DwVfpRegister double_scratch0,
+ DwVfpRegister double_scratch1,
Label* fail) {
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
Label key_ok;
// Check for smi or a smi inside a heap number. We convert the heap
// number and check if the conversion is exact and fits into the smi
@@ -3404,13 +3668,12 @@ static void GenerateSmiKeyCheck(MacroAssembler* masm,
__ sub(ip, key, Operand(kHeapObjectTag));
__ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
__ EmitVFPTruncate(kRoundToZero,
- double_scratch0.low(),
- double_scratch0,
scratch0,
+ double_scratch0,
scratch1,
+ double_scratch1,
kCheckForInexactConversion);
__ b(ne, fail);
- __ vmov(scratch0, double_scratch0.low());
__ TrySmiTag(scratch0, fail, scratch1);
__ mov(key, scratch0);
__ bind(&key_ok);
@@ -3438,7 +3701,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key, r4, r5, d1, d2, &miss_force_generic);
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
// r3: elements array
@@ -3476,8 +3739,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ ldr(value, MemOperand(r3, key, LSL, 1));
break;
case EXTERNAL_FLOAT_ELEMENTS:
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ add(r2, r3, Operand(key, LSL, 1));
__ vldr(s0, r2, 0);
} else {
@@ -3485,8 +3748,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
break;
case EXTERNAL_DOUBLE_ELEMENTS:
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ add(r2, r3, Operand(key, LSL, 2));
__ vldr(d0, r2, 0);
} else {
@@ -3497,8 +3760,11 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3526,22 +3792,28 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ Ret();
__ bind(&box_int);
- // Allocate a HeapNumber for the result and perform int-to-double
- // conversion. Don't touch r0 or r1 as they are needed if allocation
- // fails.
- __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
- // Now we can use r0 for the result as key is not needed any more.
- __ mov(r0, r5);
-
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
+ // Allocate a HeapNumber for the result and perform int-to-double
+ // conversion. Don't touch r0 or r1 as they are needed if allocation
+ // fails.
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+
+ __ AllocateHeapNumber(r5, r3, r4, r6, &slow, DONT_TAG_RESULT);
+ // Now we can use r0 for the result as key is not needed any more.
+ __ add(r0, r5, Operand(kHeapObjectTag));
__ vmov(s0, value);
__ vcvt_f64_s32(d0, s0);
- __ sub(r3, r0, Operand(kHeapObjectTag));
- __ vstr(d0, r3, HeapNumber::kValueOffset);
+ __ vstr(d0, r5, HeapNumber::kValueOffset);
__ Ret();
} else {
+ // Allocate a HeapNumber for the result and perform int-to-double
+ // conversion. Don't touch r0 or r1 as they are needed if allocation
+ // fails.
+ __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
+ __ AllocateHeapNumber(r5, r3, r4, r6, &slow, TAG_RESULT);
+ // Now we can use r0 for the result as key is not needed any more.
+ __ mov(r0, r5);
Register dst1 = r1;
Register dst2 = r3;
FloatingPointHelper::Destination dest =
@@ -3562,8 +3834,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// The test is different for unsigned int values. Since we need
// the value to be in the range of a positive smi, we can't
// handle either of the top two bits being set in the value.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
Label box_int, done;
__ tst(value, Operand(0xC0000000));
__ b(ne, &box_int);
@@ -3577,13 +3849,12 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
// registers - also when jumping due to exhausted young space.
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow, DONT_TAG_RESULT);
__ vcvt_f64_u32(d0, s0);
- __ sub(r1, r2, Operand(kHeapObjectTag));
- __ vstr(d0, r1, HeapNumber::kValueOffset);
+ __ vstr(d0, r2, HeapNumber::kValueOffset);
- __ mov(r0, r2);
+ __ add(r0, r2, Operand(kHeapObjectTag));
__ Ret();
} else {
// Check whether unsigned integer fits into smi.
@@ -3615,7 +3886,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// clobbers all registers - also when jumping due to exhausted young
// space.
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
+ __ AllocateHeapNumber(r4, r5, r7, r6, &slow, TAG_RESULT);
__ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
__ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
@@ -3626,25 +3897,24 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
} else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
// For the floating-point array type, we need to always allocate a
// HeapNumber.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Allocate a HeapNumber for the result. Don't use r0 and r1 as
// AllocateHeapNumber clobbers all registers - also when jumping due to
// exhausted young space.
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow, DONT_TAG_RESULT);
__ vcvt_f64_f32(d0, s0);
- __ sub(r1, r2, Operand(kHeapObjectTag));
- __ vstr(d0, r1, HeapNumber::kValueOffset);
+ __ vstr(d0, r2, HeapNumber::kValueOffset);
- __ mov(r0, r2);
+ __ add(r0, r2, Operand(kHeapObjectTag));
__ Ret();
} else {
// Allocate a HeapNumber for the result. Don't use r0 and r1 as
// AllocateHeapNumber clobbers all registers - also when jumping due to
// exhausted young space.
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
+ __ AllocateHeapNumber(r3, r4, r5, r6, &slow, TAG_RESULT);
// VFP is not available, do manual single to double conversion.
// r2: floating point value (binary32)
@@ -3694,24 +3964,23 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ Ret();
}
} else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
// Allocate a HeapNumber for the result. Don't use r0 and r1 as
// AllocateHeapNumber clobbers all registers - also when jumping due to
// exhausted young space.
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
- __ sub(r1, r2, Operand(kHeapObjectTag));
- __ vstr(d0, r1, HeapNumber::kValueOffset);
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow, DONT_TAG_RESULT);
+ __ vstr(d0, r2, HeapNumber::kValueOffset);
- __ mov(r0, r2);
+ __ add(r0, r2, Operand(kHeapObjectTag));
__ Ret();
} else {
// Allocate a HeapNumber for the result. Don't use r0 and r1 as
// AllocateHeapNumber clobbers all registers - also when jumping due to
// exhausted young space.
__ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
+ __ AllocateHeapNumber(r4, r5, r6, r7, &slow, TAG_RESULT);
__ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
__ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
@@ -3769,7 +4038,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key, r4, r5, d1, d2, &miss_force_generic);
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
@@ -3820,7 +4089,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ add(r3, r3, Operand(key, LSL, 2));
// r3: effective address of the double element
FloatingPointHelper::Destination destination;
- if (CpuFeatures::IsSupported(VFP3)) {
+ if (CpuFeatures::IsSupported(VFP2)) {
destination = FloatingPointHelper::kVFPRegisters;
} else {
destination = FloatingPointHelper::kCoreRegisters;
@@ -3830,7 +4099,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
d0, r6, r7, // These are: double_dst, dst1, dst2.
r4, s2); // These are: scratch2, single_scratch.
if (destination == FloatingPointHelper::kVFPRegisters) {
- CpuFeatures::Scope scope(VFP3);
+ CpuFeatures::Scope scope(VFP2);
__ vstr(d0, r3, 0);
} else {
__ str(r6, MemOperand(r3, 0));
@@ -3838,8 +4107,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3862,8 +4134,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// The WebGL specification leaves the behavior of storing NaN and
// +/-Infinity into integer arrays basically undefined. For more
// reproducible behavior, convert these to zero.
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
// vldr requires offset to be a multiple of 4 so we can not
@@ -3902,8 +4174,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4042,8 +4317,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4095,7 +4373,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, r0, r4, r5, d1, d2, &miss_force_generic);
// Get the elements array.
__ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
@@ -4147,7 +4425,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);
// Get the elements array.
__ ldr(elements_reg,
@@ -4169,7 +4447,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// Non-NaN. Allocate a new heap number and copy the double value into it.
__ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
- heap_number_map, &slow_allocate_heapnumber);
+ heap_number_map, &slow_allocate_heapnumber, TAG_RESULT);
// Don't need to reload the upper 32 bits of the double, it's already in
// scratch.
@@ -4223,9 +4501,9 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(value_reg, &transition_elements_kind);
}
@@ -4253,7 +4531,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
DONT_DO_SMI_CHECK);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4263,7 +4541,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
__ str(value_reg, MemOperand(scratch));
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4391,7 +4669,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);
__ ldr(elements_reg,
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
@@ -4416,6 +4694,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ StoreNumberToDoubleElements(value_reg,
key_reg,
receiver_reg,
+ // All registers after this are overwritten.
elements_reg,
scratch1,
scratch2,
diff --git a/src/3rdparty/v8/src/array.js b/src/3rdparty/v8/src/array.js
index a1cc5b6..37053ce 100644
--- a/src/3rdparty/v8/src/array.js
+++ b/src/3rdparty/v8/src/array.js
@@ -62,7 +62,7 @@ function GetSortedArrayKeys(array, intervals) {
}
}
}
- keys.sort(function(a, b) { return a - b; });
+ %_CallFunction(keys, function(a, b) { return a - b; }, ArraySort);
return keys;
}
@@ -441,8 +441,8 @@ function ArrayPop() {
}
n--;
var value = this[n];
- this.length = n;
delete this[n];
+ this.length = n;
return value;
}
@@ -581,7 +581,7 @@ function ArrayShift() {
var first = this[0];
- if (IS_ARRAY(this)) {
+ if (IS_ARRAY(this) && !%IsObserved(this)) {
SmartMove(this, 0, 1, len, 0);
} else {
SimpleMove(this, 0, 1, len, 0);
@@ -602,7 +602,7 @@ function ArrayUnshift(arg1) { // length == 1
var len = TO_UINT32(this.length);
var num_arguments = %_ArgumentsLength();
- if (IS_ARRAY(this)) {
+ if (IS_ARRAY(this) && !%IsObserved(this)) {
SmartMove(this, 0, 0, len, num_arguments);
} else {
SimpleMove(this, 0, 0, len, num_arguments);
@@ -649,6 +649,7 @@ function ArraySlice(start, end) {
if (end_i < start_i) return result;
if (IS_ARRAY(this) &&
+ !%IsObserved(this) &&
(end_i > 1000) &&
(%EstimateNumberOfElements(this) < end_i)) {
SmartSlice(this, start_i, end_i - start_i, len, result);
@@ -705,7 +706,9 @@ function ArraySplice(start, delete_count) {
var use_simple_splice = true;
- if (IS_ARRAY(this) && num_additional_args !== del_count) {
+ if (IS_ARRAY(this) &&
+ !%IsObserved(this) &&
+ num_additional_args !== del_count) {
// If we are only deleting/moving a few things near the end of the
// array then the simple version is going to be faster, because it
// doesn't touch most of the array.
@@ -777,78 +780,103 @@ function ArraySort(comparefn) {
}
};
- var QuickSort = function QuickSort(a, from, to) {
- // Insertion sort is faster for short arrays.
- if (to - from <= 10) {
- InsertionSort(a, from, to);
- return;
+ var GetThirdIndex = function(a, from, to) {
+ var t_array = [];
+ // Use both 'from' and 'to' to determine the pivot candidates.
+ var increment = 200 + ((to - from) & 15);
+ for (var i = from + 1; i < to - 1; i += increment) {
+ t_array.push([i, a[i]]);
}
- // Find a pivot as the median of first, last and middle element.
- var v0 = a[from];
- var v1 = a[to - 1];
- var middle_index = from + ((to - from) >> 1);
- var v2 = a[middle_index];
- var c01 = %_CallFunction(receiver, v0, v1, comparefn);
- if (c01 > 0) {
- // v1 < v0, so swap them.
- var tmp = v0;
- v0 = v1;
- v1 = tmp;
- } // v0 <= v1.
- var c02 = %_CallFunction(receiver, v0, v2, comparefn);
- if (c02 >= 0) {
- // v2 <= v0 <= v1.
- var tmp = v0;
- v0 = v2;
- v2 = v1;
- v1 = tmp;
- } else {
- // v0 <= v1 && v0 < v2
- var c12 = %_CallFunction(receiver, v1, v2, comparefn);
- if (c12 > 0) {
- // v0 <= v2 < v1
- var tmp = v1;
- v1 = v2;
- v2 = tmp;
+ t_array.sort(function(a, b) {
+ return %_CallFunction(receiver, a[1], b[1], comparefn) } );
+ var third_index = t_array[t_array.length >> 1][0];
+ return third_index;
+ }
+
+ var QuickSort = function QuickSort(a, from, to) {
+ var third_index = 0;
+ while (true) {
+ // Insertion sort is faster for short arrays.
+ if (to - from <= 10) {
+ InsertionSort(a, from, to);
+ return;
}
- }
- // v0 <= v1 <= v2
- a[from] = v0;
- a[to - 1] = v2;
- var pivot = v1;
- var low_end = from + 1; // Upper bound of elements lower than pivot.
- var high_start = to - 1; // Lower bound of elements greater than pivot.
- a[middle_index] = a[low_end];
- a[low_end] = pivot;
-
- // From low_end to i are elements equal to pivot.
- // From i to high_start are elements that haven't been compared yet.
- partition: for (var i = low_end + 1; i < high_start; i++) {
- var element = a[i];
- var order = %_CallFunction(receiver, element, pivot, comparefn);
- if (order < 0) {
- a[i] = a[low_end];
- a[low_end] = element;
- low_end++;
- } else if (order > 0) {
- do {
- high_start--;
- if (high_start == i) break partition;
- var top_elem = a[high_start];
- order = %_CallFunction(receiver, top_elem, pivot, comparefn);
- } while (order > 0);
- a[i] = a[high_start];
- a[high_start] = element;
+ if (to - from > 1000) {
+ third_index = GetThirdIndex(a, from, to);
+ } else {
+ third_index = from + ((to - from) >> 1);
+ }
+ // Find a pivot as the median of first, last and middle element.
+ var v0 = a[from];
+ var v1 = a[to - 1];
+ var v2 = a[third_index];
+ var c01 = %_CallFunction(receiver, v0, v1, comparefn);
+ if (c01 > 0) {
+ // v1 < v0, so swap them.
+ var tmp = v0;
+ v0 = v1;
+ v1 = tmp;
+ } // v0 <= v1.
+ var c02 = %_CallFunction(receiver, v0, v2, comparefn);
+ if (c02 >= 0) {
+ // v2 <= v0 <= v1.
+ var tmp = v0;
+ v0 = v2;
+ v2 = v1;
+ v1 = tmp;
+ } else {
+ // v0 <= v1 && v0 < v2
+ var c12 = %_CallFunction(receiver, v1, v2, comparefn);
+ if (c12 > 0) {
+ // v0 <= v2 < v1
+ var tmp = v1;
+ v1 = v2;
+ v2 = tmp;
+ }
+ }
+ // v0 <= v1 <= v2
+ a[from] = v0;
+ a[to - 1] = v2;
+ var pivot = v1;
+ var low_end = from + 1; // Upper bound of elements lower than pivot.
+ var high_start = to - 1; // Lower bound of elements greater than pivot.
+ a[third_index] = a[low_end];
+ a[low_end] = pivot;
+
+ // From low_end to i are elements equal to pivot.
+ // From i to high_start are elements that haven't been compared yet.
+ partition: for (var i = low_end + 1; i < high_start; i++) {
+ var element = a[i];
+ var order = %_CallFunction(receiver, element, pivot, comparefn);
if (order < 0) {
- element = a[i];
a[i] = a[low_end];
a[low_end] = element;
low_end++;
+ } else if (order > 0) {
+ do {
+ high_start--;
+ if (high_start == i) break partition;
+ var top_elem = a[high_start];
+ order = %_CallFunction(receiver, top_elem, pivot, comparefn);
+ } while (order > 0);
+ a[i] = a[high_start];
+ a[high_start] = element;
+ if (order < 0) {
+ element = a[i];
+ a[i] = a[low_end];
+ a[low_end] = element;
+ low_end++;
+ }
}
}
+ if (to - high_start < low_end - from) {
+ QuickSort(a, high_start, to);
+ to = low_end;
+ } else {
+ QuickSort(a, from, low_end);
+ from = high_start;
+ }
}
- QuickSort(a, from, low_end);
- QuickSort(a, high_start, to);
};
// Copy elements in the range 0..length from obj's prototype chain
@@ -1524,9 +1552,11 @@ function SetUpArray() {
// exposed to user code.
// Adding only the functions that are actually used.
SetUpLockedPrototype(InternalArray, $Array(), $Array(
+ "indexOf", getFunction("indexOf", ArrayIndexOf),
"join", getFunction("join", ArrayJoin),
"pop", getFunction("pop", ArrayPop),
- "push", getFunction("push", ArrayPush)
+ "push", getFunction("push", ArrayPush),
+ "splice", getFunction("splice", ArraySplice)
));
}
diff --git a/src/3rdparty/v8/src/assembler.cc b/src/3rdparty/v8/src/assembler.cc
index 4c1c744..c0867dd 100644
--- a/src/3rdparty/v8/src/assembler.cc
+++ b/src/3rdparty/v8/src/assembler.cc
@@ -108,7 +108,9 @@ const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
AssemblerBase::AssemblerBase(Isolate* isolate)
: isolate_(isolate),
- jit_cookie_(0) {
+ jit_cookie_(0),
+ emit_debug_code_(FLAG_debug_code),
+ predictable_code_size_(false) {
if (FLAG_mask_constants_with_cookie && isolate != NULL) {
jit_cookie_ = V8::RandomPrivate(isolate);
}
@@ -141,7 +143,7 @@ int Label::pos() const {
// an iteration.
//
// The encoding relies on the fact that there are fewer than 14
-// different non-compactly encoded relocation modes.
+// different relocation modes using standard non-compact encoding.
//
// The first byte of a relocation record has a tag in its low 2 bits:
// Here are the record schemes, depending on the low tag and optional higher
@@ -173,7 +175,9 @@ int Label::pos() const {
// 00 [4 bit middle_tag] 11 followed by
// 00 [6 bit pc delta]
//
-// 1101: not used (would allow one more relocation mode to be added)
+// 1101: constant pool. Used on ARM only for now.
+// The format is: 11 1101 11
+// signed int (size of the constant pool).
// 1110: long_data_record
// The format is: [2-bit data_type_tag] 1110 11
// signed intptr_t, lowest byte written first
@@ -194,7 +198,7 @@ int Label::pos() const {
// dropped, and last non-zero chunk tagged with 1.)
-const int kMaxRelocModes = 14;
+const int kMaxStandardNonCompactModes = 14;
const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
@@ -228,6 +232,9 @@ const int kNonstatementPositionTag = 1;
const int kStatementPositionTag = 2;
const int kCommentTag = 3;
+const int kConstPoolExtraTag = kPCJumpExtraTag - 2;
+const int kConstPoolTag = 3;
+
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
// Return if the pc_delta can fit in kSmallPCDeltaBits bits.
@@ -285,6 +292,15 @@ void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
}
}
+void RelocInfoWriter::WriteExtraTaggedConstPoolData(int data) {
+ WriteExtraTag(kConstPoolExtraTag, kConstPoolTag);
+ for (int i = 0; i < kIntSize; i++) {
+ *--pos_ = static_cast<byte>(data);
+ // Signed right shift is arithmetic shift. Tested in test-utils.cc.
+ data = data >> kBitsPerByte;
+ }
+}
+
void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
WriteExtraTag(kDataJumpExtraTag, top_tag);
for (int i = 0; i < kIntptrSize; i++) {
@@ -299,9 +315,10 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
#ifdef DEBUG
byte* begin_pos = pos_;
#endif
+ ASSERT(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
ASSERT(rinfo->pc() - last_pc_ >= 0);
- ASSERT(RelocInfo::NUMBER_OF_MODES - RelocInfo::LAST_COMPACT_ENUM <=
- kMaxRelocModes);
+ ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
+ <= kMaxStandardNonCompactModes);
// Use unsigned delta-encoding for pc.
uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
RelocInfo::Mode rmode = rinfo->rmode();
@@ -347,6 +364,9 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
WriteExtraTaggedData(rinfo->data(), kCommentTag);
ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
+ } else if (RelocInfo::IsConstPool(rmode)) {
+ WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
+ WriteExtraTaggedConstPoolData(static_cast<int>(rinfo->data()));
} else {
ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
@@ -397,6 +417,15 @@ void RelocIterator::AdvanceReadId() {
}
+void RelocIterator::AdvanceReadConstPoolData() {
+ int x = 0;
+ for (int i = 0; i < kIntSize; i++) {
+ x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
+ }
+ rinfo_.data_ = x;
+}
+
+
void RelocIterator::AdvanceReadPosition() {
int x = 0;
for (int i = 0; i < kIntSize; i++) {
@@ -500,8 +529,7 @@ void RelocIterator::next() {
ASSERT(tag == kDefaultTag);
int extra_tag = GetExtraTag();
if (extra_tag == kPCJumpExtraTag) {
- int top_tag = GetTopTag();
- if (top_tag == kVariableLengthPCJumpTopTag) {
+ if (GetTopTag() == kVariableLengthPCJumpTopTag) {
AdvanceReadVariableLengthPCJump();
} else {
AdvanceReadPC();
@@ -531,6 +559,13 @@ void RelocIterator::next() {
}
Advance(kIntptrSize);
}
+ } else if ((extra_tag == kConstPoolExtraTag) &&
+ (GetTopTag() == kConstPoolTag)) {
+ if (SetMode(RelocInfo::CONST_POOL)) {
+ AdvanceReadConstPoolData();
+ return;
+ }
+ Advance(kIntSize);
} else {
AdvanceReadPC();
int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
@@ -538,6 +573,15 @@ void RelocIterator::next() {
}
}
}
+ if (code_age_sequence_ != NULL) {
+ byte* old_code_age_sequence = code_age_sequence_;
+ code_age_sequence_ = NULL;
+ if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
+ rinfo_.data_ = 0;
+ rinfo_.pc_ = old_code_age_sequence;
+ return;
+ }
+ }
done_ = true;
}
@@ -553,6 +597,12 @@ RelocIterator::RelocIterator(Code* code, int mode_mask) {
mode_mask_ = mode_mask;
last_id_ = 0;
last_position_ = 0;
+ byte* sequence = code->FindCodeAgeSequence();
+ if (sequence != NULL && !Code::IsYoungSequence(sequence)) {
+ code_age_sequence_ = sequence;
+ } else {
+ code_age_sequence_ = NULL;
+ }
if (mode_mask_ == 0) pos_ = end_;
next();
}
@@ -568,6 +618,7 @@ RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
mode_mask_ = mode_mask;
last_id_ = 0;
last_position_ = 0;
+ code_age_sequence_ = NULL;
if (mode_mask_ == 0) pos_ = end_;
next();
}
@@ -613,11 +664,15 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
return "external reference";
case RelocInfo::INTERNAL_REFERENCE:
return "internal reference";
+ case RelocInfo::CONST_POOL:
+ return "constant pool";
case RelocInfo::DEBUG_BREAK_SLOT:
#ifndef ENABLE_DEBUGGER_SUPPORT
UNREACHABLE();
#endif
return "debug break slot";
+ case RelocInfo::CODE_AGE_SEQUENCE:
+ return "code_age_sequence";
case RelocInfo::NUMBER_OF_MODES:
UNREACHABLE();
return "number_of_modes";
@@ -627,43 +682,43 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
void RelocInfo::Print(FILE* out) {
- PrintF(out, "%p %s", pc_, RelocModeName(rmode_));
+ FPrintF(out, "%p %s", pc_, RelocModeName(rmode_));
if (IsComment(rmode_)) {
- PrintF(out, " (%s)", reinterpret_cast<char*>(data_));
+ FPrintF(out, " (%s)", reinterpret_cast<char*>(data_));
} else if (rmode_ == EMBEDDED_OBJECT) {
- PrintF(out, " (");
+ FPrintF(out, " (");
target_object()->ShortPrint(out);
- PrintF(out, ")");
+ FPrintF(out, ")");
} else if (rmode_ == EXTERNAL_REFERENCE) {
ExternalReferenceEncoder ref_encoder;
- PrintF(out, " (%s) (%p)",
+ FPrintF(out, " (%s) (%p)",
ref_encoder.NameOfAddress(*target_reference_address()),
*target_reference_address());
} else if (IsCodeTarget(rmode_)) {
Code* code = Code::GetCodeFromTargetAddress(target_address());
- PrintF(out, " (%s) (%p)", Code::Kind2String(code->kind()),
+ FPrintF(out, " (%s) (%p)", Code::Kind2String(code->kind()),
target_address());
if (rmode_ == CODE_TARGET_WITH_ID) {
PrintF(" (id=%d)", static_cast<int>(data_));
}
} else if (IsPosition(rmode_)) {
- PrintF(out, " (%" V8_PTR_PREFIX "d)", data());
+ FPrintF(out, " (%" V8_PTR_PREFIX "d)", data());
} else if (rmode_ == RelocInfo::RUNTIME_ENTRY &&
Isolate::Current()->deoptimizer_data() != NULL) {
// Depotimization bailouts are stored as runtime entries.
int id = Deoptimizer::GetDeoptimizationId(
target_address(), Deoptimizer::EAGER);
if (id != Deoptimizer::kNotDeoptimizationEntry) {
- PrintF(out, " (deoptimization bailout %d)", id);
+ FPrintF(out, " (deoptimization bailout %d)", id);
}
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
#endif // ENABLE_DISASSEMBLER
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void RelocInfo::Verify() {
switch (rmode_) {
case EMBEDDED_OBJECT:
@@ -683,12 +738,12 @@ void RelocInfo::Verify() {
case CODE_TARGET: {
// convert inline target address to code object
Address addr = target_address();
- ASSERT(addr != NULL);
+ CHECK(addr != NULL);
// Check that we can find the right code object.
Code* code = Code::GetCodeFromTargetAddress(addr);
Object* found = HEAP->FindCodeObject(addr);
- ASSERT(found->IsCode());
- ASSERT(code->address() == HeapObject::cast(found)->address());
+ CHECK(found->IsCode());
+ CHECK(code->address() == HeapObject::cast(found)->address());
break;
}
case RUNTIME_ENTRY:
@@ -698,15 +753,19 @@ void RelocInfo::Verify() {
case STATEMENT_POSITION:
case EXTERNAL_REFERENCE:
case INTERNAL_REFERENCE:
+ case CONST_POOL:
case DEBUG_BREAK_SLOT:
case NONE:
break;
case NUMBER_OF_MODES:
UNREACHABLE();
break;
+ case CODE_AGE_SEQUENCE:
+ ASSERT(Code::IsYoungSequence(pc_) || code_age_stub()->IsCode());
+ break;
}
}
-#endif // DEBUG
+#endif // VERIFY_HEAP
// -----------------------------------------------------------------------------
@@ -839,6 +898,13 @@ ExternalReference ExternalReference::get_date_field_function(
}
+ExternalReference ExternalReference::get_make_code_young_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(
+ isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
+}
+
+
ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
return ExternalReference(isolate->date_cache()->stamp_address());
}
@@ -955,6 +1021,24 @@ ExternalReference ExternalReference::scheduled_exception_address(
}
+ExternalReference ExternalReference::address_of_pending_message_obj(
+ Isolate* isolate) {
+ return ExternalReference(isolate->pending_message_obj_address());
+}
+
+
+ExternalReference ExternalReference::address_of_has_pending_message(
+ Isolate* isolate) {
+ return ExternalReference(isolate->has_pending_message_address());
+}
+
+
+ExternalReference ExternalReference::address_of_pending_message_script(
+ Isolate* isolate) {
+ return ExternalReference(isolate->pending_message_script_address());
+}
+
+
ExternalReference ExternalReference::address_of_min_int() {
return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}
@@ -1039,7 +1123,7 @@ ExternalReference ExternalReference::re_word_character_map() {
ExternalReference ExternalReference::address_of_static_offsets_vector(
Isolate* isolate) {
return ExternalReference(
- OffsetsVector::static_offsets_vector_address(isolate));
+ reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
}
ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
@@ -1133,6 +1217,12 @@ ExternalReference ExternalReference::math_log_double_function(
}
+ExternalReference ExternalReference::page_flags(Page* page) {
+ return ExternalReference(reinterpret_cast<Address>(page) +
+ MemoryChunk::kFlagsOffset);
+}
+
+
// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
diff --git a/src/3rdparty/v8/src/assembler.h b/src/3rdparty/v8/src/assembler.h
index 05fe320..037799d 100644
--- a/src/3rdparty/v8/src/assembler.h
+++ b/src/3rdparty/v8/src/assembler.h
@@ -51,7 +51,6 @@ class ApiFunction;
namespace internal {
struct StatsCounter;
-const unsigned kNoASTId = -1;
// -----------------------------------------------------------------------------
// Platform independent assembler base class.
@@ -60,7 +59,13 @@ class AssemblerBase: public Malloced {
explicit AssemblerBase(Isolate* isolate);
Isolate* isolate() const { return isolate_; }
- int jit_cookie() { return jit_cookie_; }
+ int jit_cookie() const { return jit_cookie_; }
+
+ bool emit_debug_code() const { return emit_debug_code_; }
+ void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
+
+ bool predictable_code_size() const { return predictable_code_size_; }
+ void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
// Overwrite a host NaN with a quiet target NaN. Used by mksnapshot for
// cross-snapshotting.
@@ -69,6 +74,28 @@ class AssemblerBase: public Malloced {
private:
Isolate* isolate_;
int jit_cookie_;
+ bool emit_debug_code_;
+ bool predictable_code_size_;
+};
+
+
+// Avoids using instructions that vary in size in unpredictable ways between the
+// snapshot and the running VM.
+class PredictableCodeSizeScope {
+ public:
+ explicit PredictableCodeSizeScope(AssemblerBase* assembler)
+ : assembler_(assembler) {
+ old_value_ = assembler_->predictable_code_size();
+ assembler_->set_predictable_code_size(true);
+ }
+
+ ~PredictableCodeSizeScope() {
+ assembler_->set_predictable_code_size(old_value_);
+ }
+
+ private:
+ AssemblerBase* assembler_;
+ bool old_value_;
};
@@ -204,14 +231,25 @@ class RelocInfo BASE_EMBEDDED {
EXTERNAL_REFERENCE, // The address of an external C++ function.
INTERNAL_REFERENCE, // An address inside the same function.
+ // Marks a constant pool. Only used on ARM.
+ // It uses a custom noncompact encoding.
+ CONST_POOL,
+
// add more as needed
// Pseudo-types
- NUMBER_OF_MODES, // There are at most 14 modes with noncompact encoding.
+ NUMBER_OF_MODES, // There are at most 15 modes with noncompact encoding.
NONE, // never recorded
+ CODE_AGE_SEQUENCE, // Not stored in RelocInfo array, used explictly by
+ // code aging.
+ FIRST_REAL_RELOC_MODE = CODE_TARGET,
+ LAST_REAL_RELOC_MODE = CONST_POOL,
+ FIRST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
+ LAST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
LAST_CODE_ENUM = DEBUG_BREAK,
LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL,
// Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
- LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID
+ LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID,
+ LAST_STANDARD_NONCOMPACT_ENUM = INTERNAL_REFERENCE
};
@@ -221,6 +259,15 @@ class RelocInfo BASE_EMBEDDED {
: pc_(pc), rmode_(rmode), data_(data), host_(host) {
}
+ static inline bool IsRealRelocMode(Mode mode) {
+ return mode >= FIRST_REAL_RELOC_MODE &&
+ mode <= LAST_REAL_RELOC_MODE;
+ }
+ static inline bool IsPseudoRelocMode(Mode mode) {
+ ASSERT(!IsRealRelocMode(mode));
+ return mode >= FIRST_PSEUDO_RELOC_MODE &&
+ mode <= LAST_PSEUDO_RELOC_MODE;
+ }
static inline bool IsConstructCall(Mode mode) {
return mode == CONSTRUCT_CALL;
}
@@ -240,6 +287,9 @@ class RelocInfo BASE_EMBEDDED {
static inline bool IsComment(Mode mode) {
return mode == COMMENT;
}
+ static inline bool IsConstPool(Mode mode) {
+ return mode == CONST_POOL;
+ }
static inline bool IsPosition(Mode mode) {
return mode == POSITION || mode == STATEMENT_POSITION;
}
@@ -255,6 +305,9 @@ class RelocInfo BASE_EMBEDDED {
static inline bool IsDebugBreakSlot(Mode mode) {
return mode == DEBUG_BREAK_SLOT;
}
+ static inline bool IsCodeAgeSequence(Mode mode) {
+ return mode == CODE_AGE_SEQUENCE;
+ }
static inline int ModeMask(Mode mode) { return 1 << mode; }
// Accessors
@@ -287,7 +340,8 @@ class RelocInfo BASE_EMBEDDED {
INLINE(Handle<JSGlobalPropertyCell> target_cell_handle());
INLINE(void set_target_cell(JSGlobalPropertyCell* cell,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
-
+ INLINE(Code* code_age_stub());
+ INLINE(void set_code_age_stub(Code* stub));
// Read the address of the word containing the target_address in an
// instruction stream. What this means exactly is architecture-independent.
@@ -342,8 +396,7 @@ class RelocInfo BASE_EMBEDDED {
static const char* RelocModeName(Mode rmode);
void Print(FILE* out);
#endif // ENABLE_DISASSEMBLER
-#ifdef DEBUG
- // Debugging
+#ifdef VERIFY_HEAP
void Verify();
#endif
@@ -362,19 +415,17 @@ class RelocInfo BASE_EMBEDDED {
Mode rmode_;
intptr_t data_;
Code* host_;
-#ifdef V8_TARGET_ARCH_MIPS
- // Code and Embedded Object pointers in mips are stored split
+ // Code and Embedded Object pointers on some platforms are stored split
// across two consecutive 32-bit instructions. Heap management
// routines expect to access these pointers indirectly. The following
- // location provides a place for these pointers to exist natually
+ // location provides a place for these pointers to exist naturally
// when accessed via the Iterator.
Object* reconstructed_obj_ptr_;
// External-reference pointers are also split across instruction-pairs
- // in mips, but are accessed via indirect pointers. This location
+ // on some platforms, but are accessed via indirect pointers. This location
// provides a place for that pointer to exist naturally. Its address
// is returned by RelocInfo::target_reference_address().
Address reconstructed_adr_ptr_;
-#endif // V8_TARGET_ARCH_MIPS
friend class RelocIterator;
};
@@ -416,6 +467,7 @@ class RelocInfoWriter BASE_EMBEDDED {
inline void WriteTaggedPC(uint32_t pc_delta, int tag);
inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag);
inline void WriteExtraTaggedIntData(int data_delta, int top_tag);
+ inline void WriteExtraTaggedConstPoolData(int data);
inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag);
inline void WriteTaggedData(intptr_t data_delta, int tag);
inline void WriteExtraTag(int extra_tag, int top_tag);
@@ -466,6 +518,7 @@ class RelocIterator: public Malloced {
void ReadTaggedPC();
void AdvanceReadPC();
void AdvanceReadId();
+ void AdvanceReadConstPoolData();
void AdvanceReadPosition();
void AdvanceReadData();
void AdvanceReadVariableLengthPCJump();
@@ -481,6 +534,7 @@ class RelocIterator: public Malloced {
byte* pos_;
byte* end_;
+ byte* code_age_sequence_;
RelocInfo rinfo_;
bool done_;
int mode_mask_;
@@ -589,6 +643,8 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference get_date_field_function(Isolate* isolate);
static ExternalReference date_cache_stamp(Isolate* isolate);
+ static ExternalReference get_make_code_young_function(Isolate* isolate);
+
// Deoptimization support.
static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate* isolate);
@@ -640,6 +696,9 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference handle_scope_level_address();
static ExternalReference scheduled_exception_address(Isolate* isolate);
+ static ExternalReference address_of_pending_message_obj(Isolate* isolate);
+ static ExternalReference address_of_has_pending_message(Isolate* isolate);
+ static ExternalReference address_of_pending_message_script(Isolate* isolate);
// Static variables containing common double constants.
static ExternalReference address_of_min_int();
@@ -656,6 +715,8 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference math_tan_double_function(Isolate* isolate);
static ExternalReference math_log_double_function(Isolate* isolate);
+ static ExternalReference page_flags(Page* page);
+
Address address() const {return reinterpret_cast<Address>(address_);}
#ifdef ENABLE_DEBUGGER_SUPPORT
diff --git a/src/3rdparty/v8/src/ast.cc b/src/3rdparty/v8/src/ast.cc
index a02cede..3015b1e 100644
--- a/src/3rdparty/v8/src/ast.cc
+++ b/src/3rdparty/v8/src/ast.cc
@@ -85,8 +85,8 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
VariableProxy::VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position,
- Interface* interface)
+ Interface* interface,
+ int position)
: Expression(isolate),
name_(name),
var_(NULL),
@@ -125,10 +125,7 @@ Assignment::Assignment(Isolate* isolate,
value_(value),
pos_(pos),
binary_operation_(NULL),
- compound_load_id_(kNoNumber),
assignment_id_(GetNextId(isolate)),
- block_start_(false),
- block_end_(false),
is_monomorphic_(false) { }
@@ -156,6 +153,11 @@ bool FunctionLiteral::AllowsLazyCompilation() {
}
+bool FunctionLiteral::AllowsLazyCompilationWithoutContext() {
+ return scope()->AllowsLazyCompilationWithoutContext();
+}
+
+
int FunctionLiteral::start_position() const {
return scope()->start_position();
}
@@ -247,8 +249,11 @@ bool IsEqualNumber(void* first, void* second) {
}
-void ObjectLiteral::CalculateEmitStore() {
- ZoneHashMap table(Literal::Match);
+void ObjectLiteral::CalculateEmitStore(Zone* zone) {
+ ZoneAllocationPolicy allocator(zone);
+
+ ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
+ allocator);
for (int i = properties()->length() - 1; i >= 0; i--) {
ObjectLiteral::Property* property = properties()->at(i);
Literal* literal = property->key();
@@ -257,23 +262,23 @@ void ObjectLiteral::CalculateEmitStore() {
// If the key of a computed property is in the table, do not emit
// a store for the property later.
if (property->kind() == ObjectLiteral::Property::COMPUTED &&
- table.Lookup(literal, hash, false) != NULL) {
+ table.Lookup(literal, hash, false, allocator) != NULL) {
property->set_emit_store(false);
} else {
// Add key to the table.
- table.Lookup(literal, hash, true);
+ table.Lookup(literal, hash, true, allocator);
}
}
}
-void TargetCollector::AddTarget(Label* target) {
+void TargetCollector::AddTarget(Label* target, Zone* zone) {
// Add the label to the collector, but discard duplicates.
int length = targets_.length();
for (int i = 0; i < length; i++) {
if (targets_[i] == target) return;
}
- targets_.Add(target);
+ targets_.Add(target, zone);
}
@@ -402,7 +407,8 @@ bool FunctionDeclaration::IsInlineable() const {
// ----------------------------------------------------------------------------
// Recording of type feedback
-void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
// Record type feedback from the oracle in the AST.
is_uninitialized_ = oracle->LoadIsUninitialized(this);
if (is_uninitialized_) return;
@@ -426,18 +432,21 @@ void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
} else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) {
is_string_access_ = true;
} else if (is_monomorphic_) {
- receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this));
+ receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this),
+ zone);
} else if (oracle->LoadIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
+ oracle->CollectKeyedReceiverTypes(PropertyFeedbackId(), &receiver_types_);
}
}
-void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
Property* prop = target()->AsProperty();
ASSERT(prop != NULL);
- is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
+ TypeFeedbackId id = AssignmentFeedbackId();
+ is_monomorphic_ = oracle->StoreIsMonomorphicNormal(id);
receiver_types_.Clear();
if (prop->key()->IsPropertyName()) {
Literal* lit_key = prop->key()->AsLiteral();
@@ -446,23 +455,26 @@ void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
oracle->StoreReceiverTypes(this, name, &receiver_types_);
} else if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
- } else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
+ receiver_types_.Add(oracle->StoreMonomorphicReceiverType(id), zone);
+ } else if (oracle->StoreIsMegamorphicWithTypeInfo(id)) {
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
+ oracle->CollectKeyedReceiverTypes(id, &receiver_types_);
}
}
-void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
- is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
+void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
+ TypeFeedbackId id = CountStoreFeedbackId();
+ is_monomorphic_ = oracle->StoreIsMonomorphicNormal(id);
receiver_types_.Clear();
if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
- } else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
+ receiver_types_.Add(
+ oracle->StoreMonomorphicReceiverType(id), zone);
+ } else if (oracle->StoreIsMegamorphicWithTypeInfo(id)) {
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
+ oracle->CollectKeyedReceiverTypes(id, &receiver_types_);
}
}
@@ -496,7 +508,7 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
}
LookupResult lookup(type->GetIsolate());
while (true) {
- type->LookupInDescriptors(NULL, *name, &lookup);
+ type->LookupDescriptor(NULL, *name, &lookup);
if (lookup.IsFound()) {
switch (lookup.type()) {
case CONSTANT_FUNCTION:
@@ -511,11 +523,9 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
case INTERCEPTOR:
// We don't know the target.
return false;
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- // Perhaps something interesting is up in the prototype chain...
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
break;
}
}
@@ -523,6 +533,7 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
if (!type->prototype()->IsJSObject()) return false;
// Go up the prototype chain, recording where we are currently.
holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
+ if (!holder_->HasFastProperties()) return false;
type = Handle<Map>(holder()->map());
}
}
@@ -794,7 +805,7 @@ bool RegExpCapture::IsAnchoredAtEnd() {
// output formats are alike.
class RegExpUnparser: public RegExpVisitor {
public:
- RegExpUnparser();
+ explicit RegExpUnparser(Zone* zone);
void VisitCharacterRange(CharacterRange that);
SmartArrayPointer<const char> ToString() { return stream_.ToCString(); }
#define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, void* data);
@@ -804,10 +815,11 @@ class RegExpUnparser: public RegExpVisitor {
StringStream* stream() { return &stream_; }
HeapStringAllocator alloc_;
StringStream stream_;
+ Zone* zone_;
};
-RegExpUnparser::RegExpUnparser() : stream_(&alloc_) {
+RegExpUnparser::RegExpUnparser(Zone* zone) : stream_(&alloc_), zone_(zone) {
}
@@ -847,9 +859,9 @@ void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
if (that->is_negated())
stream()->Add("^");
stream()->Add("[");
- for (int i = 0; i < that->ranges()->length(); i++) {
+ for (int i = 0; i < that->ranges(zone_)->length(); i++) {
if (i > 0) stream()->Add(" ");
- VisitCharacterRange(that->ranges()->at(i));
+ VisitCharacterRange(that->ranges(zone_)->at(i));
}
stream()->Add("]");
return NULL;
@@ -951,8 +963,8 @@ void* RegExpUnparser::VisitEmpty(RegExpEmpty* that, void* data) {
}
-SmartArrayPointer<const char> RegExpTree::ToString() {
- RegExpUnparser unparser;
+SmartArrayPointer<const char> RegExpTree::ToString(Zone* zone) {
+ RegExpUnparser unparser(zone);
Accept(&unparser, NULL);
return unparser.ToString();
}
@@ -1029,6 +1041,14 @@ CaseClause::CaseClause(Isolate* isolate,
increase_node_count(); \
add_flag(kDontSelfOptimize); \
}
+#define DONT_CACHE_NODE(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ add_flag(kDontOptimize); \
+ add_flag(kDontInline); \
+ add_flag(kDontSelfOptimize); \
+ add_flag(kDontCache); \
+ }
REGULAR_NODE(VariableDeclaration)
REGULAR_NODE(FunctionDeclaration)
@@ -1043,6 +1063,7 @@ REGULAR_NODE(SwitchStatement)
REGULAR_NODE(Conditional)
REGULAR_NODE(Literal)
REGULAR_NODE(ObjectLiteral)
+REGULAR_NODE(RegExpLiteral)
REGULAR_NODE(Assignment)
REGULAR_NODE(Throw)
REGULAR_NODE(Property)
@@ -1059,10 +1080,13 @@ REGULAR_NODE(CallNew)
// LOOKUP variables only result from constructs that cannot be inlined anyway.
REGULAR_NODE(VariableProxy)
+// We currently do not optimize any modules. Note in particular, that module
+// instance objects associated with ModuleLiterals are allocated during
+// scope resolution, and references to them are embedded into the code.
+// That code may hence neither be cached nor re-compiled.
DONT_OPTIMIZE_NODE(ModuleDeclaration)
DONT_OPTIMIZE_NODE(ImportDeclaration)
DONT_OPTIMIZE_NODE(ExportDeclaration)
-DONT_OPTIMIZE_NODE(ModuleLiteral)
DONT_OPTIMIZE_NODE(ModuleVariable)
DONT_OPTIMIZE_NODE(ModulePath)
DONT_OPTIMIZE_NODE(ModuleUrl)
@@ -1072,15 +1096,16 @@ DONT_OPTIMIZE_NODE(TryFinallyStatement)
DONT_OPTIMIZE_NODE(DebuggerStatement)
DONT_OPTIMIZE_NODE(SharedFunctionInfoLiteral)
-DONT_INLINE_NODE(FunctionLiteral)
-DONT_INLINE_NODE(RegExpLiteral) // TODO(1322): Allow materialized literals.
DONT_INLINE_NODE(ArrayLiteral) // TODO(1322): Allow materialized literals.
+DONT_INLINE_NODE(FunctionLiteral)
DONT_SELFOPTIMIZE_NODE(DoWhileStatement)
DONT_SELFOPTIMIZE_NODE(WhileStatement)
DONT_SELFOPTIMIZE_NODE(ForStatement)
DONT_SELFOPTIMIZE_NODE(ForInStatement)
+DONT_CACHE_NODE(ModuleLiteral)
+
void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
increase_node_count();
if (node->is_jsruntime()) {
@@ -1101,6 +1126,7 @@ void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
#undef DONT_OPTIMIZE_NODE
#undef DONT_INLINE_NODE
#undef DONT_SELFOPTIMIZE_NODE
+#undef DONT_CACHE_NODE
Handle<String> Literal::ToString() {
diff --git a/src/3rdparty/v8/src/ast.h b/src/3rdparty/v8/src/ast.h
index d2785c2..d3f90b2 100644
--- a/src/3rdparty/v8/src/ast.h
+++ b/src/3rdparty/v8/src/ast.h
@@ -37,7 +37,7 @@
#include "list-inl.h"
#include "runtime.h"
#include "small-pointer-list.h"
-#include "smart-array-pointer.h"
+#include "smart-pointers.h"
#include "token.h"
#include "utils.h"
#include "variables.h"
@@ -158,14 +158,16 @@ typedef ZoneList<Handle<Object> > ZoneObjectList;
#define DECLARE_NODE_TYPE(type) \
virtual void Accept(AstVisitor* v); \
- virtual AstNode::Type node_type() const { return AstNode::k##type; }
+ virtual AstNode::Type node_type() const { return AstNode::k##type; } \
+ template<class> friend class AstNodeFactory;
enum AstPropertiesFlag {
kDontInline,
kDontOptimize,
kDontSelfOptimize,
- kDontSoftInline
+ kDontSoftInline,
+ kDontCache
};
@@ -194,13 +196,6 @@ class AstNode: public ZoneObject {
};
#undef DECLARE_TYPE_ENUM
- static const int kNoNumber = -1;
- static const int kFunctionEntryId = 2; // Using 0 could disguise errors.
- // This AST id identifies the point after the declarations have been
- // visited. We need it to capture the environment effects of declarations
- // that emit code (function declarations).
- static const int kDeclarationsId = 3;
-
void* operator new(size_t size, Zone* zone) {
return zone->New(static_cast<int>(size));
}
@@ -210,7 +205,7 @@ class AstNode: public ZoneObject {
virtual ~AstNode() { }
virtual void Accept(AstVisitor* v) = 0;
- virtual Type node_type() const { return kInvalid; }
+ virtual Type node_type() const = 0;
// Type testing & conversion functions overridden by concrete subclasses.
#define DECLARE_NODE_FUNCTIONS(type) \
@@ -219,9 +214,6 @@ class AstNode: public ZoneObject {
AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
#undef DECLARE_NODE_FUNCTIONS
- virtual Declaration* AsDeclaration() { return NULL; }
- virtual Statement* AsStatement() { return NULL; }
- virtual Expression* AsExpression() { return NULL; }
virtual TargetCollector* AsTargetCollector() { return NULL; }
virtual BreakableStatement* AsBreakableStatement() { return NULL; }
virtual IterationStatement* AsIterationStatement() { return NULL; }
@@ -238,6 +230,12 @@ class AstNode: public ZoneObject {
return tmp;
}
+ // Some nodes re-use bailout IDs for type feedback.
+ static TypeFeedbackId reuse(BailoutId id) {
+ return TypeFeedbackId(id.ToInt());
+ }
+
+
private:
// Hidden to prevent accidental usage. It would have to load the
// current zone from the TLS.
@@ -251,8 +249,6 @@ class Statement: public AstNode {
public:
Statement() : statement_pos_(RelocInfo::kNoPosition) {}
- virtual Statement* AsStatement() { return this; }
-
bool IsEmpty() { return AsEmptyStatement() != NULL; }
void set_statement_pos(int statement_pos) { statement_pos_ = statement_pos; }
@@ -266,17 +262,17 @@ class Statement: public AstNode {
class SmallMapList {
public:
SmallMapList() {}
- explicit SmallMapList(int capacity) : list_(capacity) {}
+ SmallMapList(int capacity, Zone* zone) : list_(capacity, zone) {}
- void Reserve(int capacity) { list_.Reserve(capacity); }
+ void Reserve(int capacity, Zone* zone) { list_.Reserve(capacity, zone); }
void Clear() { list_.Clear(); }
void Sort() { list_.Sort(); }
bool is_empty() const { return list_.is_empty(); }
int length() const { return list_.length(); }
- void Add(Handle<Map> handle) {
- list_.Add(handle.location());
+ void Add(Handle<Map> handle, Zone* zone) {
+ list_.Add(handle.location(), zone);
}
Handle<Map> at(int i) const {
@@ -313,8 +309,6 @@ class Expression: public AstNode {
return 0;
}
- virtual Expression* AsExpression() { return this; }
-
virtual bool IsValidLeftHandSide() { return false; }
// Helpers for ToBoolean conversion.
@@ -355,8 +349,8 @@ class Expression: public AstNode {
return types->at(0);
}
- unsigned id() const { return id_; }
- unsigned test_id() const { return test_id_; }
+ BailoutId id() const { return id_; }
+ TypeFeedbackId test_id() const { return test_id_; }
protected:
explicit Expression(Isolate* isolate)
@@ -364,8 +358,8 @@ class Expression: public AstNode {
test_id_(GetNextId(isolate)) {}
private:
- int id_;
- int test_id_;
+ const BailoutId id_;
+ const TypeFeedbackId test_id_;
};
@@ -389,9 +383,8 @@ class BreakableStatement: public Statement {
// Testers.
bool is_target_for_anonymous() const { return type_ == TARGET_FOR_ANONYMOUS; }
- // Bailout support.
- int EntryId() const { return entry_id_; }
- int ExitId() const { return exit_id_; }
+ BailoutId EntryId() const { return entry_id_; }
+ BailoutId ExitId() const { return exit_id_; }
protected:
BreakableStatement(Isolate* isolate, ZoneStringList* labels, Type type)
@@ -407,8 +400,8 @@ class BreakableStatement: public Statement {
ZoneStringList* labels_;
Type type_;
Label break_target_;
- int entry_id_;
- int exit_id_;
+ const BailoutId entry_id_;
+ const BailoutId exit_id_;
};
@@ -416,7 +409,9 @@ class Block: public BreakableStatement {
public:
DECLARE_NODE_TYPE(Block)
- void AddStatement(Statement* statement) { statements_.Add(statement); }
+ void AddStatement(Statement* statement, Zone* zone) {
+ statements_.Add(statement, zone);
+ }
ZoneList<Statement*>* statements() { return &statements_; }
bool is_initializer_block() const { return is_initializer_block_; }
@@ -425,14 +420,13 @@ class Block: public BreakableStatement {
void set_scope(Scope* scope) { scope_ = scope; }
protected:
- template<class> friend class AstNodeFactory;
-
Block(Isolate* isolate,
ZoneStringList* labels,
int capacity,
- bool is_initializer_block)
+ bool is_initializer_block,
+ Zone* zone)
: BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
- statements_(capacity),
+ statements_(capacity, zone),
is_initializer_block_(is_initializer_block),
scope_(NULL) {
}
@@ -452,8 +446,6 @@ class Declaration: public AstNode {
virtual InitializationFlag initialization() const = 0;
virtual bool IsInlineable() const;
- virtual Declaration* AsDeclaration() { return this; }
-
protected:
Declaration(VariableProxy* proxy,
VariableMode mode,
@@ -461,10 +453,7 @@ class Declaration: public AstNode {
: proxy_(proxy),
mode_(mode),
scope_(scope) {
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ ASSERT(IsDeclaredVariableMode(mode));
}
private:
@@ -485,8 +474,6 @@ class VariableDeclaration: public Declaration {
}
protected:
- template<class> friend class AstNodeFactory;
-
VariableDeclaration(VariableProxy* proxy,
VariableMode mode,
Scope* scope)
@@ -506,8 +493,6 @@ class FunctionDeclaration: public Declaration {
virtual bool IsInlineable() const;
protected:
- template<class> friend class AstNodeFactory;
-
FunctionDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* fun,
@@ -534,8 +519,6 @@ class ModuleDeclaration: public Declaration {
}
protected:
- template<class> friend class AstNodeFactory;
-
ModuleDeclaration(VariableProxy* proxy,
Module* module,
Scope* scope)
@@ -558,8 +541,6 @@ class ImportDeclaration: public Declaration {
}
protected:
- template<class> friend class AstNodeFactory;
-
ImportDeclaration(VariableProxy* proxy,
Module* module,
Scope* scope)
@@ -581,25 +562,27 @@ class ExportDeclaration: public Declaration {
}
protected:
- template<class> friend class AstNodeFactory;
-
- ExportDeclaration(VariableProxy* proxy,
- Scope* scope)
- : Declaration(proxy, LET, scope) {
- }
+ ExportDeclaration(VariableProxy* proxy, Scope* scope)
+ : Declaration(proxy, LET, scope) {}
};
class Module: public AstNode {
public:
Interface* interface() const { return interface_; }
+ Block* body() const { return body_; }
protected:
- Module() : interface_(Interface::NewModule()) {}
- explicit Module(Interface* interface) : interface_(interface) {}
+ explicit Module(Zone* zone)
+ : interface_(Interface::NewModule(zone)),
+ body_(NULL) {}
+ explicit Module(Interface* interface, Block* body = NULL)
+ : interface_(interface),
+ body_(body) {}
private:
Interface* interface_;
+ Block* body_;
};
@@ -607,20 +590,8 @@ class ModuleLiteral: public Module {
public:
DECLARE_NODE_TYPE(ModuleLiteral)
- Block* body() const { return body_; }
- Handle<Context> context() const { return context_; }
-
protected:
- template<class> friend class AstNodeFactory;
-
- ModuleLiteral(Block* body, Interface* interface)
- : Module(interface),
- body_(body) {
- }
-
- private:
- Block* body_;
- Handle<Context> context_;
+ ModuleLiteral(Block* body, Interface* interface) : Module(interface, body) {}
};
@@ -631,8 +602,6 @@ class ModuleVariable: public Module {
VariableProxy* proxy() const { return proxy_; }
protected:
- template<class> friend class AstNodeFactory;
-
inline explicit ModuleVariable(VariableProxy* proxy);
private:
@@ -648,10 +617,9 @@ class ModulePath: public Module {
Handle<String> name() const { return name_; }
protected:
- template<class> friend class AstNodeFactory;
-
- ModulePath(Module* module, Handle<String> name)
- : module_(module),
+ ModulePath(Module* module, Handle<String> name, Zone* zone)
+ : Module(zone),
+ module_(module),
name_(name) {
}
@@ -668,9 +636,8 @@ class ModuleUrl: public Module {
Handle<String> url() const { return url_; }
protected:
- template<class> friend class AstNodeFactory;
-
- explicit ModuleUrl(Handle<String> url) : url_(url) {
+ ModuleUrl(Handle<String> url, Zone* zone)
+ : Module(zone), url_(url) {
}
private:
@@ -685,10 +652,9 @@ class IterationStatement: public BreakableStatement {
Statement* body() const { return body_; }
- // Bailout support.
- int OsrEntryId() const { return osr_entry_id_; }
- virtual int ContinueId() const = 0;
- virtual int StackCheckId() const = 0;
+ BailoutId OsrEntryId() const { return osr_entry_id_; }
+ virtual BailoutId ContinueId() const = 0;
+ virtual BailoutId StackCheckId() const = 0;
// Code generation
Label* continue_target() { return &continue_target_; }
@@ -707,7 +673,7 @@ class IterationStatement: public BreakableStatement {
private:
Statement* body_;
Label continue_target_;
- int osr_entry_id_;
+ const BailoutId osr_entry_id_;
};
@@ -727,14 +693,11 @@ class DoWhileStatement: public IterationStatement {
int condition_position() { return condition_position_; }
void set_condition_position(int pos) { condition_position_ = pos; }
- // Bailout support.
- virtual int ContinueId() const { return continue_id_; }
- virtual int StackCheckId() const { return back_edge_id_; }
- int BackEdgeId() const { return back_edge_id_; }
+ virtual BailoutId ContinueId() const { return continue_id_; }
+ virtual BailoutId StackCheckId() const { return back_edge_id_; }
+ BailoutId BackEdgeId() const { return back_edge_id_; }
protected:
- template<class> friend class AstNodeFactory;
-
DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
: IterationStatement(isolate, labels),
cond_(NULL),
@@ -746,8 +709,8 @@ class DoWhileStatement: public IterationStatement {
private:
Expression* cond_;
int condition_position_;
- int continue_id_;
- int back_edge_id_;
+ const BailoutId continue_id_;
+ const BailoutId back_edge_id_;
};
@@ -768,14 +731,11 @@ class WhileStatement: public IterationStatement {
may_have_function_literal_ = value;
}
- // Bailout support.
- virtual int ContinueId() const { return EntryId(); }
- virtual int StackCheckId() const { return body_id_; }
- int BodyId() const { return body_id_; }
+ virtual BailoutId ContinueId() const { return EntryId(); }
+ virtual BailoutId StackCheckId() const { return body_id_; }
+ BailoutId BodyId() const { return body_id_; }
protected:
- template<class> friend class AstNodeFactory;
-
WhileStatement(Isolate* isolate, ZoneStringList* labels)
: IterationStatement(isolate, labels),
cond_(NULL),
@@ -787,7 +747,7 @@ class WhileStatement: public IterationStatement {
Expression* cond_;
// True if there is a function literal subexpression in the condition.
bool may_have_function_literal_;
- int body_id_;
+ const BailoutId body_id_;
};
@@ -816,18 +776,15 @@ class ForStatement: public IterationStatement {
may_have_function_literal_ = value;
}
- // Bailout support.
- virtual int ContinueId() const { return continue_id_; }
- virtual int StackCheckId() const { return body_id_; }
- int BodyId() const { return body_id_; }
+ virtual BailoutId ContinueId() const { return continue_id_; }
+ virtual BailoutId StackCheckId() const { return body_id_; }
+ BailoutId BodyId() const { return body_id_; }
bool is_fast_smi_loop() { return loop_variable_ != NULL; }
Variable* loop_variable() { return loop_variable_; }
void set_loop_variable(Variable* var) { loop_variable_ = var; }
protected:
- template<class> friend class AstNodeFactory;
-
ForStatement(Isolate* isolate, ZoneStringList* labels)
: IterationStatement(isolate, labels),
init_(NULL),
@@ -846,8 +803,8 @@ class ForStatement: public IterationStatement {
// True if there is a function literal subexpression in the condition.
bool may_have_function_literal_;
Variable* loop_variable_;
- int continue_id_;
- int body_id_;
+ const BailoutId continue_id_;
+ const BailoutId body_id_;
};
@@ -864,14 +821,14 @@ class ForInStatement: public IterationStatement {
Expression* each() const { return each_; }
Expression* enumerable() const { return enumerable_; }
- virtual int ContinueId() const { return EntryId(); }
- virtual int StackCheckId() const { return body_id_; }
- int BodyId() const { return body_id_; }
- int PrepareId() const { return prepare_id_; }
+ virtual BailoutId ContinueId() const { return EntryId(); }
+ virtual BailoutId StackCheckId() const { return body_id_; }
+ BailoutId BodyId() const { return body_id_; }
+ BailoutId PrepareId() const { return prepare_id_; }
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId ForInFeedbackId() const { return reuse(PrepareId()); }
+ protected:
ForInStatement(Isolate* isolate, ZoneStringList* labels)
: IterationStatement(isolate, labels),
each_(NULL),
@@ -883,8 +840,8 @@ class ForInStatement: public IterationStatement {
private:
Expression* each_;
Expression* enumerable_;
- int body_id_;
- int prepare_id_;
+ const BailoutId body_id_;
+ const BailoutId prepare_id_;
};
@@ -896,8 +853,6 @@ class ExpressionStatement: public Statement {
Expression* expression() const { return expression_; }
protected:
- template<class> friend class AstNodeFactory;
-
explicit ExpressionStatement(Expression* expression)
: expression_(expression) { }
@@ -913,8 +868,6 @@ class ContinueStatement: public Statement {
IterationStatement* target() const { return target_; }
protected:
- template<class> friend class AstNodeFactory;
-
explicit ContinueStatement(IterationStatement* target)
: target_(target) { }
@@ -930,8 +883,6 @@ class BreakStatement: public Statement {
BreakableStatement* target() const { return target_; }
protected:
- template<class> friend class AstNodeFactory;
-
explicit BreakStatement(BreakableStatement* target)
: target_(target) { }
@@ -947,8 +898,6 @@ class ReturnStatement: public Statement {
Expression* expression() const { return expression_; }
protected:
- template<class> friend class AstNodeFactory;
-
explicit ReturnStatement(Expression* expression)
: expression_(expression) { }
@@ -965,8 +914,6 @@ class WithStatement: public Statement {
Statement* statement() const { return statement_; }
protected:
- template<class> friend class AstNodeFactory;
-
WithStatement(Expression* expression, Statement* statement)
: expression_(expression),
statement_(statement) { }
@@ -995,10 +942,10 @@ class CaseClause: public ZoneObject {
int position() const { return position_; }
void set_position(int pos) { position_ = pos; }
- int EntryId() { return entry_id_; }
- int CompareId() { return compare_id_; }
+ BailoutId EntryId() const { return entry_id_; }
// Type feedback information.
+ TypeFeedbackId CompareId() { return compare_id_; }
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
bool IsSmiCompare() { return compare_type_ == SMI_ONLY; }
bool IsSymbolCompare() { return compare_type_ == SYMBOL_ONLY; }
@@ -1018,8 +965,8 @@ class CaseClause: public ZoneObject {
OBJECT_ONLY
};
CompareTypeFeedback compare_type_;
- int compare_id_;
- int entry_id_;
+ const TypeFeedbackId compare_id_;
+ const BailoutId entry_id_;
};
@@ -1036,8 +983,6 @@ class SwitchStatement: public BreakableStatement {
ZoneList<CaseClause*>* cases() const { return cases_; }
protected:
- template<class> friend class AstNodeFactory;
-
SwitchStatement(Isolate* isolate, ZoneStringList* labels)
: BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
tag_(NULL),
@@ -1065,13 +1010,11 @@ class IfStatement: public Statement {
Statement* then_statement() const { return then_statement_; }
Statement* else_statement() const { return else_statement_; }
- int IfId() const { return if_id_; }
- int ThenId() const { return then_id_; }
- int ElseId() const { return else_id_; }
+ BailoutId IfId() const { return if_id_; }
+ BailoutId ThenId() const { return then_id_; }
+ BailoutId ElseId() const { return else_id_; }
protected:
- template<class> friend class AstNodeFactory;
-
IfStatement(Isolate* isolate,
Expression* condition,
Statement* then_statement,
@@ -1088,9 +1031,9 @@ class IfStatement: public Statement {
Expression* condition_;
Statement* then_statement_;
Statement* else_statement_;
- int if_id_;
- int then_id_;
- int else_id_;
+ const BailoutId if_id_;
+ const BailoutId then_id_;
+ const BailoutId else_id_;
};
@@ -1098,15 +1041,16 @@ class IfStatement: public Statement {
// stack in the compiler; this should probably be reworked.
class TargetCollector: public AstNode {
public:
- TargetCollector() : targets_(0) { }
+ explicit TargetCollector(Zone* zone) : targets_(0, zone) { }
// Adds a jump target to the collector. The collector stores a pointer not
// a copy of the target to make binding work, so make sure not to pass in
// references to something on the stack.
- void AddTarget(Label* target);
+ void AddTarget(Label* target, Zone* zone);
// Virtual behaviour. TargetCollectors are never part of the AST.
virtual void Accept(AstVisitor* v) { UNREACHABLE(); }
+ virtual Type node_type() const { return kInvalid; }
virtual TargetCollector* AsTargetCollector() { return this; }
ZoneList<Label*>* targets() { return &targets_; }
@@ -1150,8 +1094,6 @@ class TryCatchStatement: public TryStatement {
Block* catch_block() const { return catch_block_; }
protected:
- template<class> friend class AstNodeFactory;
-
TryCatchStatement(int index,
Block* try_block,
Scope* scope,
@@ -1177,8 +1119,6 @@ class TryFinallyStatement: public TryStatement {
Block* finally_block() const { return finally_block_; }
protected:
- template<class> friend class AstNodeFactory;
-
TryFinallyStatement(int index, Block* try_block, Block* finally_block)
: TryStatement(index, try_block),
finally_block_(finally_block) { }
@@ -1193,8 +1133,6 @@ class DebuggerStatement: public Statement {
DECLARE_NODE_TYPE(DebuggerStatement)
protected:
- template<class> friend class AstNodeFactory;
-
DebuggerStatement() {}
};
@@ -1204,8 +1142,6 @@ class EmptyStatement: public Statement {
DECLARE_NODE_TYPE(EmptyStatement)
protected:
- template<class> friend class AstNodeFactory;
-
EmptyStatement() {}
};
@@ -1256,9 +1192,9 @@ class Literal: public Expression {
return s1->Equals(*s2);
}
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId LiteralFeedbackId() const { return reuse(id()); }
+ protected:
Literal(Isolate* isolate, Handle<Object> handle)
: Expression(isolate),
handle_(handle) { }
@@ -1361,7 +1297,7 @@ class ObjectLiteral: public MaterializedLiteral {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- void CalculateEmitStore();
+ void CalculateEmitStore(Zone* zone);
enum Flags {
kNoFlags = 0,
@@ -1376,8 +1312,6 @@ class ObjectLiteral: public MaterializedLiteral {
};
protected:
- template<class> friend class AstNodeFactory;
-
ObjectLiteral(Isolate* isolate,
Handle<FixedArray> constant_properties,
ZoneList<Property*>* properties,
@@ -1409,8 +1343,6 @@ class RegExpLiteral: public MaterializedLiteral {
Handle<String> flags() const { return flags_; }
protected:
- template<class> friend class AstNodeFactory;
-
RegExpLiteral(Isolate* isolate,
Handle<String> pattern,
Handle<String> flags,
@@ -1434,11 +1366,11 @@ class ArrayLiteral: public MaterializedLiteral {
ZoneList<Expression*>* values() const { return values_; }
// Return an AST id for an element that is used in simulate instructions.
- int GetIdForElement(int i) { return first_element_id_ + i; }
+ BailoutId GetIdForElement(int i) {
+ return BailoutId(first_element_id_.ToInt() + i);
+ }
protected:
- template<class> friend class AstNodeFactory;
-
ArrayLiteral(Isolate* isolate,
Handle<FixedArray> constant_elements,
ZoneList<Expression*>* values,
@@ -1453,7 +1385,7 @@ class ArrayLiteral: public MaterializedLiteral {
private:
Handle<FixedArray> constant_elements_;
ZoneList<Expression*>* values_;
- int first_element_id_;
+ const BailoutId first_element_id_;
};
@@ -1489,15 +1421,13 @@ class VariableProxy: public Expression {
void BindTo(Variable* var);
protected:
- template<class> friend class AstNodeFactory;
-
VariableProxy(Isolate* isolate, Variable* var);
VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position,
- Interface* interface);
+ Interface* interface,
+ int position);
Handle<String> name_;
Variable* var_; // resolved variable, or NULL
@@ -1521,20 +1451,21 @@ class Property: public Expression {
Expression* key() const { return key_; }
virtual int position() const { return pos_; }
+ BailoutId LoadId() const { return load_id_; }
+
bool IsStringLength() const { return is_string_length_; }
bool IsStringAccess() const { return is_string_access_; }
bool IsFunctionPrototype() const { return is_function_prototype_; }
// Type feedback information.
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
bool IsArrayLength() { return is_array_length_; }
bool IsUninitialized() { return is_uninitialized_; }
+ TypeFeedbackId PropertyFeedbackId() { return reuse(id()); }
protected:
- template<class> friend class AstNodeFactory;
-
Property(Isolate* isolate,
Expression* obj,
Expression* key,
@@ -1543,6 +1474,7 @@ class Property: public Expression {
obj_(obj),
key_(key),
pos_(pos),
+ load_id_(GetNextId(isolate)),
is_monomorphic_(false),
is_uninitialized_(false),
is_array_length_(false),
@@ -1554,6 +1486,7 @@ class Property: public Expression {
Expression* obj_;
Expression* key_;
int pos_;
+ const BailoutId load_id_;
SmallMapList receiver_types_;
bool is_monomorphic_ : 1;
@@ -1573,20 +1506,25 @@ class Call: public Expression {
ZoneList<Expression*>* arguments() const { return arguments_; }
virtual int position() const { return pos_; }
- void RecordTypeFeedback(TypeFeedbackOracle* oracle,
- CallKind call_kind);
+ // Type feedback information.
+ TypeFeedbackId CallFeedbackId() const { return reuse(id()); }
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, CallKind call_kind);
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
virtual bool IsMonomorphic() { return is_monomorphic_; }
CheckType check_type() const { return check_type_; }
Handle<JSFunction> target() { return target_; }
+
+ // A cache for the holder, set as a side effect of computing the target of the
+ // call. Note that it contains the null handle when the receiver is the same
+ // as the holder!
Handle<JSObject> holder() { return holder_; }
+
Handle<JSGlobalPropertyCell> cell() { return cell_; }
bool ComputeTarget(Handle<Map> type, Handle<String> name);
bool ComputeGlobalTarget(Handle<GlobalObject> global, LookupResult* lookup);
- // Bailout support.
- int ReturnId() const { return return_id_; }
+ BailoutId ReturnId() const { return return_id_; }
#ifdef DEBUG
// Used to assert that the FullCodeGenerator records the return site.
@@ -1594,8 +1532,6 @@ class Call: public Expression {
#endif
protected:
- template<class> friend class AstNodeFactory;
-
Call(Isolate* isolate,
Expression* expression,
ZoneList<Expression*>* arguments,
@@ -1620,7 +1556,7 @@ class Call: public Expression {
Handle<JSObject> holder_;
Handle<JSGlobalPropertyCell> cell_;
- int return_id_;
+ const BailoutId return_id_;
};
@@ -1632,16 +1568,15 @@ class CallNew: public Expression {
ZoneList<Expression*>* arguments() const { return arguments_; }
virtual int position() const { return pos_; }
+ // Type feedback information.
+ TypeFeedbackId CallNewFeedbackId() const { return reuse(id()); }
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
Handle<JSFunction> target() { return target_; }
- // Bailout support.
- int ReturnId() const { return return_id_; }
+ BailoutId ReturnId() const { return return_id_; }
protected:
- template<class> friend class AstNodeFactory;
-
CallNew(Isolate* isolate,
Expression* expression,
ZoneList<Expression*>* arguments,
@@ -1661,7 +1596,7 @@ class CallNew: public Expression {
bool is_monomorphic_;
Handle<JSFunction> target_;
- int return_id_;
+ const BailoutId return_id_;
};
@@ -1678,9 +1613,9 @@ class CallRuntime: public Expression {
ZoneList<Expression*>* arguments() const { return arguments_; }
bool is_jsruntime() const { return function_ == NULL; }
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId CallRuntimeFeedbackId() const { return reuse(id()); }
+ protected:
CallRuntime(Isolate* isolate,
Handle<String> name,
const Runtime::Function* function,
@@ -1707,12 +1642,12 @@ class UnaryOperation: public Expression {
Expression* expression() const { return expression_; }
virtual int position() const { return pos_; }
- int MaterializeTrueId() { return materialize_true_id_; }
- int MaterializeFalseId() { return materialize_false_id_; }
+ BailoutId MaterializeTrueId() { return materialize_true_id_; }
+ BailoutId MaterializeFalseId() { return materialize_false_id_; }
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId UnaryOperationFeedbackId() const { return reuse(id()); }
+ protected:
UnaryOperation(Isolate* isolate,
Token::Value op,
Expression* expression,
@@ -1721,13 +1656,9 @@ class UnaryOperation: public Expression {
op_(op),
expression_(expression),
pos_(pos),
- materialize_true_id_(AstNode::kNoNumber),
- materialize_false_id_(AstNode::kNoNumber) {
+ materialize_true_id_(GetNextId(isolate)),
+ materialize_false_id_(GetNextId(isolate)) {
ASSERT(Token::IsUnaryOp(op));
- if (op == Token::NOT) {
- materialize_true_id_ = GetNextId(isolate);
- materialize_false_id_ = GetNextId(isolate);
- }
}
private:
@@ -1737,8 +1668,8 @@ class UnaryOperation: public Expression {
// For unary not (Token::NOT), the AST ids where true and false will
// actually be materialized, respectively.
- int materialize_true_id_;
- int materialize_false_id_;
+ const BailoutId materialize_true_id_;
+ const BailoutId materialize_false_id_;
};
@@ -1753,22 +1684,23 @@ class BinaryOperation: public Expression {
Expression* right() const { return right_; }
virtual int position() const { return pos_; }
- // Bailout support.
- int RightId() const { return right_id_; }
+ BailoutId RightId() const { return right_id_; }
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId BinaryOperationFeedbackId() const { return reuse(id()); }
+ protected:
BinaryOperation(Isolate* isolate,
Token::Value op,
Expression* left,
Expression* right,
int pos)
- : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) {
+ : Expression(isolate),
+ op_(op),
+ left_(left),
+ right_(right),
+ pos_(pos),
+ right_id_(GetNextId(isolate)) {
ASSERT(Token::IsBinaryOp(op));
- right_id_ = (op == Token::AND || op == Token::OR)
- ? GetNextId(isolate)
- : AstNode::kNoNumber;
}
private:
@@ -1776,9 +1708,9 @@ class BinaryOperation: public Expression {
Expression* left_;
Expression* right_;
int pos_;
- // The short-circuit logical operations have an AST ID for their
+ // The short-circuit logical operations need an AST ID for their
// right-hand subexpression.
- int right_id_;
+ const BailoutId right_id_;
};
@@ -1799,17 +1731,16 @@ class CountOperation: public Expression {
virtual void MarkAsStatement() { is_prefix_ = true; }
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* znoe);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
- // Bailout support.
- int AssignmentId() const { return assignment_id_; }
- int CountId() const { return count_id_; }
+ BailoutId AssignmentId() const { return assignment_id_; }
- protected:
- template<class> friend class AstNodeFactory;
+ TypeFeedbackId CountBinOpFeedbackId() const { return count_id_; }
+ TypeFeedbackId CountStoreFeedbackId() const { return reuse(id()); }
+ protected:
CountOperation(Isolate* isolate,
Token::Value op,
bool is_prefix,
@@ -1829,8 +1760,8 @@ class CountOperation: public Expression {
bool is_monomorphic_;
Expression* expression_;
int pos_;
- int assignment_id_;
- int count_id_;
+ const BailoutId assignment_id_;
+ const TypeFeedbackId count_id_;
SmallMapList receiver_types_;
};
@@ -1845,6 +1776,7 @@ class CompareOperation: public Expression {
virtual int position() const { return pos_; }
// Type feedback information.
+ TypeFeedbackId CompareOperationFeedbackId() const { return reuse(id()); }
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
bool IsSmiCompare() { return compare_type_ == SMI_ONLY; }
bool IsObjectCompare() { return compare_type_ == OBJECT_ONLY; }
@@ -1855,8 +1787,6 @@ class CompareOperation: public Expression {
bool IsLiteralCompareNull(Expression** expr);
protected:
- template<class> friend class AstNodeFactory;
-
CompareOperation(Isolate* isolate,
Token::Value op,
Expression* left,
@@ -1893,12 +1823,10 @@ class Conditional: public Expression {
int then_expression_position() const { return then_expression_position_; }
int else_expression_position() const { return else_expression_position_; }
- int ThenId() const { return then_id_; }
- int ElseId() const { return else_id_; }
+ BailoutId ThenId() const { return then_id_; }
+ BailoutId ElseId() const { return else_id_; }
protected:
- template<class> friend class AstNodeFactory;
-
Conditional(Isolate* isolate,
Expression* condition,
Expression* then_expression,
@@ -1920,8 +1848,8 @@ class Conditional: public Expression {
Expression* else_expression_;
int then_expression_position_;
int else_expression_position_;
- int then_id_;
- int else_id_;
+ const BailoutId then_id_;
+ const BailoutId else_id_;
};
@@ -1942,27 +1870,15 @@ class Assignment: public Expression {
// This check relies on the definition order of token in token.h.
bool is_compound() const { return op() > Token::ASSIGN; }
- // An initialization block is a series of statments of the form
- // x.y.z.a = ...; x.y.z.b = ...; etc. The parser marks the beginning and
- // ending of these blocks to allow for optimizations of initialization
- // blocks.
- bool starts_initialization_block() { return block_start_; }
- bool ends_initialization_block() { return block_end_; }
- void mark_block_start() { block_start_ = true; }
- void mark_block_end() { block_end_ = true; }
+ BailoutId AssignmentId() const { return assignment_id_; }
// Type feedback information.
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ TypeFeedbackId AssignmentFeedbackId() { return reuse(id()); }
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
- // Bailout support.
- int CompoundLoadId() const { return compound_load_id_; }
- int AssignmentId() const { return assignment_id_; }
-
protected:
- template<class> friend class AstNodeFactory;
-
Assignment(Isolate* isolate,
Token::Value op,
Expression* target,
@@ -1975,7 +1891,6 @@ class Assignment: public Expression {
if (is_compound()) {
binary_operation_ =
factory->NewBinaryOperation(binary_op(), target_, value_, pos_ + 1);
- compound_load_id_ = GetNextId(isolate);
}
}
@@ -1985,11 +1900,7 @@ class Assignment: public Expression {
Expression* value_;
int pos_;
BinaryOperation* binary_operation_;
- int compound_load_id_;
- int assignment_id_;
-
- bool block_start_;
- bool block_end_;
+ const BailoutId assignment_id_;
bool is_monomorphic_;
SmallMapList receiver_types_;
@@ -2004,8 +1915,6 @@ class Throw: public Expression {
virtual int position() const { return pos_; }
protected:
- template<class> friend class AstNodeFactory;
-
Throw(Isolate* isolate, Expression* exception, int pos)
: Expression(isolate), exception_(exception), pos_(pos) {}
@@ -2033,6 +1942,11 @@ class FunctionLiteral: public Expression {
kIsFunction
};
+ enum IsParenthesizedFlag {
+ kIsParenthesized,
+ kNotParenthesized
+ };
+
DECLARE_NODE_TYPE(FunctionLiteral)
Handle<String> name() const { return name_; }
@@ -2062,6 +1976,7 @@ class FunctionLiteral: public Expression {
int parameter_count() { return parameter_count_; }
bool AllowsLazyCompilation();
+ bool AllowsLazyCompilationWithoutContext();
Handle<String> debug_name() const {
if (name_->length() > 0) return name_;
@@ -2082,6 +1997,18 @@ class FunctionLiteral: public Expression {
bool is_function() { return IsFunction::decode(bitfield_) == kIsFunction; }
+ // This is used as a heuristic on when to eagerly compile a function
+ // literal. We consider the following constructs as hints that the
+ // function will be called immediately:
+ // - (function() { ... })();
+ // - var x = function() { ... }();
+ bool is_parenthesized() {
+ return IsParenthesized::decode(bitfield_) == kIsParenthesized;
+ }
+ void set_parenthesized() {
+ bitfield_ = IsParenthesized::update(bitfield_, kIsParenthesized);
+ }
+
int ast_node_count() { return ast_properties_.node_count(); }
AstProperties::Flags* flags() { return ast_properties_.flags(); }
void set_ast_properties(AstProperties* ast_properties) {
@@ -2089,8 +2016,6 @@ class FunctionLiteral: public Expression {
}
protected:
- template<class> friend class AstNodeFactory;
-
FunctionLiteral(Isolate* isolate,
Handle<String> name,
Scope* scope,
@@ -2103,7 +2028,8 @@ class FunctionLiteral: public Expression {
int parameter_count,
Type type,
ParameterFlag has_duplicate_parameters,
- IsFunctionFlag is_function)
+ IsFunctionFlag is_function,
+ IsParenthesizedFlag is_parenthesized)
: Expression(isolate),
name_(name),
scope_(scope),
@@ -2122,7 +2048,8 @@ class FunctionLiteral: public Expression {
IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) |
Pretenure::encode(false) |
HasDuplicateParameters::encode(has_duplicate_parameters) |
- IsFunction::encode(is_function);
+ IsFunction::encode(is_function) |
+ IsParenthesized::encode(is_parenthesized);
}
private:
@@ -2146,6 +2073,7 @@ class FunctionLiteral: public Expression {
class Pretenure: public BitField<bool, 3, 1> {};
class HasDuplicateParameters: public BitField<ParameterFlag, 4, 1> {};
class IsFunction: public BitField<IsFunctionFlag, 5, 1> {};
+ class IsParenthesized: public BitField<IsParenthesizedFlag, 6, 1> {};
};
@@ -2158,8 +2086,6 @@ class SharedFunctionInfoLiteral: public Expression {
}
protected:
- template<class> friend class AstNodeFactory;
-
SharedFunctionInfoLiteral(
Isolate* isolate,
Handle<SharedFunctionInfo> shared_function_info)
@@ -2176,8 +2102,6 @@ class ThisFunction: public Expression {
DECLARE_NODE_TYPE(ThisFunction)
protected:
- template<class> friend class AstNodeFactory;
-
explicit ThisFunction(Isolate* isolate): Expression(isolate) {}
};
@@ -2213,8 +2137,8 @@ class RegExpTree: public ZoneObject {
// Returns the interval of registers used for captures within this
// expression.
virtual Interval CaptureRegisters() { return Interval::Empty(); }
- virtual void AppendToText(RegExpText* text);
- SmartArrayPointer<const char> ToString();
+ virtual void AppendToText(RegExpText* text, Zone* zone);
+ SmartArrayPointer<const char> ToString(Zone* zone);
#define MAKE_ASTYPE(Name) \
virtual RegExp##Name* As##Name(); \
virtual bool Is##Name();
@@ -2299,7 +2223,7 @@ class CharacterSet BASE_EMBEDDED {
explicit CharacterSet(ZoneList<CharacterRange>* ranges)
: ranges_(ranges),
standard_set_type_(0) {}
- ZoneList<CharacterRange>* ranges();
+ ZoneList<CharacterRange>* ranges(Zone* zone);
uc16 standard_set_type() { return standard_set_type_; }
void set_standard_set_type(uc16 special_set_type) {
standard_set_type_ = special_set_type;
@@ -2330,11 +2254,11 @@ class RegExpCharacterClass: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return 1; }
virtual int max_match() { return 1; }
- virtual void AppendToText(RegExpText* text);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
CharacterSet character_set() { return set_; }
// TODO(lrn): Remove need for complex version if is_standard that
// recognizes a mangled standard set and just do { return set_.is_special(); }
- bool is_standard();
+ bool is_standard(Zone* zone);
// Returns a value representing the standard character set if is_standard()
// returns true.
// Currently used values are:
@@ -2347,7 +2271,7 @@ class RegExpCharacterClass: public RegExpTree {
// . : non-unicode non-newline
// * : All characters
uc16 standard_type() { return set_.standard_set_type(); }
- ZoneList<CharacterRange>* ranges() { return set_.ranges(); }
+ ZoneList<CharacterRange>* ranges(Zone* zone) { return set_.ranges(zone); }
bool is_negated() { return is_negated_; }
private:
@@ -2367,7 +2291,7 @@ class RegExpAtom: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return data_.length(); }
virtual int max_match() { return data_.length(); }
- virtual void AppendToText(RegExpText* text);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
Vector<const uc16> data() { return data_; }
int length() { return data_.length(); }
private:
@@ -2377,7 +2301,7 @@ class RegExpAtom: public RegExpTree {
class RegExpText: public RegExpTree {
public:
- RegExpText() : elements_(2), length_(0) {}
+ explicit RegExpText(Zone* zone) : elements_(2, zone), length_(0) {}
virtual void* Accept(RegExpVisitor* visitor, void* data);
virtual RegExpNode* ToNode(RegExpCompiler* compiler,
RegExpNode* on_success);
@@ -2386,9 +2310,9 @@ class RegExpText: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return length_; }
virtual int max_match() { return length_; }
- virtual void AppendToText(RegExpText* text);
- void AddElement(TextElement elm) {
- elements_.Add(elm);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
+ void AddElement(TextElement elm, Zone* zone) {
+ elements_.Add(elm, zone);
length_ += elm.length();
}
ZoneList<TextElement>* elements() { return &elements_; }
@@ -2635,9 +2559,9 @@ class AstNullVisitor BASE_EMBEDDED {
template<class Visitor>
class AstNodeFactory BASE_EMBEDDED {
public:
- explicit AstNodeFactory(Isolate* isolate)
+ AstNodeFactory(Isolate* isolate, Zone* zone)
: isolate_(isolate),
- zone_(isolate_->zone()) { }
+ zone_(zone) { }
Visitor* visitor() { return &visitor_; }
@@ -2696,12 +2620,12 @@ class AstNodeFactory BASE_EMBEDDED {
}
ModulePath* NewModulePath(Module* origin, Handle<String> name) {
- ModulePath* module = new(zone_) ModulePath(origin, name);
+ ModulePath* module = new(zone_) ModulePath(origin, name, zone_);
VISIT_AND_RETURN(ModulePath, module)
}
ModuleUrl* NewModuleUrl(Handle<String> url) {
- ModuleUrl* module = new(zone_) ModuleUrl(url);
+ ModuleUrl* module = new(zone_) ModuleUrl(url, zone_);
VISIT_AND_RETURN(ModuleUrl, module)
}
@@ -2709,7 +2633,7 @@ class AstNodeFactory BASE_EMBEDDED {
int capacity,
bool is_initializer_block) {
Block* block = new(zone_) Block(
- isolate_, labels, capacity, is_initializer_block);
+ isolate_, labels, capacity, is_initializer_block, zone_);
VISIT_AND_RETURN(Block, block)
}
@@ -2842,11 +2766,10 @@ class AstNodeFactory BASE_EMBEDDED {
VariableProxy* NewVariableProxy(Handle<String> name,
bool is_this,
- int position = RelocInfo::kNoPosition,
- Interface* interface =
- Interface::NewValue()) {
+ Interface* interface = Interface::NewValue(),
+ int position = RelocInfo::kNoPosition) {
VariableProxy* proxy =
- new(zone_) VariableProxy(isolate_, name, is_this, position, interface);
+ new(zone_) VariableProxy(isolate_, name, is_this, interface, position);
VISIT_AND_RETURN(VariableProxy, proxy)
}
@@ -2950,12 +2873,14 @@ class AstNodeFactory BASE_EMBEDDED {
int parameter_count,
FunctionLiteral::ParameterFlag has_duplicate_parameters,
FunctionLiteral::Type type,
- FunctionLiteral::IsFunctionFlag is_function) {
+ FunctionLiteral::IsFunctionFlag is_function,
+ FunctionLiteral::IsParenthesizedFlag is_parenthesized) {
FunctionLiteral* lit = new(zone_) FunctionLiteral(
isolate_, name, scope, body,
materialized_literal_count, expected_property_count, handler_count,
has_only_simple_this_property_assignments, this_property_assignments,
- parameter_count, type, has_duplicate_parameters, is_function);
+ parameter_count, type, has_duplicate_parameters, is_function,
+ is_parenthesized);
// Top-level literal doesn't count for the AST's properties.
if (is_function == FunctionLiteral::kIsFunction) {
visitor_.VisitFunctionLiteral(lit);
diff --git a/src/3rdparty/v8/src/atomicops.h b/src/3rdparty/v8/src/atomicops.h
index 55de87c..ec92ce6 100644
--- a/src/3rdparty/v8/src/atomicops.h
+++ b/src/3rdparty/v8/src/atomicops.h
@@ -69,7 +69,11 @@ typedef intptr_t Atomic64;
// Use AtomicWord for a machine-sized pointer. It will use the Atomic32 or
// Atomic64 routines below, depending on your architecture.
+#if defined(__OpenBSD__) && defined(__i386__)
+typedef Atomic32 AtomicWord;
+#else
typedef intptr_t AtomicWord;
+#endif
// Atomically execute:
// result = *ptr;
@@ -148,7 +152,7 @@ Atomic64 Release_Load(volatile const Atomic64* ptr);
// Include our platform specific implementation.
#if defined(_MSC_VER) && \
- (defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64))
+ (defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64) || defined(_WIN32_WCE))
#include "atomicops_internals_x86_msvc.h"
#elif defined(__APPLE__) && \
(defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64))
diff --git a/src/3rdparty/v8/src/atomicops_internals_x86_msvc.h b/src/3rdparty/v8/src/atomicops_internals_x86_msvc.h
index fcf6a65..6677e64 100644
--- a/src/3rdparty/v8/src/atomicops_internals_x86_msvc.h
+++ b/src/3rdparty/v8/src/atomicops_internals_x86_msvc.h
@@ -69,10 +69,16 @@ inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
#if !(defined(_MSC_VER) && _MSC_VER >= 1400)
#error "We require at least vs2005 for MemoryBarrier"
#endif
+// On Windows CE no MemoryBarrier is needed, so this is a no-op.
+#ifdef _WIN32_WCE
+inline void MemoryBarrier() {
+}
+#else
inline void MemoryBarrier() {
// We use MemoryBarrier from WinNT.h
::MemoryBarrier();
}
+#endif
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value,
diff --git a/src/3rdparty/v8/src/bootstrapper.cc b/src/3rdparty/v8/src/bootstrapper.cc
index 36260ba..c06d73d 100644
--- a/src/3rdparty/v8/src/bootstrapper.cc
+++ b/src/3rdparty/v8/src/bootstrapper.cc
@@ -42,6 +42,7 @@
#include "snapshot.h"
#include "extensions/externalize-string-extension.h"
#include "extensions/gc-extension.h"
+#include "extensions/statistics-extension.h"
namespace v8 {
namespace internal {
@@ -95,6 +96,7 @@ void Bootstrapper::Initialize(bool create_heap_objects) {
extensions_cache_.Initialize(create_heap_objects);
GCExtension::Register();
ExternalizeStringExtension::Register();
+ StatisticsExtension::Register();
}
@@ -154,7 +156,7 @@ class Genesis BASE_EMBEDDED {
Heap* heap() const { return isolate_->heap(); }
private:
- Handle<Context> global_context_;
+ Handle<Context> native_context_;
Isolate* isolate_;
// There may be more than one active genesis object: When GC is
@@ -162,7 +164,7 @@ class Genesis BASE_EMBEDDED {
// processing callbacks which may create new environments.
Genesis* previous_;
- Handle<Context> global_context() { return global_context_; }
+ Handle<Context> native_context() { return native_context_; }
// Creates some basic objects. Used for creating a context from scratch.
void CreateRoots();
@@ -226,13 +228,13 @@ class Genesis BASE_EMBEDDED {
// Used both for deserialized and from-scratch contexts to add the extensions
// provided.
- static bool InstallExtensions(Handle<Context> global_context,
+ static bool InstallExtensions(Handle<Context> native_context,
v8::ExtensionConfiguration* extensions);
static bool InstallExtension(const char* name,
ExtensionStates* extension_states);
static bool InstallExtension(v8::RegisteredExtension* current,
ExtensionStates* extension_states);
- static void InstallSpecialObjects(Handle<Context> global_context);
+ static void InstallSpecialObjects(Handle<Context> native_context);
bool InstallJSBuiltins(Handle<JSBuiltinsObject> builtins);
bool ConfigureApiObject(Handle<JSObject> object,
Handle<ObjectTemplateInfo> object_template);
@@ -253,16 +255,16 @@ class Genesis BASE_EMBEDDED {
Handle<Map> CreateFunctionMap(PrototypePropertyMode prototype_mode);
- Handle<DescriptorArray> ComputeFunctionInstanceDescriptor(
- PrototypePropertyMode prototypeMode);
+ void SetFunctionInstanceDescriptor(Handle<Map> map,
+ PrototypePropertyMode prototypeMode);
void MakeFunctionInstancePrototypeWritable();
Handle<Map> CreateStrictModeFunctionMap(
PrototypePropertyMode prototype_mode,
Handle<JSFunction> empty_function);
- Handle<DescriptorArray> ComputeStrictFunctionInstanceDescriptor(
- PrototypePropertyMode propertyMode);
+ void SetStrictFunctionInstanceDescriptor(Handle<Map> map,
+ PrototypePropertyMode propertyMode);
static bool CompileBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
@@ -317,7 +319,7 @@ static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
// object.__proto__ = proto;
Factory* factory = object->GetIsolate()->factory();
Handle<Map> old_to_map = Handle<Map>(object->map());
- Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
+ Handle<Map> new_to_map = factory->CopyMap(old_to_map);
new_to_map->set_prototype(*proto);
object->set_map(*new_to_map);
}
@@ -325,22 +327,20 @@ static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
void Bootstrapper::DetachGlobal(Handle<Context> env) {
Factory* factory = env->GetIsolate()->factory();
- JSGlobalProxy::cast(env->global_proxy())->set_context(*factory->null_value());
- SetObjectPrototype(Handle<JSObject>(env->global_proxy()),
- factory->null_value());
- env->set_global_proxy(env->global());
- env->global()->set_global_receiver(env->global());
+ Handle<JSGlobalProxy> global_proxy(JSGlobalProxy::cast(env->global_proxy()));
+ global_proxy->set_native_context(*factory->null_value());
+ SetObjectPrototype(global_proxy, factory->null_value());
+ env->set_global_proxy(env->global_object());
+ env->global_object()->set_global_receiver(env->global_object());
}
void Bootstrapper::ReattachGlobal(Handle<Context> env,
- Handle<Object> global_object) {
- ASSERT(global_object->IsJSGlobalProxy());
- Handle<JSGlobalProxy> global = Handle<JSGlobalProxy>::cast(global_object);
- env->global()->set_global_receiver(*global);
- env->set_global_proxy(*global);
- SetObjectPrototype(global, Handle<JSObject>(env->global()));
- global->set_context(*env);
+ Handle<JSGlobalProxy> global_proxy) {
+ env->global_object()->set_global_receiver(*global_proxy);
+ env->set_global_proxy(*global_proxy);
+ SetObjectPrototype(global_proxy, Handle<JSObject>(env->global_object()));
+ global_proxy->set_native_context(*env);
}
@@ -381,54 +381,54 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
}
-Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
- PrototypePropertyMode prototypeMode) {
+void Genesis::SetFunctionInstanceDescriptor(
+ Handle<Map> map, PrototypePropertyMode prototypeMode) {
int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
- Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
+ Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(0, size));
+ DescriptorArray::WhitenessWitness witness(*descriptors);
+
+ Handle<Foreign> length(factory()->NewForeign(&Accessors::FunctionLength));
+ Handle<Foreign> name(factory()->NewForeign(&Accessors::FunctionName));
+ Handle<Foreign> args(factory()->NewForeign(&Accessors::FunctionArguments));
+ Handle<Foreign> caller(factory()->NewForeign(&Accessors::FunctionCaller));
+ Handle<Foreign> prototype;
+ if (prototypeMode != DONT_ADD_PROTOTYPE) {
+ prototype = factory()->NewForeign(&Accessors::FunctionPrototype);
+ }
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE | READ_ONLY);
-
- DescriptorArray::WhitenessWitness witness(*descriptors);
+ map->set_instance_descriptors(*descriptors);
{ // Add length.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionLength));
- CallbacksDescriptor d(*factory()->length_symbol(), *f, attribs);
- descriptors->Set(0, &d, witness);
+ CallbacksDescriptor d(*factory()->length_symbol(), *length, attribs);
+ map->AppendDescriptor(&d, witness);
}
{ // Add name.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionName));
- CallbacksDescriptor d(*factory()->name_symbol(), *f, attribs);
- descriptors->Set(1, &d, witness);
+ CallbacksDescriptor d(*factory()->name_symbol(), *name, attribs);
+ map->AppendDescriptor(&d, witness);
}
{ // Add arguments.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionArguments));
- CallbacksDescriptor d(*factory()->arguments_symbol(), *f, attribs);
- descriptors->Set(2, &d, witness);
+ CallbacksDescriptor d(*factory()->arguments_symbol(), *args, attribs);
+ map->AppendDescriptor(&d, witness);
}
{ // Add caller.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionCaller));
- CallbacksDescriptor d(*factory()->caller_symbol(), *f, attribs);
- descriptors->Set(3, &d, witness);
+ CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attribs);
+ map->AppendDescriptor(&d, witness);
}
if (prototypeMode != DONT_ADD_PROTOTYPE) {
// Add prototype.
if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
attribs = static_cast<PropertyAttributes>(attribs & ~READ_ONLY);
}
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionPrototype));
- CallbacksDescriptor d(*factory()->prototype_symbol(), *f, attribs);
- descriptors->Set(4, &d, witness);
+ CallbacksDescriptor d(*factory()->prototype_symbol(), *prototype, attribs);
+ map->AppendDescriptor(&d, witness);
}
- descriptors->Sort(witness);
- return descriptors;
}
Handle<Map> Genesis::CreateFunctionMap(PrototypePropertyMode prototype_mode) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
- Handle<DescriptorArray> descriptors =
- ComputeFunctionInstanceDescriptor(prototype_mode);
- map->set_instance_descriptors(*descriptors);
+ SetFunctionInstanceDescriptor(map, prototype_mode);
map->set_function_with_prototype(prototype_mode != DONT_ADD_PROTOTYPE);
return map;
}
@@ -442,20 +442,20 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
// writable.
Handle<Map> function_instance_map =
CreateFunctionMap(ADD_WRITEABLE_PROTOTYPE);
- global_context()->set_function_instance_map(*function_instance_map);
+ native_context()->set_function_instance_map(*function_instance_map);
// Functions with this map will not have a 'prototype' property, and
// can not be used as constructors.
Handle<Map> function_without_prototype_map =
CreateFunctionMap(DONT_ADD_PROTOTYPE);
- global_context()->set_function_without_prototype_map(
+ native_context()->set_function_without_prototype_map(
*function_without_prototype_map);
// Allocate the function map. This map is temporary, used only for processing
// of builtins.
// Later the map is replaced with writable prototype map, allocated below.
Handle<Map> function_map = CreateFunctionMap(ADD_READONLY_PROTOTYPE);
- global_context()->set_function_map(*function_map);
+ native_context()->set_function_map(*function_map);
// The final map for functions. Writeable prototype.
// This map is installed in MakeFunctionInstancePrototypeWritable.
@@ -475,17 +475,15 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
object_fun->set_initial_map(*object_function_map);
object_function_map->set_constructor(*object_fun);
- global_context()->set_object_function(*object_fun);
+ native_context()->set_object_function(*object_fun);
// Allocate a new prototype for the object function.
Handle<JSObject> prototype = factory->NewJSObject(
isolate->object_function(),
TENURED);
- global_context()->set_initial_object_prototype(*prototype);
+ native_context()->set_initial_object_prototype(*prototype);
SetPrototype(object_fun, prototype);
- object_function_map->set_instance_descriptors(
- heap->empty_descriptor_array());
}
// Allocate the empty function as the prototype for function ECMAScript
@@ -509,63 +507,63 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
empty_function->shared()->DontAdaptArguments();
// Set prototypes for the function maps.
- global_context()->function_map()->set_prototype(*empty_function);
- global_context()->function_instance_map()->set_prototype(*empty_function);
- global_context()->function_without_prototype_map()->
+ native_context()->function_map()->set_prototype(*empty_function);
+ native_context()->function_instance_map()->set_prototype(*empty_function);
+ native_context()->function_without_prototype_map()->
set_prototype(*empty_function);
function_instance_map_writable_prototype_->set_prototype(*empty_function);
// Allocate the function map first and then patch the prototype later
Handle<Map> empty_function_map = CreateFunctionMap(DONT_ADD_PROTOTYPE);
empty_function_map->set_prototype(
- global_context()->object_function()->prototype());
+ native_context()->object_function()->prototype());
empty_function->set_map(*empty_function_map);
return empty_function;
}
-Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor(
- PrototypePropertyMode prototypeMode) {
+void Genesis::SetStrictFunctionInstanceDescriptor(
+ Handle<Map> map, PrototypePropertyMode prototypeMode) {
int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
- Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
+ Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(0, size));
+ DescriptorArray::WhitenessWitness witness(*descriptors);
+
+ Handle<Foreign> length(factory()->NewForeign(&Accessors::FunctionLength));
+ Handle<Foreign> name(factory()->NewForeign(&Accessors::FunctionName));
+ Handle<AccessorPair> arguments(factory()->NewAccessorPair());
+ Handle<AccessorPair> caller(factory()->NewAccessorPair());
+ Handle<Foreign> prototype;
+ if (prototypeMode != DONT_ADD_PROTOTYPE) {
+ prototype = factory()->NewForeign(&Accessors::FunctionPrototype);
+ }
PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE);
-
- DescriptorArray::WhitenessWitness witness(*descriptors);
+ map->set_instance_descriptors(*descriptors);
{ // Add length.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionLength));
- CallbacksDescriptor d(*factory()->length_symbol(), *f, attribs);
- descriptors->Set(0, &d, witness);
+ CallbacksDescriptor d(*factory()->length_symbol(), *length, attribs);
+ map->AppendDescriptor(&d, witness);
}
{ // Add name.
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionName));
- CallbacksDescriptor d(*factory()->name_symbol(), *f, attribs);
- descriptors->Set(1, &d, witness);
+ CallbacksDescriptor d(*factory()->name_symbol(), *name, attribs);
+ map->AppendDescriptor(&d, witness);
}
{ // Add arguments.
- Handle<AccessorPair> arguments(factory()->NewAccessorPair());
CallbacksDescriptor d(*factory()->arguments_symbol(), *arguments, attribs);
- descriptors->Set(2, &d, witness);
+ map->AppendDescriptor(&d, witness);
}
{ // Add caller.
- Handle<AccessorPair> caller(factory()->NewAccessorPair());
CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attribs);
- descriptors->Set(3, &d, witness);
+ map->AppendDescriptor(&d, witness);
}
-
if (prototypeMode != DONT_ADD_PROTOTYPE) {
// Add prototype.
if (prototypeMode != ADD_WRITEABLE_PROTOTYPE) {
attribs = static_cast<PropertyAttributes>(attribs | READ_ONLY);
}
- Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionPrototype));
- CallbacksDescriptor d(*factory()->prototype_symbol(), *f, attribs);
- descriptors->Set(4, &d, witness);
+ CallbacksDescriptor d(*factory()->prototype_symbol(), *prototype, attribs);
+ map->AppendDescriptor(&d, witness);
}
-
- descriptors->Sort(witness);
- return descriptors;
}
@@ -578,7 +576,7 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorFunction() {
Handle<Code> code(isolate()->builtins()->builtin(
Builtins::kStrictModePoisonPill));
throw_type_error_function->set_map(
- global_context()->function_map());
+ native_context()->function_map());
throw_type_error_function->set_code(*code);
throw_type_error_function->shared()->set_code(*code);
throw_type_error_function->shared()->DontAdaptArguments();
@@ -593,9 +591,7 @@ Handle<Map> Genesis::CreateStrictModeFunctionMap(
PrototypePropertyMode prototype_mode,
Handle<JSFunction> empty_function) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
- Handle<DescriptorArray> descriptors =
- ComputeStrictFunctionInstanceDescriptor(prototype_mode);
- map->set_instance_descriptors(*descriptors);
+ SetStrictFunctionInstanceDescriptor(map, prototype_mode);
map->set_function_with_prototype(prototype_mode != DONT_ADD_PROTOTYPE);
map->set_prototype(*empty_function);
return map;
@@ -606,13 +602,13 @@ void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
// Allocate map for the strict mode function instances.
Handle<Map> strict_mode_function_instance_map =
CreateStrictModeFunctionMap(ADD_WRITEABLE_PROTOTYPE, empty);
- global_context()->set_strict_mode_function_instance_map(
+ native_context()->set_strict_mode_function_instance_map(
*strict_mode_function_instance_map);
// Allocate map for the prototype-less strict mode instances.
Handle<Map> strict_mode_function_without_prototype_map =
CreateStrictModeFunctionMap(DONT_ADD_PROTOTYPE, empty);
- global_context()->set_strict_mode_function_without_prototype_map(
+ native_context()->set_strict_mode_function_without_prototype_map(
*strict_mode_function_without_prototype_map);
// Allocate map for the strict mode functions. This map is temporary, used
@@ -620,7 +616,7 @@ void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
// Later the map is replaced with writable prototype map, allocated below.
Handle<Map> strict_mode_function_map =
CreateStrictModeFunctionMap(ADD_READONLY_PROTOTYPE, empty);
- global_context()->set_strict_mode_function_map(
+ native_context()->set_strict_mode_function_map(
*strict_mode_function_map);
// The final map for the strict mode functions. Writeable prototype.
@@ -641,7 +637,7 @@ static void SetAccessors(Handle<Map> map,
Handle<String> name,
Handle<JSFunction> func) {
DescriptorArray* descs = map->instance_descriptors();
- int number = descs->Search(*name);
+ int number = descs->SearchWithCache(*name, *map);
AccessorPair* accessors = AccessorPair::cast(descs->GetValue(number));
accessors->set_getter(*func);
accessors->set_setter(*func);
@@ -654,39 +650,39 @@ void Genesis::PoisonArgumentsAndCaller(Handle<Map> map) {
}
-static void AddToWeakGlobalContextList(Context* context) {
- ASSERT(context->IsGlobalContext());
+static void AddToWeakNativeContextList(Context* context) {
+ ASSERT(context->IsNativeContext());
Heap* heap = context->GetIsolate()->heap();
#ifdef DEBUG
{ // NOLINT
ASSERT(context->get(Context::NEXT_CONTEXT_LINK)->IsUndefined());
// Check that context is not in the list yet.
- for (Object* current = heap->global_contexts_list();
+ for (Object* current = heap->native_contexts_list();
!current->IsUndefined();
current = Context::cast(current)->get(Context::NEXT_CONTEXT_LINK)) {
ASSERT(current != context);
}
}
#endif
- context->set(Context::NEXT_CONTEXT_LINK, heap->global_contexts_list());
- heap->set_global_contexts_list(context);
+ context->set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list());
+ heap->set_native_contexts_list(context);
}
void Genesis::CreateRoots() {
- // Allocate the global context FixedArray first and then patch the
+ // Allocate the native context FixedArray first and then patch the
// closure and extension object later (we need the empty function
// and the global object, but in order to create those, we need the
- // global context).
- global_context_ = Handle<Context>::cast(isolate()->global_handles()->Create(
- *factory()->NewGlobalContext()));
- AddToWeakGlobalContextList(*global_context_);
- isolate()->set_context(*global_context());
+ // native context).
+ native_context_ = Handle<Context>::cast(isolate()->global_handles()->Create(
+ *factory()->NewNativeContext()));
+ AddToWeakNativeContextList(*native_context_);
+ isolate()->set_context(*native_context());
// Allocate the message listeners object.
{
v8::NeanderArray listeners;
- global_context()->set_message_listeners(*listeners.value());
+ native_context()->set_message_listeners(*listeners.value());
}
}
@@ -749,6 +745,7 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
}
js_global_function->initial_map()->set_is_hidden_prototype();
+ js_global_function->initial_map()->set_dictionary_map(true);
Handle<GlobalObject> inner_global =
factory()->NewGlobalObject(js_global_function);
if (inner_global_out != NULL) {
@@ -795,22 +792,23 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
void Genesis::HookUpGlobalProxy(Handle<GlobalObject> inner_global,
Handle<JSGlobalProxy> global_proxy) {
- // Set the global context for the global object.
- inner_global->set_global_context(*global_context());
+ // Set the native context for the global object.
+ inner_global->set_native_context(*native_context());
+ inner_global->set_global_context(*native_context());
inner_global->set_global_receiver(*global_proxy);
- global_proxy->set_context(*global_context());
- global_context()->set_global_proxy(*global_proxy);
+ global_proxy->set_native_context(*native_context());
+ native_context()->set_global_proxy(*global_proxy);
}
void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) {
Handle<GlobalObject> inner_global_from_snapshot(
- GlobalObject::cast(global_context_->extension()));
- Handle<JSBuiltinsObject> builtins_global(global_context_->builtins());
- global_context_->set_extension(*inner_global);
- global_context_->set_global(*inner_global);
- global_context_->set_qml_global(*inner_global);
- global_context_->set_security_token(*inner_global);
+ GlobalObject::cast(native_context_->extension()));
+ Handle<JSBuiltinsObject> builtins_global(native_context_->builtins());
+ native_context_->set_extension(*inner_global);
+ native_context_->set_global_object(*inner_global);
+ native_context_->set_qml_global_object(*inner_global);
+ native_context_->set_security_token(*inner_global);
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
ForceSetProperty(builtins_global,
@@ -830,17 +828,17 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> empty_function) {
// --- G l o b a l C o n t e x t ---
// Use the empty function as closure (no scope info).
- global_context()->set_closure(*empty_function);
- global_context()->set_previous(NULL);
+ native_context()->set_closure(*empty_function);
+ native_context()->set_previous(NULL);
// Set extension and global object.
- global_context()->set_extension(*inner_global);
- global_context()->set_global(*inner_global);
- global_context()->set_qml_global(*inner_global);
+ native_context()->set_extension(*inner_global);
+ native_context()->set_global_object(*inner_global);
+ native_context()->set_qml_global_object(*inner_global);
// Security setup: Set the security token of the global object to
// its the inner global. This makes the security check between two
// different contexts fail by default even in case of global
// object reinitialization.
- global_context()->set_security_token(*inner_global);
+ native_context()->set_security_token(*inner_global);
Isolate* isolate = inner_global->GetIsolate();
Factory* factory = isolate->factory();
@@ -852,7 +850,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
inner_global, object_name,
isolate->object_function(), DONT_ENUM));
- Handle<JSObject> global = Handle<JSObject>(global_context()->global());
+ Handle<JSObject> global = Handle<JSObject>(native_context()->global_object());
// Install global Function object
InstallFunction(global, "Function", JS_FUNCTION_TYPE, JSFunction::kSize,
@@ -870,19 +868,27 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
// This seems a bit hackish, but we need to make sure Array.length
// is 1.
array_function->shared()->set_length(1);
- Handle<DescriptorArray> array_descriptors =
- factory->CopyAppendForeignDescriptor(
- factory->empty_descriptor_array(),
- factory->length_symbol(),
- factory->NewForeign(&Accessors::ArrayLength),
- static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
+
+ Handle<Map> initial_map(array_function->initial_map());
+ Handle<DescriptorArray> array_descriptors(
+ factory->NewDescriptorArray(0, 1));
+ DescriptorArray::WhitenessWitness witness(*array_descriptors);
+
+ Handle<Foreign> array_length(factory->NewForeign(&Accessors::ArrayLength));
+ PropertyAttributes attribs = static_cast<PropertyAttributes>(
+ DONT_ENUM | DONT_DELETE);
+ initial_map->set_instance_descriptors(*array_descriptors);
+
+ { // Add length.
+ CallbacksDescriptor d(*factory->length_symbol(), *array_length, attribs);
+ array_function->initial_map()->AppendDescriptor(&d, witness);
+ }
// array_function is used internally. JS code creating array object should
// search for the 'Array' property on the global object and use that one
// as the constructor. 'Array' property on a global object can be
// overwritten by JS code.
- global_context()->set_array_function(*array_function);
- array_function->initial_map()->set_instance_descriptors(*array_descriptors);
+ native_context()->set_array_function(*array_function);
}
{ // --- N u m b e r ---
@@ -890,7 +896,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
InstallFunction(global, "Number", JS_VALUE_TYPE, JSValue::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
- global_context()->set_number_function(*number_fun);
+ native_context()->set_number_function(*number_fun);
}
{ // --- B o o l e a n ---
@@ -898,7 +904,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
InstallFunction(global, "Boolean", JS_VALUE_TYPE, JSValue::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
- global_context()->set_boolean_function(*boolean_fun);
+ native_context()->set_boolean_function(*boolean_fun);
}
{ // --- S t r i n g ---
@@ -908,20 +914,24 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Builtins::kIllegal, true);
string_fun->shared()->set_construct_stub(
isolate->builtins()->builtin(Builtins::kStringConstructCode));
- global_context()->set_string_function(*string_fun);
- // Add 'length' property to strings.
- Handle<DescriptorArray> string_descriptors =
- factory->CopyAppendForeignDescriptor(
- factory->empty_descriptor_array(),
- factory->length_symbol(),
- factory->NewForeign(&Accessors::StringLength),
- static_cast<PropertyAttributes>(DONT_ENUM |
- DONT_DELETE |
- READ_ONLY));
+ native_context()->set_string_function(*string_fun);
Handle<Map> string_map =
- Handle<Map>(global_context()->string_function()->initial_map());
+ Handle<Map>(native_context()->string_function()->initial_map());
+ Handle<DescriptorArray> string_descriptors(
+ factory->NewDescriptorArray(0, 1));
+ DescriptorArray::WhitenessWitness witness(*string_descriptors);
+
+ Handle<Foreign> string_length(
+ factory->NewForeign(&Accessors::StringLength));
+ PropertyAttributes attribs = static_cast<PropertyAttributes>(
+ DONT_ENUM | DONT_DELETE | READ_ONLY);
string_map->set_instance_descriptors(*string_descriptors);
+
+ { // Add length.
+ CallbacksDescriptor d(*factory->length_symbol(), *string_length, attribs);
+ string_map->AppendDescriptor(&d, witness);
+ }
}
{ // --- D a t e ---
@@ -931,7 +941,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
- global_context()->set_date_function(*date_fun);
+ native_context()->set_date_function(*date_fun);
}
@@ -941,49 +951,46 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
InstallFunction(global, "RegExp", JS_REGEXP_TYPE, JSRegExp::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
- global_context()->set_regexp_function(*regexp_fun);
+ native_context()->set_regexp_function(*regexp_fun);
ASSERT(regexp_fun->has_initial_map());
Handle<Map> initial_map(regexp_fun->initial_map());
ASSERT_EQ(0, initial_map->inobject_properties());
- Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(5);
- DescriptorArray::WhitenessWitness witness(*descriptors);
PropertyAttributes final =
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
- int enum_index = 0;
+ Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(0, 5);
+ DescriptorArray::WhitenessWitness witness(*descriptors);
+ initial_map->set_instance_descriptors(*descriptors);
+
{
// ECMA-262, section 15.10.7.1.
FieldDescriptor field(heap->source_symbol(),
JSRegExp::kSourceFieldIndex,
- final,
- enum_index++);
- descriptors->Set(0, &field, witness);
+ final);
+ initial_map->AppendDescriptor(&field, witness);
}
{
// ECMA-262, section 15.10.7.2.
FieldDescriptor field(heap->global_symbol(),
JSRegExp::kGlobalFieldIndex,
- final,
- enum_index++);
- descriptors->Set(1, &field, witness);
+ final);
+ initial_map->AppendDescriptor(&field, witness);
}
{
// ECMA-262, section 15.10.7.3.
FieldDescriptor field(heap->ignore_case_symbol(),
JSRegExp::kIgnoreCaseFieldIndex,
- final,
- enum_index++);
- descriptors->Set(2, &field, witness);
+ final);
+ initial_map->AppendDescriptor(&field, witness);
}
{
// ECMA-262, section 15.10.7.4.
FieldDescriptor field(heap->multiline_symbol(),
JSRegExp::kMultilineFieldIndex,
- final,
- enum_index++);
- descriptors->Set(3, &field, witness);
+ final);
+ initial_map->AppendDescriptor(&field, witness);
}
{
// ECMA-262, section 15.10.7.5.
@@ -991,24 +998,20 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
FieldDescriptor field(heap->last_index_symbol(),
JSRegExp::kLastIndexFieldIndex,
- writable,
- enum_index++);
- descriptors->Set(4, &field, witness);
+ writable);
+ initial_map->AppendDescriptor(&field, witness);
}
- descriptors->SetNextEnumerationIndex(enum_index);
- descriptors->Sort(witness);
initial_map->set_inobject_properties(5);
initial_map->set_pre_allocated_property_fields(5);
initial_map->set_unused_property_fields(0);
initial_map->set_instance_size(
initial_map->instance_size() + 5 * kPointerSize);
- initial_map->set_instance_descriptors(*descriptors);
initial_map->set_visitor_id(StaticVisitorBase::GetVisitorId(*initial_map));
// RegExp prototype object is itself a RegExp.
- Handle<Map> proto_map = factory->CopyMapDropTransitions(initial_map);
- proto_map->set_prototype(global_context()->initial_object_prototype());
+ Handle<Map> proto_map = factory->CopyMap(initial_map);
+ proto_map->set_prototype(native_context()->initial_object_prototype());
Handle<JSObject> proto = factory->NewJSObjectFromMap(proto_map);
proto->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex,
heap->query_colon_symbol());
@@ -1032,7 +1035,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> cons = factory->NewFunction(name,
factory->the_hole_value());
{ MaybeObject* result = cons->SetInstancePrototype(
- global_context()->initial_object_prototype());
+ native_context()->initial_object_prototype());
if (result->IsFailure()) return false;
}
cons->SetInstanceClassName(*name);
@@ -1041,7 +1044,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
CHECK_NOT_EMPTY_HANDLE(isolate,
JSObject::SetLocalPropertyIgnoreAttributes(
global, name, json_object, DONT_ENUM));
- global_context()->set_json_object(*json_object);
+ native_context()->set_json_object(*json_object);
}
{ // --- arguments_boilerplate_
@@ -1053,7 +1056,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
isolate->builtins()->builtin(Builtins::kIllegal));
Handle<JSObject> prototype =
Handle<JSObject>(
- JSObject::cast(global_context()->object_function()->prototype()));
+ JSObject::cast(native_context()->object_function()->prototype()));
Handle<JSFunction> function =
factory->NewFunctionWithPrototype(symbol,
@@ -1067,7 +1070,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
function->shared()->set_expected_nof_properties(2);
Handle<JSObject> result = factory->NewJSObject(function);
- global_context()->set_arguments_boilerplate(*result);
+ native_context()->set_arguments_boilerplate(*result);
// Note: length must be added as the first property and
// callee must be added as the second property.
CHECK_NOT_EMPTY_HANDLE(isolate,
@@ -1082,11 +1085,11 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->callee_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() > Heap::kArgumentsCalleeIndex);
@@ -1094,7 +1097,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
// Check the state of the object.
ASSERT(result->HasFastProperties());
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
#endif
}
@@ -1108,8 +1111,8 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
array = factory->NewFixedArray(0);
elements->set(1, *array);
- Handle<Map> old_map(global_context()->arguments_boilerplate()->map());
- Handle<Map> new_map = factory->CopyMapDropTransitions(old_map);
+ Handle<Map> old_map(native_context()->arguments_boilerplate()->map());
+ Handle<Map> new_map = factory->CopyMap(old_map);
new_map->set_pre_allocated_property_fields(2);
Handle<JSObject> result = factory->NewJSObjectFromMap(new_map);
// Set elements kind after allocating the object because
@@ -1117,7 +1120,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS);
result->set_elements(*elements);
ASSERT(result->HasNonStrictArgumentsElements());
- global_context()->set_aliased_arguments_boilerplate(*result);
+ native_context()->set_aliased_arguments_boilerplate(*result);
}
{ // --- strict mode arguments boilerplate
@@ -1137,39 +1140,43 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
caller->set_getter(*throw_function);
caller->set_setter(*throw_function);
+ // Create the map. Allocate one in-object field for length.
+ Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE,
+ Heap::kArgumentsObjectSizeStrict);
// Create the descriptor array for the arguments object.
- Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(3);
+ Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(0, 3);
DescriptorArray::WhitenessWitness witness(*descriptors);
+ map->set_instance_descriptors(*descriptors);
+
{ // length
FieldDescriptor d(*factory->length_symbol(), 0, DONT_ENUM);
- descriptors->Set(0, &d, witness);
+ map->AppendDescriptor(&d, witness);
}
{ // callee
- CallbacksDescriptor d(*factory->callee_symbol(), *callee, attributes);
- descriptors->Set(1, &d, witness);
+ CallbacksDescriptor d(*factory->callee_symbol(),
+ *callee,
+ attributes);
+ map->AppendDescriptor(&d, witness);
}
{ // caller
- CallbacksDescriptor d(*factory->caller_symbol(), *caller, attributes);
- descriptors->Set(2, &d, witness);
+ CallbacksDescriptor d(*factory->caller_symbol(),
+ *caller,
+ attributes);
+ map->AppendDescriptor(&d, witness);
}
- descriptors->Sort(witness);
- // Create the map. Allocate one in-object field for length.
- Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE,
- Heap::kArgumentsObjectSizeStrict);
- map->set_instance_descriptors(*descriptors);
map->set_function_with_prototype(true);
- map->set_prototype(global_context()->object_function()->prototype());
+ map->set_prototype(native_context()->object_function()->prototype());
map->set_pre_allocated_property_fields(1);
map->set_inobject_properties(1);
// Copy constructor from the non-strict arguments boilerplate.
map->set_constructor(
- global_context()->arguments_boilerplate()->map()->constructor());
+ native_context()->arguments_boilerplate()->map()->constructor());
// Allocate the arguments boilerplate object.
Handle<JSObject> result = factory->NewJSObjectFromMap(map);
- global_context()->set_strict_mode_arguments_boilerplate(*result);
+ native_context()->set_strict_mode_arguments_boilerplate(*result);
// Add length property only for strict mode boilerplate.
CHECK_NOT_EMPTY_HANDLE(isolate,
@@ -1180,14 +1187,14 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsField());
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() > Heap::kArgumentsLengthIndex);
// Check the state of the object.
ASSERT(result->HasFastProperties());
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
#endif
}
@@ -1204,7 +1211,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<String> name = factory->LookupAsciiSymbol("context_extension");
context_extension_fun->shared()->set_instance_class_name(*name);
- global_context()->set_context_extension_function(*context_extension_fun);
+ native_context()->set_context_extension_function(*context_extension_fun);
}
@@ -1216,7 +1223,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> delegate =
factory->NewFunction(factory->empty_symbol(), JS_OBJECT_TYPE,
JSObject::kHeaderSize, code, true);
- global_context()->set_call_as_function_delegate(*delegate);
+ native_context()->set_call_as_function_delegate(*delegate);
delegate->shared()->DontAdaptArguments();
}
@@ -1228,21 +1235,21 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> delegate =
factory->NewFunction(factory->empty_symbol(), JS_OBJECT_TYPE,
JSObject::kHeaderSize, code, true);
- global_context()->set_call_as_constructor_delegate(*delegate);
+ native_context()->set_call_as_constructor_delegate(*delegate);
delegate->shared()->DontAdaptArguments();
}
// Initialize the out of memory slot.
- global_context()->set_out_of_memory(heap->false_value());
+ native_context()->set_out_of_memory(heap->false_value());
// Initialize the data slot.
- global_context()->set_data(heap->undefined_value());
+ native_context()->set_data(heap->undefined_value());
{
// Initialize the random seed slot.
Handle<ByteArray> zeroed_byte_array(
factory->NewByteArray(kRandomStateSize));
- global_context()->set_random_seed(*zeroed_byte_array);
+ native_context()->set_random_seed(*zeroed_byte_array);
memset(zeroed_byte_array->GetDataStartAddress(), 0, kRandomStateSize);
}
return true;
@@ -1250,7 +1257,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
void Genesis::InitializeExperimentalGlobal() {
- Handle<JSObject> global = Handle<JSObject>(global_context()->global());
+ Handle<JSObject> global = Handle<JSObject>(native_context()->global_object());
// TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no
// longer need to live behind a flag, so functions get added to the snapshot.
@@ -1342,6 +1349,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
script_name,
0,
0,
+ top_context,
extension,
NULL,
Handle<String>::null(),
@@ -1353,7 +1361,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
// Set up the function context. Conceptually, we should clone the
// function before overwriting the context but since we're in a
// single-threaded environment it is not strictly necessary.
- ASSERT(top_context->IsGlobalContext());
+ ASSERT(top_context->IsNativeContext());
Handle<Context> context =
Handle<Context>(use_runtime_context
? Handle<Context>(top_context->runtime_context())
@@ -1366,7 +1374,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
Handle<Object> receiver =
Handle<Object>(use_runtime_context
? top_context->builtins()
- : top_context->global());
+ : top_context->global_object());
bool has_pending_exception;
Execution::Call(fun, receiver, 0, NULL, &has_pending_exception);
if (has_pending_exception) return false;
@@ -1377,9 +1385,9 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
#define INSTALL_NATIVE(Type, name, var) \
Handle<String> var##_name = factory()->LookupAsciiSymbol(name); \
Object* var##_native = \
- global_context()->builtins()->GetPropertyNoExceptionThrown( \
+ native_context()->builtins()->GetPropertyNoExceptionThrown( \
*var##_name); \
- global_context()->set_##var(Type::cast(var##_native));
+ native_context()->set_##var(Type::cast(var##_native));
void Genesis::InstallNativeFunctions() {
@@ -1409,6 +1417,11 @@ void Genesis::InstallExperimentalNativeFunctions() {
INSTALL_NATIVE(JSFunction, "DerivedSetTrap", derived_set_trap);
INSTALL_NATIVE(JSFunction, "ProxyEnumerate", proxy_enumerate);
}
+ if (FLAG_harmony_observation) {
+ INSTALL_NATIVE(JSFunction, "NotifyChange", observers_notify_change);
+ INSTALL_NATIVE(JSFunction, "DeliverChangeRecords",
+ observers_deliver_changes);
+ }
}
#undef INSTALL_NATIVE
@@ -1419,7 +1432,7 @@ bool Genesis::InstallNatives() {
// Create a function for the builtins object. Allocate space for the
// JavaScript builtins, a reference to the builtins object
- // (itself) and a reference to the global_context directly in the object.
+ // (itself) and a reference to the native_context directly in the object.
Handle<Code> code = Handle<Code>(
isolate()->builtins()->builtin(Builtins::kIllegal));
Handle<JSFunction> builtins_fun =
@@ -1429,12 +1442,15 @@ bool Genesis::InstallNatives() {
Handle<String> name = factory()->LookupAsciiSymbol("builtins");
builtins_fun->shared()->set_instance_class_name(*name);
+ builtins_fun->initial_map()->set_dictionary_map(true);
+ builtins_fun->initial_map()->set_prototype(heap()->null_value());
// Allocate the builtins object.
Handle<JSBuiltinsObject> builtins =
Handle<JSBuiltinsObject>::cast(factory()->NewGlobalObject(builtins_fun));
builtins->set_builtins(*builtins);
- builtins->set_global_context(*global_context());
+ builtins->set_native_context(*native_context());
+ builtins->set_global_context(*native_context());
builtins->set_global_receiver(*builtins);
// Set up the 'global' properties of the builtins object. The
@@ -1444,26 +1460,27 @@ bool Genesis::InstallNatives() {
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
Handle<String> global_symbol = factory()->LookupAsciiSymbol("global");
- Handle<Object> global_obj(global_context()->global());
+ Handle<Object> global_obj(native_context()->global_object());
CHECK_NOT_EMPTY_HANDLE(isolate(),
JSObject::SetLocalPropertyIgnoreAttributes(
builtins, global_symbol, global_obj, attributes));
// Set up the reference from the global object to the builtins object.
- JSGlobalObject::cast(global_context()->global())->set_builtins(*builtins);
+ JSGlobalObject::cast(native_context()->global_object())->
+ set_builtins(*builtins);
- // Create a bridge function that has context in the global context.
+ // Create a bridge function that has context in the native context.
Handle<JSFunction> bridge =
factory()->NewFunction(factory()->empty_symbol(),
factory()->undefined_value());
- ASSERT(bridge->context() == *isolate()->global_context());
+ ASSERT(bridge->context() == *isolate()->native_context());
// Allocate the builtins context.
Handle<Context> context =
factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
- context->set_global(*builtins); // override builtins global object
+ context->set_global_object(*builtins); // override builtins global object
- global_context()->set_runtime_context(*context);
+ native_context()->set_runtime_context(*context);
{ // -- S c r i p t
// Builtin functions for Script.
@@ -1474,118 +1491,134 @@ bool Genesis::InstallNatives() {
Handle<JSObject> prototype =
factory()->NewJSObject(isolate()->object_function(), TENURED);
SetPrototype(script_fun, prototype);
- global_context()->set_script_function(*script_fun);
-
- // Add 'source' and 'data' property to scripts.
- PropertyAttributes common_attributes =
- static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
- Handle<Foreign> foreign_source =
- factory()->NewForeign(&Accessors::ScriptSource);
- Handle<DescriptorArray> script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- factory()->empty_descriptor_array(),
- factory()->LookupAsciiSymbol("source"),
- foreign_source,
- common_attributes);
- Handle<Foreign> foreign_name =
- factory()->NewForeign(&Accessors::ScriptName);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("name"),
- foreign_name,
- common_attributes);
- Handle<Foreign> foreign_id = factory()->NewForeign(&Accessors::ScriptId);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("id"),
- foreign_id,
- common_attributes);
- Handle<Foreign> foreign_line_offset =
- factory()->NewForeign(&Accessors::ScriptLineOffset);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("line_offset"),
- foreign_line_offset,
- common_attributes);
- Handle<Foreign> foreign_column_offset =
- factory()->NewForeign(&Accessors::ScriptColumnOffset);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("column_offset"),
- foreign_column_offset,
- common_attributes);
- Handle<Foreign> foreign_data =
- factory()->NewForeign(&Accessors::ScriptData);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("data"),
- foreign_data,
- common_attributes);
- Handle<Foreign> foreign_type =
- factory()->NewForeign(&Accessors::ScriptType);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("type"),
- foreign_type,
- common_attributes);
- Handle<Foreign> foreign_compilation_type =
- factory()->NewForeign(&Accessors::ScriptCompilationType);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("compilation_type"),
- foreign_compilation_type,
- common_attributes);
- Handle<Foreign> foreign_line_ends =
- factory()->NewForeign(&Accessors::ScriptLineEnds);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("line_ends"),
- foreign_line_ends,
- common_attributes);
- Handle<Foreign> foreign_context_data =
- factory()->NewForeign(&Accessors::ScriptContextData);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("context_data"),
- foreign_context_data,
- common_attributes);
- Handle<Foreign> foreign_eval_from_script =
- factory()->NewForeign(&Accessors::ScriptEvalFromScript);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_script"),
- foreign_eval_from_script,
- common_attributes);
- Handle<Foreign> foreign_eval_from_script_position =
- factory()->NewForeign(&Accessors::ScriptEvalFromScriptPosition);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_script_position"),
- foreign_eval_from_script_position,
- common_attributes);
- Handle<Foreign> foreign_eval_from_function_name =
- factory()->NewForeign(&Accessors::ScriptEvalFromFunctionName);
- script_descriptors =
- factory()->CopyAppendForeignDescriptor(
- script_descriptors,
- factory()->LookupAsciiSymbol("eval_from_function_name"),
- foreign_eval_from_function_name,
- common_attributes);
+ native_context()->set_script_function(*script_fun);
Handle<Map> script_map = Handle<Map>(script_fun->initial_map());
+
+ Handle<DescriptorArray> script_descriptors(
+ factory()->NewDescriptorArray(0, 13));
+ DescriptorArray::WhitenessWitness witness(*script_descriptors);
+
+ Handle<Foreign> script_source(
+ factory()->NewForeign(&Accessors::ScriptSource));
+ Handle<Foreign> script_name(factory()->NewForeign(&Accessors::ScriptName));
+ Handle<String> id_symbol(factory()->LookupAsciiSymbol("id"));
+ Handle<Foreign> script_id(factory()->NewForeign(&Accessors::ScriptId));
+ Handle<String> line_offset_symbol(
+ factory()->LookupAsciiSymbol("line_offset"));
+ Handle<Foreign> script_line_offset(
+ factory()->NewForeign(&Accessors::ScriptLineOffset));
+ Handle<String> column_offset_symbol(
+ factory()->LookupAsciiSymbol("column_offset"));
+ Handle<Foreign> script_column_offset(
+ factory()->NewForeign(&Accessors::ScriptColumnOffset));
+ Handle<String> data_symbol(factory()->LookupAsciiSymbol("data"));
+ Handle<Foreign> script_data(factory()->NewForeign(&Accessors::ScriptData));
+ Handle<String> type_symbol(factory()->LookupAsciiSymbol("type"));
+ Handle<Foreign> script_type(factory()->NewForeign(&Accessors::ScriptType));
+ Handle<String> compilation_type_symbol(
+ factory()->LookupAsciiSymbol("compilation_type"));
+ Handle<Foreign> script_compilation_type(
+ factory()->NewForeign(&Accessors::ScriptCompilationType));
+ Handle<String> line_ends_symbol(factory()->LookupAsciiSymbol("line_ends"));
+ Handle<Foreign> script_line_ends(
+ factory()->NewForeign(&Accessors::ScriptLineEnds));
+ Handle<String> context_data_symbol(
+ factory()->LookupAsciiSymbol("context_data"));
+ Handle<Foreign> script_context_data(
+ factory()->NewForeign(&Accessors::ScriptContextData));
+ Handle<String> eval_from_script_symbol(
+ factory()->LookupAsciiSymbol("eval_from_script"));
+ Handle<Foreign> script_eval_from_script(
+ factory()->NewForeign(&Accessors::ScriptEvalFromScript));
+ Handle<String> eval_from_script_position_symbol(
+ factory()->LookupAsciiSymbol("eval_from_script_position"));
+ Handle<Foreign> script_eval_from_script_position(
+ factory()->NewForeign(&Accessors::ScriptEvalFromScriptPosition));
+ Handle<String> eval_from_function_name_symbol(
+ factory()->LookupAsciiSymbol("eval_from_function_name"));
+ Handle<Foreign> script_eval_from_function_name(
+ factory()->NewForeign(&Accessors::ScriptEvalFromFunctionName));
+ PropertyAttributes attribs =
+ static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
script_map->set_instance_descriptors(*script_descriptors);
+ {
+ CallbacksDescriptor d(
+ *factory()->source_symbol(), *script_source, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*factory()->name_symbol(), *script_name, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*id_symbol, *script_id, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*line_offset_symbol, *script_line_offset, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *column_offset_symbol, *script_column_offset, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*data_symbol, *script_data, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*type_symbol, *script_type, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *compilation_type_symbol, *script_compilation_type, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(*line_ends_symbol, *script_line_ends, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *context_data_symbol, *script_context_data, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *eval_from_script_symbol, *script_eval_from_script, attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *eval_from_script_position_symbol,
+ *script_eval_from_script_position,
+ attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
+ {
+ CallbacksDescriptor d(
+ *eval_from_function_name_symbol,
+ *script_eval_from_function_name,
+ attribs);
+ script_map->AppendDescriptor(&d, witness);
+ }
+
// Allocate the empty script.
Handle<Script> script = factory()->NewScript(factory()->empty_string());
script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
@@ -1603,7 +1636,7 @@ bool Genesis::InstallNatives() {
Handle<JSObject> prototype =
factory()->NewJSObject(isolate()->object_function(), TENURED);
SetPrototype(opaque_reference_fun, prototype);
- global_context()->set_opaque_reference_function(*opaque_reference_fun);
+ native_context()->set_opaque_reference_function(*opaque_reference_fun);
}
{ // --- I n t e r n a l A r r a y ---
@@ -1633,25 +1666,31 @@ bool Genesis::InstallNatives() {
// elements in InternalArrays can be set to non-Smi values without going
// through a common bottleneck that would make the SMI_ONLY -> FAST_ELEMENT
// transition easy to trap. Moreover, they rarely are smi-only.
- MaybeObject* maybe_map =
- array_function->initial_map()->CopyDropTransitions();
+ MaybeObject* maybe_map = array_function->initial_map()->Copy();
Map* new_map;
- if (!maybe_map->To<Map>(&new_map)) return false;
- new_map->set_elements_kind(FAST_ELEMENTS);
+ if (!maybe_map->To(&new_map)) return false;
+ new_map->set_elements_kind(FAST_HOLEY_ELEMENTS);
array_function->set_initial_map(new_map);
// Make "length" magic on instances.
- Handle<DescriptorArray> array_descriptors =
- factory()->CopyAppendForeignDescriptor(
- factory()->empty_descriptor_array(),
- factory()->length_symbol(),
- factory()->NewForeign(&Accessors::ArrayLength),
- static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
-
- array_function->initial_map()->set_instance_descriptors(
- *array_descriptors);
+ Handle<Map> initial_map(array_function->initial_map());
+ Handle<DescriptorArray> array_descriptors(
+ factory()->NewDescriptorArray(0, 1));
+ DescriptorArray::WhitenessWitness witness(*array_descriptors);
+
+ Handle<Foreign> array_length(factory()->NewForeign(
+ &Accessors::ArrayLength));
+ PropertyAttributes attribs = static_cast<PropertyAttributes>(
+ DONT_ENUM | DONT_DELETE);
+ initial_map->set_instance_descriptors(*array_descriptors);
+
+ { // Add length.
+ CallbacksDescriptor d(
+ *factory()->length_symbol(), *array_length, attribs);
+ array_function->initial_map()->AppendDescriptor(&d, witness);
+ }
- global_context()->set_internal_array_function(*array_function);
+ native_context()->set_internal_array_function(*array_function);
}
if (FLAG_disable_native_files) {
@@ -1674,16 +1713,16 @@ bool Genesis::InstallNatives() {
// Store the map for the string prototype after the natives has been compiled
// and the String function has been set up.
- Handle<JSFunction> string_function(global_context()->string_function());
+ Handle<JSFunction> string_function(native_context()->string_function());
ASSERT(JSObject::cast(
string_function->initial_map()->prototype())->HasFastProperties());
- global_context()->set_string_function_prototype_map(
+ native_context()->set_string_function_prototype_map(
HeapObject::cast(string_function->initial_map()->prototype())->map());
// Install Function.prototype.call and apply.
{ Handle<String> key = factory()->function_class_symbol();
Handle<JSFunction> function =
- Handle<JSFunction>::cast(GetProperty(isolate()->global(), key));
+ Handle<JSFunction>::cast(GetProperty(isolate()->global_object(), key));
Handle<JSObject> proto =
Handle<JSObject>(JSObject::cast(function->instance_prototype()));
@@ -1721,7 +1760,7 @@ bool Genesis::InstallNatives() {
// RegExpResult initial map.
// Find global.Array.prototype to inherit from.
- Handle<JSFunction> array_constructor(global_context()->array_function());
+ Handle<JSFunction> array_constructor(native_context()->array_function());
Handle<JSObject> array_prototype(
JSObject::cast(array_constructor->instance_prototype()));
@@ -1736,44 +1775,45 @@ bool Genesis::InstallNatives() {
// Update map with length accessor from Array and add "index" and "input".
Handle<DescriptorArray> reresult_descriptors =
- factory()->NewDescriptorArray(3);
+ factory()->NewDescriptorArray(0, 3);
DescriptorArray::WhitenessWitness witness(*reresult_descriptors);
+ initial_map->set_instance_descriptors(*reresult_descriptors);
- JSFunction* array_function = global_context()->array_function();
- Handle<DescriptorArray> array_descriptors(
- array_function->initial_map()->instance_descriptors());
- int index = array_descriptors->SearchWithCache(heap()->length_symbol());
- MaybeObject* copy_result =
- reresult_descriptors->CopyFrom(0, *array_descriptors, index, witness);
- if (copy_result->IsFailure()) return false;
-
- int enum_index = 0;
+ {
+ JSFunction* array_function = native_context()->array_function();
+ Handle<DescriptorArray> array_descriptors(
+ array_function->initial_map()->instance_descriptors());
+ String* length = heap()->length_symbol();
+ int old = array_descriptors->SearchWithCache(
+ length, array_function->initial_map());
+ ASSERT(old != DescriptorArray::kNotFound);
+ CallbacksDescriptor desc(length,
+ array_descriptors->GetValue(old),
+ array_descriptors->GetDetails(old).attributes());
+ initial_map->AppendDescriptor(&desc, witness);
+ }
{
FieldDescriptor index_field(heap()->index_symbol(),
JSRegExpResult::kIndexIndex,
- NONE,
- enum_index++);
- reresult_descriptors->Set(1, &index_field, witness);
+ NONE);
+ initial_map->AppendDescriptor(&index_field, witness);
}
{
FieldDescriptor input_field(heap()->input_symbol(),
JSRegExpResult::kInputIndex,
- NONE,
- enum_index++);
- reresult_descriptors->Set(2, &input_field, witness);
+ NONE);
+ initial_map->AppendDescriptor(&input_field, witness);
}
- reresult_descriptors->Sort(witness);
initial_map->set_inobject_properties(2);
initial_map->set_pre_allocated_property_fields(2);
initial_map->set_unused_property_fields(0);
- initial_map->set_instance_descriptors(*reresult_descriptors);
- global_context()->set_regexp_result_map(*initial_map);
+ native_context()->set_regexp_result_map(*initial_map);
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
builtins->Verify();
#endif
@@ -1795,6 +1835,11 @@ bool Genesis::InstallExperimentalNatives() {
"native collection.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
+ if (FLAG_harmony_observation &&
+ strcmp(ExperimentalNatives::GetScriptName(i).start(),
+ "native object-observe.js") == 0) {
+ if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+ }
}
InstallExperimentalNativeFunctions();
@@ -1804,10 +1849,10 @@ bool Genesis::InstallExperimentalNatives() {
static Handle<JSObject> ResolveBuiltinIdHolder(
- Handle<Context> global_context,
+ Handle<Context> native_context,
const char* holder_expr) {
- Factory* factory = global_context->GetIsolate()->factory();
- Handle<GlobalObject> global(global_context->global());
+ Factory* factory = native_context->GetIsolate()->factory();
+ Handle<GlobalObject> global(native_context->global_object());
const char* period_pos = strchr(holder_expr, '.');
if (period_pos == NULL) {
return Handle<JSObject>::cast(
@@ -1838,7 +1883,7 @@ void Genesis::InstallBuiltinFunctionIds() {
#define INSTALL_BUILTIN_ID(holder_expr, fun_name, name) \
{ \
Handle<JSObject> holder = ResolveBuiltinIdHolder( \
- global_context(), #holder_expr); \
+ native_context(), #holder_expr); \
BuiltinFunctionId id = k##name; \
InstallBuiltinFunctionId(holder, #fun_name, id); \
}
@@ -1850,7 +1895,7 @@ void Genesis::InstallBuiltinFunctionIds() {
// Do not forget to update macros.py with named constant
// of cache id.
#define JSFUNCTION_RESULT_CACHE_LIST(F) \
- F(16, global_context()->regexp_function())
+ F(16, native_context()->regexp_function())
static FixedArray* CreateCache(int size, Handle<JSFunction> factory_function) {
@@ -1886,34 +1931,35 @@ void Genesis::InstallJSFunctionResultCaches() {
#undef F
- global_context()->set_jsfunction_result_caches(*caches);
+ native_context()->set_jsfunction_result_caches(*caches);
}
void Genesis::InitializeNormalizedMapCaches() {
Handle<FixedArray> array(
FACTORY->NewFixedArray(NormalizedMapCache::kEntries, TENURED));
- global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
+ native_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
}
-bool Bootstrapper::InstallExtensions(Handle<Context> global_context,
+bool Bootstrapper::InstallExtensions(Handle<Context> native_context,
v8::ExtensionConfiguration* extensions) {
- Isolate* isolate = global_context->GetIsolate();
+ Isolate* isolate = native_context->GetIsolate();
BootstrapperActive active;
SaveContext saved_context(isolate);
- isolate->set_context(*global_context);
- if (!Genesis::InstallExtensions(global_context, extensions)) return false;
- Genesis::InstallSpecialObjects(global_context);
+ isolate->set_context(*native_context);
+ if (!Genesis::InstallExtensions(native_context, extensions)) return false;
+ Genesis::InstallSpecialObjects(native_context);
return true;
}
-void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
- Isolate* isolate = global_context->GetIsolate();
+void Genesis::InstallSpecialObjects(Handle<Context> native_context) {
+ Isolate* isolate = native_context->GetIsolate();
Factory* factory = isolate->factory();
HandleScope scope;
- Handle<JSGlobalObject> global(JSGlobalObject::cast(global_context->global()));
+ Handle<JSGlobalObject> global(JSGlobalObject::cast(
+ native_context->global_object()));
// Expose the natives in global if a name for it is specified.
if (FLAG_expose_natives_as != NULL && strlen(FLAG_expose_natives_as) != 0) {
Handle<String> natives = factory->LookupAsciiSymbol(FLAG_expose_natives_as);
@@ -1942,10 +1988,10 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
// debugger but without tanking the whole context.
if (!debug->Load()) return;
// Set the security token for the debugger context to the same as
- // the shell global context to allow calling between these (otherwise
+ // the shell native context to allow calling between these (otherwise
// exposing debug global object doesn't make much sense).
debug->debug_context()->set_security_token(
- global_context->security_token());
+ native_context->security_token());
Handle<String> debug_string =
factory->LookupAsciiSymbol(FLAG_expose_debug_as);
@@ -1984,7 +2030,7 @@ void Genesis::ExtensionStates::set_state(RegisteredExtension* extension,
reinterpret_cast<void*>(static_cast<intptr_t>(state));
}
-bool Genesis::InstallExtensions(Handle<Context> global_context,
+bool Genesis::InstallExtensions(Handle<Context> native_context,
v8::ExtensionConfiguration* extensions) {
// TODO(isolates): Extensions on multiple isolates may take a little more
// effort. (The external API reads 'ignore'-- does that mean
@@ -2004,6 +2050,9 @@ bool Genesis::InstallExtensions(Handle<Context> global_context,
if (FLAG_expose_externalize_string) {
InstallExtension("v8/externalize", &extension_states);
}
+ if (FLAG_track_gc_object_stats) {
+ InstallExtension("v8/statistics", &extension_states);
+ }
if (extensions == NULL) return true;
// Install required extensions
@@ -2094,14 +2143,10 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
Handle<JSFunction> function
= Handle<JSFunction>(JSFunction::cast(function_object));
builtins->set_javascript_builtin(id, *function);
- Handle<SharedFunctionInfo> shared
- = Handle<SharedFunctionInfo>(function->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+ if (!JSFunction::CompileLazy(function, CLEAR_EXCEPTION)) {
return false;
}
- // Set the code object on the function object.
- function->ReplaceCode(function->shared()->code());
- builtins->set_javascript_builtin_code(id, shared->code());
+ builtins->set_javascript_builtin_code(id, function->shared()->code());
}
return true;
}
@@ -2110,8 +2155,9 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
bool Genesis::ConfigureGlobalObjects(
v8::Handle<v8::ObjectTemplate> global_proxy_template) {
Handle<JSObject> global_proxy(
- JSObject::cast(global_context()->global_proxy()));
- Handle<JSObject> inner_global(JSObject::cast(global_context()->global()));
+ JSObject::cast(native_context()->global_proxy()));
+ Handle<JSObject> inner_global(
+ JSObject::cast(native_context()->global_object()));
if (!global_proxy_template.IsEmpty()) {
// Configure the global proxy object.
@@ -2185,27 +2231,24 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
LookupResult result(isolate());
to->LocalLookup(descs->GetKey(i), &result);
// If the property is already there we skip it
- if (result.IsProperty()) continue;
+ if (result.IsFound()) continue;
HandleScope inner;
ASSERT(!to->HasFastProperties());
// Add to dictionary.
Handle<String> key = Handle<String>(descs->GetKey(i));
Handle<Object> callbacks(descs->GetCallbacksObject(i));
- PropertyDetails d =
- PropertyDetails(details.attributes(), CALLBACKS, details.index());
+ PropertyDetails d = PropertyDetails(details.attributes(),
+ CALLBACKS,
+ details.descriptor_index());
JSObject::SetNormalizedProperty(to, key, callbacks, d);
break;
}
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- // Ignore non-properties.
- break;
case NORMAL:
// Do not occur since the from object has fast properties.
case HANDLER:
case INTERCEPTOR:
+ case TRANSITION:
+ case NONEXISTENT:
// No element in instance descriptors have proxy or interceptor type.
UNREACHABLE();
break;
@@ -2222,7 +2265,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
// If the property is already there we skip it.
LookupResult result(isolate());
to->LocalLookup(String::cast(raw_key), &result);
- if (result.IsProperty()) continue;
+ if (result.IsFound()) continue;
// Set the property.
Handle<String> key = Handle<String>(String::cast(raw_key));
Handle<Object> value = Handle<Object>(properties->ValueAt(i));
@@ -2261,7 +2304,7 @@ void Genesis::TransferObject(Handle<JSObject> from, Handle<JSObject> to) {
// Transfer the prototype (new map is needed).
Handle<Map> old_to_map = Handle<Map>(to->map());
- Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
+ Handle<Map> new_to_map = factory->CopyMap(old_to_map);
new_to_map->set_prototype(from->map()->prototype());
to->set_map(*new_to_map);
}
@@ -2275,9 +2318,9 @@ void Genesis::MakeFunctionInstancePrototypeWritable() {
ASSERT(!strict_mode_function_instance_map_writable_prototype_.is_null());
// Replace function instance maps to make prototype writable.
- global_context()->set_function_map(
+ native_context()->set_function_map(
*function_instance_map_writable_prototype_);
- global_context()->set_strict_mode_function_map(
+ native_context()->set_strict_mode_function_map(
*strict_mode_function_instance_map_writable_prototype_);
}
@@ -2303,10 +2346,10 @@ Genesis::Genesis(Isolate* isolate,
Handle<Context> new_context = Snapshot::NewContextFromSnapshot();
if (!new_context.is_null()) {
- global_context_ =
+ native_context_ =
Handle<Context>::cast(isolate->global_handles()->Create(*new_context));
- AddToWeakGlobalContextList(*global_context_);
- isolate->set_context(*global_context_);
+ AddToWeakNativeContextList(*native_context_);
+ isolate->set_context(*native_context_);
isolate->counters()->contexts_created_by_snapshot()->Increment();
Handle<GlobalObject> inner_global;
Handle<JSGlobalProxy> global_proxy =
@@ -2342,7 +2385,7 @@ Genesis::Genesis(Isolate* isolate,
InitializeExperimentalGlobal();
if (!InstallExperimentalNatives()) return;
- result_ = global_context_;
+ result_ = native_context_;
}
diff --git a/src/3rdparty/v8/src/bootstrapper.h b/src/3rdparty/v8/src/bootstrapper.h
index 101c2e1..179e65c 100644
--- a/src/3rdparty/v8/src/bootstrapper.h
+++ b/src/3rdparty/v8/src/bootstrapper.h
@@ -104,7 +104,7 @@ class Bootstrapper {
void DetachGlobal(Handle<Context> env);
// Reattach an outer global object to an environment.
- void ReattachGlobal(Handle<Context> env, Handle<Object> global_object);
+ void ReattachGlobal(Handle<Context> env, Handle<JSGlobalProxy> global_proxy);
// Traverses the pointers for memory management.
void Iterate(ObjectVisitor* v);
@@ -126,7 +126,7 @@ class Bootstrapper {
char* AllocateAutoDeletedArray(int bytes);
// Used for new context creation.
- bool InstallExtensions(Handle<Context> global_context,
+ bool InstallExtensions(Handle<Context> native_context,
v8::ExtensionConfiguration* extensions);
SourceCodeCache* extensions_cache() { return &extensions_cache_; }
diff --git a/src/3rdparty/v8/src/builtins.cc b/src/3rdparty/v8/src/builtins.cc
index 84a0c3d..620e4b3 100644
--- a/src/3rdparty/v8/src/builtins.cc
+++ b/src/3rdparty/v8/src/builtins.cc
@@ -35,6 +35,7 @@
#include "ic-inl.h"
#include "heap-profiler.h"
#include "mark-compact.h"
+#include "stub-cache.h"
#include "vm-state-inl.h"
namespace v8 {
@@ -199,10 +200,13 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
array->set_length(Smi::FromInt(0));
array->set_elements(heap->empty_fixed_array());
if (!FLAG_smi_only_arrays) {
- Context* global_context = isolate->context()->global_context();
- if (array->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
- !global_context->object_js_array_map()->IsUndefined()) {
- array->set_map(Map::cast(global_context->object_js_array_map()));
+ Context* native_context = isolate->context()->native_context();
+ if (array->GetElementsKind() == GetInitialFastElementsKind() &&
+ !native_context->js_array_maps()->IsUndefined()) {
+ FixedArray* map_array =
+ FixedArray::cast(native_context->js_array_maps());
+ array->set_map(Map::cast(map_array->
+ get(TERMINAL_FAST_ELEMENTS_KIND)));
}
}
} else {
@@ -222,6 +226,13 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
{ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
}
+ ElementsKind elements_kind = array->GetElementsKind();
+ if (!IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe_array =
+ array->TransitionElementsKind(elements_kind);
+ if (maybe_array->IsFailure()) return maybe_array;
+ }
// We do not use SetContent to skip the unnecessary elements type check.
array->set_elements(FixedArray::cast(fixed_array));
array->set_length(Smi::cast(obj));
@@ -250,7 +261,7 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
// Allocate an appropriately typed elements array.
MaybeObject* maybe_elms;
ElementsKind elements_kind = array->GetElementsKind();
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastDoubleElementsKind(elements_kind)) {
maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
number_of_elements);
} else {
@@ -261,13 +272,15 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
// Fill in the content
switch (array->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS: {
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS: {
FixedArray* smi_elms = FixedArray::cast(elms);
for (int index = 0; index < number_of_elements; index++) {
smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
}
break;
}
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
AssertNoAllocation no_gc;
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
@@ -277,6 +290,7 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
for (int index = 0; index < number_of_elements; index++) {
@@ -299,7 +313,7 @@ BUILTIN(InternalArrayCodeGeneric) {
return ArrayCodeGenericCommon(
&args,
isolate,
- isolate->context()->global_context()->internal_array_function());
+ isolate->context()->native_context()->internal_array_function());
}
@@ -307,7 +321,7 @@ BUILTIN(ArrayCodeGeneric) {
return ArrayCodeGenericCommon(
&args,
isolate,
- isolate->context()->global_context()->array_function());
+ isolate->context()->native_context()->array_function());
}
@@ -389,7 +403,7 @@ static FixedArray* LeftTrimFixedArray(Heap* heap,
static bool ArrayPrototypeHasNoElements(Heap* heap,
- Context* global_context,
+ Context* native_context,
JSObject* array_proto) {
// This method depends on non writability of Object and Array prototype
// fields.
@@ -398,7 +412,7 @@ static bool ArrayPrototypeHasNoElements(Heap* heap,
Object* proto = array_proto->GetPrototype();
if (proto == heap->null_value()) return false;
array_proto = JSObject::cast(proto);
- if (array_proto != global_context->initial_object_prototype()) return false;
+ if (array_proto != native_context->initial_object_prototype()) return false;
if (array_proto->elements() != heap->empty_fixed_array()) return false;
return array_proto->GetPrototype()->IsNull();
}
@@ -412,7 +426,7 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
- if (args == NULL || array->HasFastElements()) return elms;
+ if (args == NULL || array->HasFastObjectElements()) return elms;
if (array->HasFastDoubleElements()) {
ASSERT(elms == heap->empty_fixed_array());
MaybeObject* maybe_transition =
@@ -422,7 +436,7 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
}
} else if (map == heap->fixed_cow_array_map()) {
MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
- if (args == NULL || array->HasFastElements() ||
+ if (args == NULL || array->HasFastObjectElements() ||
maybe_writable_result->IsFailure()) {
return maybe_writable_result;
}
@@ -448,11 +462,11 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
JSArray* receiver) {
if (!FLAG_clever_optimizations) return false;
- Context* global_context = heap->isolate()->context()->global_context();
+ Context* native_context = heap->isolate()->context()->native_context();
JSObject* array_proto =
- JSObject::cast(global_context->array_function()->prototype());
+ JSObject::cast(native_context->array_function()->prototype());
return receiver->GetPrototype() == array_proto &&
- ArrayPrototypeHasNoElements(heap, global_context, array_proto);
+ ArrayPrototypeHasNoElements(heap, native_context, array_proto);
}
@@ -463,7 +477,7 @@ MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
HandleScope handleScope(isolate);
Handle<Object> js_builtin =
- GetProperty(Handle<JSObject>(isolate->global_context()->builtins()),
+ GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
name);
Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
int argc = args.length() - 1;
@@ -496,6 +510,10 @@ BUILTIN(ArrayPush) {
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
+ if (FLAG_harmony_observation && array->map()->is_observed()) {
+ return CallJsBuiltin(isolate, "ArrayPush", args);
+ }
+
int len = Smi::cast(array->length())->value();
int to_add = args.length() - 1;
if (to_add == 0) {
@@ -516,8 +534,8 @@ BUILTIN(ArrayPush) {
}
FixedArray* new_elms = FixedArray::cast(obj);
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, 0, len);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, 0, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
@@ -552,11 +570,15 @@ BUILTIN(ArrayPop) {
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
+ if (FLAG_harmony_observation && array->map()->is_observed()) {
+ return CallJsBuiltin(isolate, "ArrayPop", args);
+ }
+
int len = Smi::cast(array->length())->value();
if (len == 0) return heap->undefined_value();
// Get top element
- MaybeObject* top = elms->get(len - 1);
+ Object* top = elms->get(len - 1);
// Set the length.
array->set_length(Smi::FromInt(len - 1));
@@ -567,9 +589,7 @@ BUILTIN(ArrayPop) {
return top;
}
- top = array->GetPrototype()->GetElement(len - 1);
-
- return top;
+ return array->GetPrototype()->GetElement(len - 1);
}
@@ -588,7 +608,11 @@ BUILTIN(ArrayShift) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
+
+ if (FLAG_harmony_observation && array->map()->is_observed()) {
+ return CallJsBuiltin(isolate, "ArrayShift", args);
+ }
int len = Smi::cast(array->length())->value();
if (len == 0) return heap->undefined_value();
@@ -630,7 +654,11 @@ BUILTIN(ArrayUnshift) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
+
+ if (FLAG_harmony_observation && array->map()->is_observed()) {
+ return CallJsBuiltin(isolate, "ArrayUnshift", args);
+ }
int len = Smi::cast(array->length())->value();
int to_add = args.length() - 1;
@@ -652,8 +680,8 @@ BUILTIN(ArrayUnshift) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
FixedArray* new_elms = FixedArray::cast(obj);
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, to_add, len);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, to_add, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
@@ -682,7 +710,7 @@ BUILTIN(ArraySlice) {
int len = -1;
if (receiver->IsJSArray()) {
JSArray* array = JSArray::cast(receiver);
- if (!array->HasFastTypeElements() ||
+ if (!array->HasFastSmiOrObjectElements() ||
!IsJSArrayFastElementMovingAllowed(heap, array)) {
return CallJsBuiltin(isolate, "ArraySlice", args);
}
@@ -693,12 +721,12 @@ BUILTIN(ArraySlice) {
// Array.slice(arguments, ...) is quite a common idiom (notably more
// than 50% of invocations in Web apps). Treat it in C++ as well.
Map* arguments_map =
- isolate->context()->global_context()->arguments_boilerplate()->map();
+ isolate->context()->native_context()->arguments_boilerplate()->map();
bool is_arguments_object_with_fast_elements =
receiver->IsJSObject()
&& JSObject::cast(receiver)->map() == arguments_map
- && JSObject::cast(receiver)->HasFastTypeElements();
+ && JSObject::cast(receiver)->HasFastSmiOrObjectElements();
if (!is_arguments_object_with_fast_elements) {
return CallJsBuiltin(isolate, "ArraySlice", args);
}
@@ -763,9 +791,9 @@ BUILTIN(ArraySlice) {
JSArray* result_array;
if (!maybe_array->To(&result_array)) return maybe_array;
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, k,
+ CopyObjectToObjectElements(elms, elements_kind, k,
FixedArray::cast(result_array->elements()),
- FAST_ELEMENTS, 0, result_len);
+ elements_kind, 0, result_len);
return result_array;
}
@@ -786,7 +814,11 @@ BUILTIN(ArraySplice) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
+
+ if (FLAG_harmony_observation && array->map()->is_observed()) {
+ return CallJsBuiltin(isolate, "ArraySplice", args);
+ }
int len = Smi::cast(array->length())->value();
@@ -837,9 +869,9 @@ BUILTIN(ArraySplice) {
{
// Fill newly created array.
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, actual_start,
+ CopyObjectToObjectElements(elms, elements_kind, actual_start,
FixedArray::cast(result_array->elements()),
- FAST_ELEMENTS, 0, actual_delete_count);
+ elements_kind, 0, actual_delete_count);
}
int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
@@ -888,12 +920,13 @@ BUILTIN(ArraySplice) {
{
// Copy the part before actual_start as is.
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, 0, actual_start);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0,
+ new_elms, kind, 0, actual_start);
const int to_copy = len - actual_delete_count - actual_start;
- CopyObjectToObjectElements(elms, FAST_ELEMENTS,
+ CopyObjectToObjectElements(elms, kind,
actual_start + actual_delete_count,
- new_elms, FAST_ELEMENTS,
+ new_elms, kind,
actual_start + item_count, to_copy);
}
@@ -929,10 +962,10 @@ BUILTIN(ArraySplice) {
BUILTIN(ArrayConcat) {
Heap* heap = isolate->heap();
- Context* global_context = isolate->context()->global_context();
+ Context* native_context = isolate->context()->native_context();
JSObject* array_proto =
- JSObject::cast(global_context->array_function()->prototype());
- if (!ArrayPrototypeHasNoElements(heap, global_context, array_proto)) {
+ JSObject::cast(native_context->array_function()->prototype());
+ if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
@@ -940,11 +973,12 @@ BUILTIN(ArrayConcat) {
// and calculating total length.
int n_arguments = args.length();
int result_len = 0;
- ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
+ ElementsKind elements_kind = GetInitialFastElementsKind();
for (int i = 0; i < n_arguments; i++) {
Object* arg = args[i];
- if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastTypeElements()
- || JSArray::cast(arg)->GetPrototype() != array_proto) {
+ if (!arg->IsJSArray() ||
+ !JSArray::cast(arg)->HasFastSmiOrObjectElements() ||
+ JSArray::cast(arg)->GetPrototype() != array_proto) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
@@ -961,8 +995,18 @@ BUILTIN(ArrayConcat) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
- if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
- elements_kind = FAST_ELEMENTS;
+ if (!JSArray::cast(arg)->HasFastSmiElements()) {
+ if (IsFastSmiElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ elements_kind = FAST_ELEMENTS;
+ }
+ }
+ }
+
+ if (JSArray::cast(arg)->HasFastHoleyElements()) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
}
}
@@ -982,8 +1026,8 @@ BUILTIN(ArrayConcat) {
JSArray* array = JSArray::cast(args[i]);
int len = Smi::cast(array->length())->value();
FixedArray* elms = FixedArray::cast(array->elements());
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- result_elms, FAST_ELEMENTS,
+ CopyObjectToObjectElements(elms, elements_kind, 0,
+ result_elms, elements_kind,
start_pos, len);
start_pos += len;
}
@@ -1123,6 +1167,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
result = heap->undefined_value();
} else {
result = *reinterpret_cast<Object**>(*value);
+ result->VerifyApiCallResultType();
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -1199,6 +1244,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
result = heap->undefined_value();
} else {
result = *reinterpret_cast<Object**>(*value);
+ result->VerifyApiCallResultType();
}
}
// Check for exceptions and return result.
@@ -1266,6 +1312,11 @@ static void Generate_LoadIC_Normal(MacroAssembler* masm) {
}
+static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
+ LoadStubCompiler::GenerateLoadViaGetter(masm, Handle<JSFunction>());
+}
+
+
static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
KeyedLoadIC::GenerateInitialize(masm);
}
@@ -1363,6 +1414,11 @@ static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
}
+static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
+ StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
+}
+
+
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
}
@@ -1582,7 +1638,7 @@ void Builtins::SetUp(bool create_heap_objects) {
// For now we generate builtin adaptor code into a stack-allocated
// buffer, before copying it into individual code objects. Be careful
// with alignment, some platforms don't like unaligned code.
- union { int force_alignment; byte buffer[4*KB]; } u;
+ union { int force_alignment; byte buffer[8*KB]; } u;
// Traverse the list of builtins and generate an adaptor in a
// separate code object for each one.
diff --git a/src/3rdparty/v8/src/builtins.h b/src/3rdparty/v8/src/builtins.h
index 3ea3393..a2f752e 100644
--- a/src/3rdparty/v8/src/builtins.h
+++ b/src/3rdparty/v8/src/builtins.h
@@ -38,6 +38,25 @@ enum BuiltinExtraArguments {
};
+#define CODE_AGE_LIST_WITH_ARG(V, A) \
+ V(Quadragenarian, A) \
+ V(Quinquagenarian, A) \
+ V(Sexagenarian, A) \
+ V(Septuagenarian, A) \
+ V(Octogenarian, A)
+
+#define CODE_AGE_LIST_IGNORE_ARG(X, V) V(X)
+
+#define CODE_AGE_LIST(V) \
+ CODE_AGE_LIST_WITH_ARG(CODE_AGE_LIST_IGNORE_ARG, V)
+
+#define DECLARE_CODE_AGE_BUILTIN(C, V) \
+ V(Make##C##CodeYoungAgainOddMarking, BUILTIN, \
+ UNINITIALIZED, Code::kNoExtraICState) \
+ V(Make##C##CodeYoungAgainEvenMarking, BUILTIN, \
+ UNINITIALIZED, Code::kNoExtraICState)
+
+
// Define list of builtins implemented in C++.
#define BUILTIN_LIST_C(V) \
V(Illegal, NO_EXTRA_ARGUMENTS) \
@@ -66,6 +85,8 @@ enum BuiltinExtraArguments {
#define BUILTIN_LIST_A(V) \
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
+ V(InRecompileQueue, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
@@ -80,6 +101,8 @@ enum BuiltinExtraArguments {
Code::kNoExtraICState) \
V(LazyRecompile, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
+ V(ParallelRecompile, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
@@ -119,6 +142,8 @@ enum BuiltinExtraArguments {
Code::kNoExtraICState) \
V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
Code::kNoExtraICState) \
+ V(LoadIC_Getter_ForDeopt, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
\
V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
Code::kNoExtraICState) \
@@ -153,6 +178,8 @@ enum BuiltinExtraArguments {
kStrictMode) \
V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
kStrictMode) \
+ V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, \
+ kStrictMode) \
\
V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
Code::kNoExtraICState) \
@@ -187,8 +214,8 @@ enum BuiltinExtraArguments {
Code::kNoExtraICState) \
\
V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState)
-
+ Code::kNoExtraICState) \
+ CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
#ifdef ENABLE_DEBUGGER_SUPPORT
// Define list of builtins used by the debugger implemented in assembly.
@@ -347,6 +374,8 @@ class Builtins {
static void Generate_Adaptor(MacroAssembler* masm,
CFunctionId id,
BuiltinExtraArguments extra_args);
+ static void Generate_InRecompileQueue(MacroAssembler* masm);
+ static void Generate_ParallelRecompile(MacroAssembler* masm);
static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
@@ -369,6 +398,14 @@ class Builtins {
static void Generate_StringConstructCode(MacroAssembler* masm);
static void Generate_OnStackReplacement(MacroAssembler* masm);
+#define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C) \
+ static void Generate_Make##C##CodeYoungAgainEvenMarking( \
+ MacroAssembler* masm); \
+ static void Generate_Make##C##CodeYoungAgainOddMarking( \
+ MacroAssembler* masm);
+ CODE_AGE_LIST(DECLARE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DECLARE_CODE_AGE_BUILTIN_GENERATOR
+
static void InitBuiltinFunctionTable();
bool initialized_;
diff --git a/src/3rdparty/v8/src/checks.h b/src/3rdparty/v8/src/checks.h
index 608aa14..d0a0c2b 100644
--- a/src/3rdparty/v8/src/checks.h
+++ b/src/3rdparty/v8/src/checks.h
@@ -284,4 +284,12 @@ extern bool FLAG_enable_slow_asserts;
#define ASSERT_NOT_NULL(p) ASSERT_NE(NULL, p)
+// "Extra checks" are lightweight checks that are enabled in some release
+// builds.
+#ifdef ENABLE_EXTRA_CHECKS
+#define EXTRA_CHECK(condition) CHECK(condition)
+#else
+#define EXTRA_CHECK(condition) ((void) 0)
+#endif
+
#endif // V8_CHECKS_H_
diff --git a/src/3rdparty/v8/src/code-stubs.cc b/src/3rdparty/v8/src/code-stubs.cc
index 814e358..7a72059 100644
--- a/src/3rdparty/v8/src/code-stubs.cc
+++ b/src/3rdparty/v8/src/code-stubs.cc
@@ -142,7 +142,9 @@ Handle<Code> CodeStub::GetCode() {
}
Activate(code);
- ASSERT(!NeedsImmovableCode() || heap->lo_space()->Contains(code));
+ ASSERT(!NeedsImmovableCode() ||
+ heap->lo_space()->Contains(code) ||
+ heap->code_space()->FirstPage()->Contains(code->address()));
return Handle<Code>(code, isolate);
}
@@ -172,7 +174,9 @@ void ICCompareStub::AddToSpecialCache(Handle<Code> new_object) {
Isolate* isolate = new_object->GetIsolate();
Factory* factory = isolate->factory();
return Map::UpdateCodeCache(known_map_,
- factory->compare_ic_symbol(),
+ strict() ?
+ factory->strict_compare_ic_symbol() :
+ factory->compare_ic_symbol(),
new_object);
}
@@ -183,10 +187,16 @@ bool ICCompareStub::FindCodeInSpecialCache(Code** code_out) {
Code::Flags flags = Code::ComputeFlags(
static_cast<Code::Kind>(GetCodeKind()),
UNINITIALIZED);
+ ASSERT(op_ == Token::EQ || op_ == Token::EQ_STRICT);
Handle<Object> probe(
- known_map_->FindInCodeCache(*factory->compare_ic_symbol(), flags));
+ known_map_->FindInCodeCache(
+ strict() ?
+ *factory->strict_compare_ic_symbol() :
+ *factory->compare_ic_symbol(),
+ flags));
if (probe->IsCode()) {
*code_out = Code::cast(*probe);
+ ASSERT(op_ == (*code_out)->compare_operation() + Token::EQ);
return true;
}
return false;
@@ -262,10 +272,13 @@ void JSEntryStub::FinishCode(Handle<Code> code) {
void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind_) {
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
KeyedLoadStubCompiler::GenerateLoadFastElement(masm);
break;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(masm);
break;
case EXTERNAL_BYTE_ELEMENTS:
@@ -292,7 +305,9 @@ void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind_) {
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS: {
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS: {
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_js_array_,
elements_kind_,
@@ -300,6 +315,7 @@ void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
}
break;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
is_js_array_,
grow_mode_);
@@ -430,24 +446,32 @@ bool ToBooleanStub::Types::CanBeUndetectable() const {
void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
Label fail;
+ ASSERT(!IsFastHoleyElementsKind(from_) || IsFastHoleyElementsKind(to_));
if (!FLAG_trace_elements_transitions) {
- if (to_ == FAST_ELEMENTS) {
- if (from_ == FAST_SMI_ONLY_ELEMENTS) {
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- } else if (from_ == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastSmiOrObjectElementsKind(to_)) {
+ if (IsFastSmiOrObjectElementsKind(from_)) {
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm);
+ } else if (IsFastDoubleElementsKind(from_)) {
+ ASSERT(!IsFastSmiElementsKind(to_));
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
} else {
UNREACHABLE();
}
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_jsarray_,
- FAST_ELEMENTS,
+ to_,
grow_mode_);
- } else if (from_ == FAST_SMI_ONLY_ELEMENTS && to_ == FAST_DOUBLE_ELEMENTS) {
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ } else if (IsFastSmiElementsKind(from_) &&
+ IsFastDoubleElementsKind(to_)) {
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
is_jsarray_,
grow_mode_);
+ } else if (IsFastDoubleElementsKind(from_)) {
+ ASSERT(to_ == FAST_HOLEY_DOUBLE_ELEMENTS);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm);
} else {
UNREACHABLE();
}
@@ -456,4 +480,26 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
KeyedStoreIC::GenerateRuntimeSetProperty(masm, strict_mode_);
}
+
+FunctionEntryHook ProfileEntryHookStub::entry_hook_ = NULL;
+
+
+void ProfileEntryHookStub::EntryHookTrampoline(intptr_t function,
+ intptr_t stack_pointer) {
+ if (entry_hook_ != NULL)
+ entry_hook_(function, stack_pointer);
+}
+
+
+bool ProfileEntryHookStub::SetFunctionEntryHook(FunctionEntryHook entry_hook) {
+ // We don't allow setting a new entry hook over one that's
+ // already active, as the hooks won't stack.
+ if (entry_hook != 0 && entry_hook_ != 0)
+ return false;
+
+ entry_hook_ = entry_hook;
+ return true;
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/code-stubs.h b/src/3rdparty/v8/src/code-stubs.h
index e281a36..8288f4d 100644
--- a/src/3rdparty/v8/src/code-stubs.h
+++ b/src/3rdparty/v8/src/code-stubs.h
@@ -73,7 +73,8 @@ namespace internal {
V(DebuggerStatement) \
V(StringDictionaryLookup) \
V(ElementsTransitionAndStore) \
- V(StoreArrayLiteralElement)
+ V(StoreArrayLiteralElement) \
+ V(ProfileEntryHook)
// List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM
@@ -162,8 +163,7 @@ class CodeStub BASE_EMBEDDED {
bool FindCodeInCache(Code** code_out);
protected:
- static const int kMajorBits = 6;
- static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
+ static bool CanUseFPRegisters();
private:
// Nonvirtual wrapper around the stub-specific Generate function. Call
@@ -222,8 +222,9 @@ class CodeStub BASE_EMBEDDED {
MajorKeyBits::encode(MajorKey());
}
- class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
- class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
+ class MajorKeyBits: public BitField<uint32_t, 0, kStubMajorKeyBits> {};
+ class MinorKeyBits: public BitField<uint32_t,
+ kStubMajorKeyBits, kStubMinorKeyBits> {}; // NOLINT
friend class BreakPointIterator;
};
@@ -498,7 +499,7 @@ class ICCompareStub: public CodeStub {
virtual void FinishCode(Handle<Code> code) {
code->set_compare_state(state_);
- code->set_compare_operation(op_);
+ code->set_compare_operation(op_ - Token::EQ);
}
virtual CodeStub::Major MajorKey() { return CompareIC; }
@@ -1000,13 +1001,15 @@ class KeyedStoreElementStub : public CodeStub {
KeyedAccessGrowMode grow_mode)
: is_js_array_(is_js_array),
elements_kind_(elements_kind),
- grow_mode_(grow_mode) { }
+ grow_mode_(grow_mode),
+ fp_registers_(CanUseFPRegisters()) { }
Major MajorKey() { return KeyedStoreElement; }
int MinorKey() {
return ElementsKindBits::encode(elements_kind_) |
IsJSArrayBits::encode(is_js_array_) |
- GrowModeBits::encode(grow_mode_);
+ GrowModeBits::encode(grow_mode_) |
+ FPRegisters::encode(fp_registers_);
}
void Generate(MacroAssembler* masm);
@@ -1015,10 +1018,12 @@ class KeyedStoreElementStub : public CodeStub {
class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
class GrowModeBits: public BitField<KeyedAccessGrowMode, 8, 1> {};
class IsJSArrayBits: public BitField<bool, 9, 1> {};
+ class FPRegisters: public BitField<bool, 10, 1> {};
bool is_js_array_;
ElementsKind elements_kind_;
KeyedAccessGrowMode grow_mode_;
+ bool fp_registers_;
DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
};
@@ -1134,17 +1139,55 @@ class ElementsTransitionAndStoreStub : public CodeStub {
class StoreArrayLiteralElementStub : public CodeStub {
public:
- explicit StoreArrayLiteralElementStub() {}
+ StoreArrayLiteralElementStub()
+ : fp_registers_(CanUseFPRegisters()) { }
private:
+ class FPRegisters: public BitField<bool, 0, 1> {};
+
Major MajorKey() { return StoreArrayLiteralElement; }
- int MinorKey() { return 0; }
+ int MinorKey() { return FPRegisters::encode(fp_registers_); }
void Generate(MacroAssembler* masm);
+ bool fp_registers_;
+
DISALLOW_COPY_AND_ASSIGN(StoreArrayLiteralElementStub);
};
+
+class ProfileEntryHookStub : public CodeStub {
+ public:
+ explicit ProfileEntryHookStub() {}
+
+ // The profile entry hook function is not allowed to cause a GC.
+ virtual bool SometimesSetsUpAFrame() { return false; }
+
+ // Generates a call to the entry hook if it's enabled.
+ static void MaybeCallEntryHook(MacroAssembler* masm);
+
+ // Sets or unsets the entry hook function. Returns true on success,
+ // false on an attempt to replace a non-NULL entry hook with another
+ // non-NULL hook.
+ static bool SetFunctionEntryHook(FunctionEntryHook entry_hook);
+
+ static bool HasEntryHook() { return entry_hook_ != NULL; }
+
+ private:
+ static void EntryHookTrampoline(intptr_t function,
+ intptr_t stack_pointer);
+
+ Major MajorKey() { return ProfileEntryHook; }
+ int MinorKey() { return 0; }
+
+ void Generate(MacroAssembler* masm);
+
+ // The current function entry hook.
+ static FunctionEntryHook entry_hook_;
+
+ DISALLOW_COPY_AND_ASSIGN(ProfileEntryHookStub);
+};
+
} } // namespace v8::internal
#endif // V8_CODE_STUBS_H_
diff --git a/src/3rdparty/v8/src/codegen.h b/src/3rdparty/v8/src/codegen.h
index 50d70f2..08a777f 100644
--- a/src/3rdparty/v8/src/codegen.h
+++ b/src/3rdparty/v8/src/codegen.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -95,8 +95,8 @@ UnaryMathFunction CreateSqrtFunction();
class ElementsTransitionGenerator : public AllStatic {
public:
- static void GenerateSmiOnlyToObject(MacroAssembler* masm);
- static void GenerateSmiOnlyToDouble(MacroAssembler* masm, Label* fail);
+ static void GenerateMapChangeElementsTransition(MacroAssembler* masm);
+ static void GenerateSmiToDouble(MacroAssembler* masm, Label* fail);
static void GenerateDoubleToObject(MacroAssembler* masm, Label* fail);
private:
diff --git a/src/3rdparty/v8/src/collection.js b/src/3rdparty/v8/src/collection.js
index 75fe3d5..b3c2db7 100644
--- a/src/3rdparty/v8/src/collection.js
+++ b/src/3rdparty/v8/src/collection.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -79,7 +79,31 @@ function SetDelete(key) {
if (IS_UNDEFINED(key)) {
key = undefined_sentinel;
}
- return %SetDelete(this, key);
+ if (%SetHas(this, key)) {
+ %SetDelete(this, key);
+ return true;
+ } else {
+ return false;
+ }
+}
+
+
+function SetGetSize() {
+ if (!IS_SET(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Set.prototype.size', this]);
+ }
+ return %SetGetSize(this);
+}
+
+
+function SetClear() {
+ if (!IS_SET(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Set.prototype.clear', this]);
+ }
+ // Replace the internal table with a new empty table.
+ %SetInitialize(this);
}
@@ -124,7 +148,7 @@ function MapHas(key) {
if (IS_UNDEFINED(key)) {
key = undefined_sentinel;
}
- return !IS_UNDEFINED(%MapGet(this, key));
+ return %MapHas(this, key);
}
@@ -136,12 +160,26 @@ function MapDelete(key) {
if (IS_UNDEFINED(key)) {
key = undefined_sentinel;
}
- if (!IS_UNDEFINED(%MapGet(this, key))) {
- %MapSet(this, key, void 0);
- return true;
- } else {
- return false;
+ return %MapDelete(this, key);
+}
+
+
+function MapGetSize() {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.size', this]);
}
+ return %MapGetSize(this);
+}
+
+
+function MapClear() {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.clear', this]);
+ }
+ // Replace the internal table with a new empty table.
+ %MapInitialize(this);
}
@@ -186,7 +224,7 @@ function WeakMapHas(key) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
- return !IS_UNDEFINED(%WeakMapGet(this, key));
+ return %WeakMapHas(this, key);
}
@@ -198,12 +236,7 @@ function WeakMapDelete(key) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
- if (!IS_UNDEFINED(%WeakMapGet(this, key))) {
- %WeakMapSet(this, key, void 0);
- return true;
- } else {
- return false;
- }
+ return %WeakMapDelete(this, key);
}
// -------------------------------------------------------------------
@@ -220,18 +253,22 @@ function WeakMapDelete(key) {
%SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM);
// Set up the non-enumerable functions on the Set prototype object.
+ InstallGetter($Set.prototype, "size", SetGetSize);
InstallFunctions($Set.prototype, DONT_ENUM, $Array(
"add", SetAdd,
"has", SetHas,
- "delete", SetDelete
+ "delete", SetDelete,
+ "clear", SetClear
));
// Set up the non-enumerable functions on the Map prototype object.
+ InstallGetter($Map.prototype, "size", MapGetSize);
InstallFunctions($Map.prototype, DONT_ENUM, $Array(
"get", MapGet,
"set", MapSet,
"has", MapHas,
- "delete", MapDelete
+ "delete", MapDelete,
+ "clear", MapClear
));
// Set up the WeakMap constructor function.
diff --git a/src/3rdparty/v8/src/compilation-cache.cc b/src/3rdparty/v8/src/compilation-cache.cc
index 82cc223..904e84f 100644
--- a/src/3rdparty/v8/src/compilation-cache.cc
+++ b/src/3rdparty/v8/src/compilation-cache.cc
@@ -98,7 +98,7 @@ void CompilationSubCache::Age() {
void CompilationSubCache::IterateFunctions(ObjectVisitor* v) {
- Object* undefined = isolate()->heap()->raw_unchecked_undefined_value();
+ Object* undefined = isolate()->heap()->undefined_value();
for (int i = 0; i < generations_; i++) {
if (tables_[i] != undefined) {
reinterpret_cast<CompilationCacheTable*>(tables_[i])->IterateElements(v);
@@ -165,10 +165,12 @@ bool CompilationCacheScript::HasOrigin(
// be cached in the same script generation. Currently the first use
// will be cached, but subsequent code from different source / line
// won't.
-Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
- Handle<Object> name,
- int line_offset,
- int column_offset) {
+Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(
+ Handle<String> source,
+ Handle<Object> name,
+ int line_offset,
+ int column_offset,
+ Handle<Context> context) {
Object* result = NULL;
int generation;
@@ -177,7 +179,7 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
{ HandleScope scope(isolate());
for (generation = 0; generation < generations(); generation++) {
Handle<CompilationCacheTable> table = GetTable(generation);
- Handle<Object> probe(table->Lookup(*source), isolate());
+ Handle<Object> probe(table->Lookup(*source, *context), isolate());
if (probe->IsSharedFunctionInfo()) {
Handle<SharedFunctionInfo> function_info =
Handle<SharedFunctionInfo>::cast(probe);
@@ -214,7 +216,7 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
ASSERT(HasOrigin(shared, name, line_offset, column_offset));
// If the script was found in a later generation, we promote it to
// the first generation to let it survive longer in the cache.
- if (generation != 0) Put(source, shared);
+ if (generation != 0) Put(source, context, shared);
isolate()->counters()->compilation_cache_hits()->Increment();
return shared;
} else {
@@ -226,25 +228,28 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
MaybeObject* CompilationCacheScript::TryTablePut(
Handle<String> source,
+ Handle<Context> context,
Handle<SharedFunctionInfo> function_info) {
Handle<CompilationCacheTable> table = GetFirstTable();
- return table->Put(*source, *function_info);
+ return table->Put(*source, *context, *function_info);
}
Handle<CompilationCacheTable> CompilationCacheScript::TablePut(
Handle<String> source,
+ Handle<Context> context,
Handle<SharedFunctionInfo> function_info) {
CALL_HEAP_FUNCTION(isolate(),
- TryTablePut(source, function_info),
+ TryTablePut(source, context, function_info),
CompilationCacheTable);
}
void CompilationCacheScript::Put(Handle<String> source,
+ Handle<Context> context,
Handle<SharedFunctionInfo> function_info) {
HandleScope scope(isolate());
- SetFirstTable(TablePut(source, function_info));
+ SetFirstTable(TablePut(source, context, function_info));
}
@@ -380,15 +385,17 @@ void CompilationCache::Remove(Handle<SharedFunctionInfo> function_info) {
}
-Handle<SharedFunctionInfo> CompilationCache::LookupScript(Handle<String> source,
- Handle<Object> name,
- int line_offset,
- int column_offset) {
+Handle<SharedFunctionInfo> CompilationCache::LookupScript(
+ Handle<String> source,
+ Handle<Object> name,
+ int line_offset,
+ int column_offset,
+ Handle<Context> context) {
if (!IsEnabled()) {
return Handle<SharedFunctionInfo>::null();
}
- return script_.Lookup(source, name, line_offset, column_offset);
+ return script_.Lookup(source, name, line_offset, column_offset, context);
}
@@ -426,12 +433,13 @@ Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
void CompilationCache::PutScript(Handle<String> source,
+ Handle<Context> context,
Handle<SharedFunctionInfo> function_info) {
if (!IsEnabled()) {
return;
}
- script_.Put(source, function_info);
+ script_.Put(source, context, function_info);
}
diff --git a/src/3rdparty/v8/src/compilation-cache.h b/src/3rdparty/v8/src/compilation-cache.h
index 2f2fbad..7a236e8 100644
--- a/src/3rdparty/v8/src/compilation-cache.h
+++ b/src/3rdparty/v8/src/compilation-cache.h
@@ -98,16 +98,23 @@ class CompilationCacheScript : public CompilationSubCache {
Handle<SharedFunctionInfo> Lookup(Handle<String> source,
Handle<Object> name,
int line_offset,
- int column_offset);
- void Put(Handle<String> source, Handle<SharedFunctionInfo> function_info);
+ int column_offset,
+ Handle<Context> context);
+ void Put(Handle<String> source,
+ Handle<Context> context,
+ Handle<SharedFunctionInfo> function_info);
private:
MUST_USE_RESULT MaybeObject* TryTablePut(
- Handle<String> source, Handle<SharedFunctionInfo> function_info);
+ Handle<String> source,
+ Handle<Context> context,
+ Handle<SharedFunctionInfo> function_info);
// Note: Returns a new hash table if operation results in expansion.
Handle<CompilationCacheTable> TablePut(
- Handle<String> source, Handle<SharedFunctionInfo> function_info);
+ Handle<String> source,
+ Handle<Context> context,
+ Handle<SharedFunctionInfo> function_info);
bool HasOrigin(Handle<SharedFunctionInfo> function_info,
Handle<Object> name,
@@ -122,7 +129,7 @@ class CompilationCacheScript : public CompilationSubCache {
// Sub-cache for eval scripts. Two caches for eval are used. One for eval calls
-// in global contexts and one for eval calls in other contexts. The cache
+// in native contexts and one for eval calls in other contexts. The cache
// considers the following pieces of information when checking for matching
// entries:
// 1. The source string.
@@ -204,7 +211,8 @@ class CompilationCache {
Handle<SharedFunctionInfo> LookupScript(Handle<String> source,
Handle<Object> name,
int line_offset,
- int column_offset);
+ int column_offset,
+ Handle<Context> context);
// Finds the shared function info for a source string for eval in a
// given context. Returns an empty handle if the cache doesn't
@@ -223,6 +231,7 @@ class CompilationCache {
// Associate the (source, kind) pair to the shared function
// info. This may overwrite an existing mapping.
void PutScript(Handle<String> source,
+ Handle<Context> context,
Handle<SharedFunctionInfo> function_info);
// Associate the (source, context->closure()->shared(), kind) triple
diff --git a/src/3rdparty/v8/src/compiler-intrinsics.h b/src/3rdparty/v8/src/compiler-intrinsics.h
index b73e8ac..c1693b0 100644
--- a/src/3rdparty/v8/src/compiler-intrinsics.h
+++ b/src/3rdparty/v8/src/compiler-intrinsics.h
@@ -28,6 +28,10 @@
#ifndef V8_COMPILER_INTRINSICS_H_
#define V8_COMPILER_INTRINSICS_H_
+#if defined(_WIN32_WCE)
+#include <cmnintrin.h>
+#endif
+
namespace v8 {
namespace internal {
@@ -58,7 +62,7 @@ int CompilerIntrinsics::CountSetBits(uint32_t value) {
return __builtin_popcount(value);
}
-#elif defined(_MSC_VER)
+#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
#pragma intrinsic(_BitScanForward)
#pragma intrinsic(_BitScanReverse)
@@ -75,6 +79,21 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) {
return 31 - static_cast<int>(result);
}
+#elif defined(_WIN32_WCE)
+int CompilerIntrinsics::CountTrailingZeros(uint32_t value) {
+ // taken from http://graphics.stanford.edu/~seander/bithacks.html#ZerosOnRightFloatCast
+ float f = (float)(value & -value); // cast the least significant bit in v to a float
+ return (*(uint32_t *)&f >> 23) - 0x7f;
+}
+
+int CompilerIntrinsics::CountLeadingZeros(uint32_t value) {
+ return _CountLeadingZeros(value);
+}
+#else
+#error Unsupported compiler
+#endif
+
+#if defined(_MSC_VER)
int CompilerIntrinsics::CountSetBits(uint32_t value) {
// Manually count set bits.
value = ((value >> 1) & 0x55555555) + (value & 0x55555555);
@@ -84,9 +103,6 @@ int CompilerIntrinsics::CountSetBits(uint32_t value) {
value = ((value >> 16) & 0x0000ffff) + (value & 0x0000ffff);
return value;
}
-
-#else
-#error Unsupported compiler
#endif
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/compiler.cc b/src/3rdparty/v8/src/compiler.cc
index 4060b15..b1f40b6 100644
--- a/src/3rdparty/v8/src/compiler.cc
+++ b/src/3rdparty/v8/src/compiler.cc
@@ -51,7 +51,7 @@ namespace v8 {
namespace internal {
-CompilationInfo::CompilationInfo(Handle<Script> script)
+CompilationInfo::CompilationInfo(Handle<Script> script, Zone* zone)
: isolate_(script->GetIsolate()),
flags_(LanguageModeField::encode(CLASSIC_MODE)),
function_(NULL),
@@ -60,12 +60,15 @@ CompilationInfo::CompilationInfo(Handle<Script> script)
script_(script),
extension_(NULL),
pre_parse_data_(NULL),
- osr_ast_id_(AstNode::kNoNumber) {
+ osr_ast_id_(BailoutId::None()),
+ zone_(zone),
+ deferred_handles_(NULL) {
Initialize(BASE);
}
-CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
+CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
+ Zone* zone)
: isolate_(shared_info->GetIsolate()),
flags_(LanguageModeField::encode(CLASSIC_MODE) |
IsLazy::encode(true)),
@@ -76,12 +79,14 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
script_(Handle<Script>(Script::cast(shared_info->script()))),
extension_(NULL),
pre_parse_data_(NULL),
- osr_ast_id_(AstNode::kNoNumber) {
+ osr_ast_id_(BailoutId::None()),
+ zone_(zone),
+ deferred_handles_(NULL) {
Initialize(BASE);
}
-CompilationInfo::CompilationInfo(Handle<JSFunction> closure)
+CompilationInfo::CompilationInfo(Handle<JSFunction> closure, Zone* zone)
: isolate_(closure->GetIsolate()),
flags_(LanguageModeField::encode(CLASSIC_MODE) |
IsLazy::encode(true)),
@@ -93,11 +98,19 @@ CompilationInfo::CompilationInfo(Handle<JSFunction> closure)
script_(Handle<Script>(Script::cast(shared_info_->script()))),
extension_(NULL),
pre_parse_data_(NULL),
- osr_ast_id_(AstNode::kNoNumber) {
+ context_(closure->context()),
+ osr_ast_id_(BailoutId::None()),
+ zone_(zone),
+ deferred_handles_(NULL) {
Initialize(BASE);
}
+CompilationInfo::~CompilationInfo() {
+ delete deferred_handles_;
+}
+
+
// Disable optimization for the rest of the compilation pipeline.
void CompilationInfo::DisableOptimization() {
bool is_optimizable_closure =
@@ -118,7 +131,7 @@ bool CompilationInfo::ShouldSelfOptimize() {
FLAG_crankshaft &&
!function()->flags()->Contains(kDontSelfOptimize) &&
!function()->flags()->Contains(kDontOptimize) &&
- function()->scope()->AllowsLazyRecompilation() &&
+ function()->scope()->AllowsLazyCompilation() &&
(shared_info().is_null() || !shared_info()->optimization_disabled());
}
@@ -137,9 +150,8 @@ void CompilationInfo::AbortOptimization() {
// all. However crankshaft support recompilation of functions, so in this case
// the full compiler need not be be used if a debugger is attached, but only if
// break points has actually been set.
-static bool is_debugging_active() {
+static bool IsDebuggerActive(Isolate* isolate) {
#ifdef ENABLE_DEBUGGER_SUPPORT
- Isolate* isolate = Isolate::Current();
return V8::UseCrankshaft() ?
isolate->debug()->has_break_points() :
isolate->debugger()->IsDebuggerActive();
@@ -149,27 +161,32 @@ static bool is_debugging_active() {
}
-static bool AlwaysFullCompiler() {
- return FLAG_always_full_compiler || is_debugging_active();
+static bool AlwaysFullCompiler(Isolate* isolate) {
+ return FLAG_always_full_compiler || IsDebuggerActive(isolate);
}
-static void FinishOptimization(Handle<JSFunction> function, int64_t start) {
+void OptimizingCompiler::RecordOptimizationStats() {
+ Handle<JSFunction> function = info()->closure();
int opt_count = function->shared()->opt_count();
function->shared()->set_opt_count(opt_count + 1);
- double ms = static_cast<double>(OS::Ticks() - start) / 1000;
+ double ms_creategraph =
+ static_cast<double>(time_taken_to_create_graph_) / 1000;
+ double ms_optimize = static_cast<double>(time_taken_to_optimize_) / 1000;
+ double ms_codegen = static_cast<double>(time_taken_to_codegen_) / 1000;
if (FLAG_trace_opt) {
PrintF("[optimizing: ");
function->PrintName();
PrintF(" / %" V8PRIxPTR, reinterpret_cast<intptr_t>(*function));
- PrintF(" - took %0.3f ms]\n", ms);
+ PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
+ ms_codegen);
}
if (FLAG_trace_opt_stats) {
static double compilation_time = 0.0;
static int compiled_functions = 0;
static int code_size = 0;
- compilation_time += ms;
+ compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
compiled_functions++;
code_size += function->shared()->SourceSize();
PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
@@ -180,46 +197,54 @@ static void FinishOptimization(Handle<JSFunction> function, int64_t start) {
}
+// A return value of true indicates the compilation pipeline is still
+// going, not necessarily that we optimized the code.
static bool MakeCrankshaftCode(CompilationInfo* info) {
- // Test if we can optimize this function when asked to. We can only
- // do this after the scopes are computed.
- if (!V8::UseCrankshaft()) {
- info->DisableOptimization();
- }
+ OptimizingCompiler compiler(info);
+ OptimizingCompiler::Status status = compiler.CreateGraph();
- // In case we are not optimizing simply return the code from
- // the full code generator.
- if (!info->IsOptimizing()) {
- return FullCodeGenerator::MakeCode(info);
+ if (status != OptimizingCompiler::SUCCEEDED) {
+ return status != OptimizingCompiler::FAILED;
+ }
+ status = compiler.OptimizeGraph();
+ if (status != OptimizingCompiler::SUCCEEDED) {
+ status = compiler.AbortOptimization();
+ return status != OptimizingCompiler::FAILED;
}
+ status = compiler.GenerateAndInstallCode();
+ return status != OptimizingCompiler::FAILED;
+}
- // We should never arrive here if there is not code object on the
+
+OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
+ ASSERT(V8::UseCrankshaft());
+ ASSERT(info()->IsOptimizing());
+ ASSERT(!info()->IsCompilingForDebugging());
+
+ // We should never arrive here if there is no code object on the
// shared function object.
- Handle<Code> code(info->shared_info()->code());
+ Handle<Code> code(info()->shared_info()->code());
ASSERT(code->kind() == Code::FUNCTION);
// We should never arrive here if optimization has been disabled on the
// shared function info.
- ASSERT(!info->shared_info()->optimization_disabled());
+ ASSERT(!info()->shared_info()->optimization_disabled());
// Fall back to using the full code generator if it's not possible
// to use the Hydrogen-based optimizing compiler. We already have
// generated code for this from the shared function object.
- if (AlwaysFullCompiler()) {
- info->SetCode(code);
- return true;
+ if (AlwaysFullCompiler(info()->isolate())) {
+ info()->SetCode(code);
+ return SetLastStatus(BAILED_OUT);
}
// Limit the number of times we re-compile a functions with
// the optimizing compiler.
const int kMaxOptCount =
- FLAG_deopt_every_n_times == 0 ? Compiler::kDefaultMaxOptCount : 1000;
- if (info->shared_info()->opt_count() > kMaxOptCount) {
- info->AbortOptimization();
- info->shared_info()->DisableOptimization();
- // True indicates the compilation pipeline is still going, not
- // necessarily that we optimized the code.
- return true;
+ FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
+ if (info()->shared_info()->opt_count() > kMaxOptCount) {
+ info()->set_bailout_reason("optimized too many times");
+ return AbortOptimization();
}
// Due to an encoding limit on LUnallocated operands in the Lithium
@@ -230,27 +255,28 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
// The encoding is as a signed value, with parameters and receiver using
// the negative indices and locals the non-negative ones.
const int parameter_limit = -LUnallocated::kMinFixedIndex;
+ Scope* scope = info()->scope();
+ if ((scope->num_parameters() + 1) > parameter_limit) {
+ info()->set_bailout_reason("too many parameters");
+ return AbortOptimization();
+ }
+
const int locals_limit = LUnallocated::kMaxFixedIndex;
- Scope* scope = info->scope();
- if ((scope->num_parameters() + 1) > parameter_limit ||
- (info->osr_ast_id() != AstNode::kNoNumber &&
- scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit)) {
- info->AbortOptimization();
- info->shared_info()->DisableOptimization();
- // True indicates the compilation pipeline is still going, not
- // necessarily that we optimized the code.
- return true;
+ if (!info()->osr_ast_id().IsNone() &&
+ scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
+ info()->set_bailout_reason("too many parameters/locals");
+ return AbortOptimization();
}
// Take --hydrogen-filter into account.
- Handle<String> name = info->function()->debug_name();
+ Handle<String> name = info()->function()->debug_name();
if (*FLAG_hydrogen_filter != '\0') {
Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
if ((filter[0] == '-'
&& name->IsEqualTo(filter.SubVector(1, filter.length())))
|| (filter[0] != '-' && !name->IsEqualTo(filter))) {
- info->SetCode(code);
- return true;
+ info()->SetCode(code);
+ return SetLastStatus(BAILED_OUT);
}
}
@@ -258,20 +284,21 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
// doesn't have deoptimization support. Alternatively, we may decide to
// run the full code generator to get a baseline for the compile-time
// performance of the hydrogen-based compiler.
- int64_t start = OS::Ticks();
- bool should_recompile = !info->shared_info()->has_deoptimization_support();
+ Timer t(this, &time_taken_to_create_graph_);
+ bool should_recompile = !info()->shared_info()->has_deoptimization_support();
if (should_recompile || FLAG_hydrogen_stats) {
HPhase phase(HPhase::kFullCodeGen);
- CompilationInfo unoptimized(info->shared_info());
+ CompilationInfoWithZone unoptimized(info()->shared_info());
// Note that we use the same AST that we will use for generating the
// optimized code.
- unoptimized.SetFunction(info->function());
- unoptimized.SetScope(info->scope());
+ unoptimized.SetFunction(info()->function());
+ unoptimized.SetScope(info()->scope());
+ unoptimized.SetContext(info()->context());
if (should_recompile) unoptimized.EnableDeoptimizationSupport();
bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
if (should_recompile) {
- if (!succeeded) return false;
- Handle<SharedFunctionInfo> shared = info->shared_info();
+ if (!succeeded) return SetLastStatus(FAILED);
+ Handle<SharedFunctionInfo> shared = info()->shared_info();
shared->EnableDeoptimizationSupport(*unoptimized.code());
// The existing unoptimized code was replaced with the new one.
Compiler::RecordFunctionCompilation(
@@ -285,50 +312,93 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
// is safe as long as the unoptimized code has deoptimization
// support.
ASSERT(FLAG_always_opt || code->optimizable());
- ASSERT(info->shared_info()->has_deoptimization_support());
+ ASSERT(info()->shared_info()->has_deoptimization_support());
if (FLAG_trace_hydrogen) {
PrintF("-----------------------------------------------------------\n");
PrintF("Compiling method %s using hydrogen\n", *name->ToCString());
- HTracer::Instance()->TraceCompilation(info->function());
+ HTracer::Instance()->TraceCompilation(info()->function());
}
-
- Handle<Context> global_context(info->closure()->context()->global_context());
- TypeFeedbackOracle oracle(code, global_context, info->isolate());
- HGraphBuilder builder(info, &oracle);
+ Handle<Context> native_context(
+ info()->closure()->context()->native_context());
+ oracle_ = new(info()->zone()) TypeFeedbackOracle(
+ code, native_context, info()->isolate(), info()->zone());
+ graph_builder_ = new(info()->zone()) HGraphBuilder(info(), oracle_);
HPhase phase(HPhase::kTotal);
- HGraph* graph = builder.CreateGraph();
- if (info->isolate()->has_pending_exception()) {
- info->SetCode(Handle<Code>::null());
- return false;
+ graph_ = graph_builder_->CreateGraph();
+
+ if (info()->isolate()->has_pending_exception()) {
+ info()->SetCode(Handle<Code>::null());
+ return SetLastStatus(FAILED);
}
- if (graph != NULL) {
- Handle<Code> optimized_code = graph->Compile(info);
- if (!optimized_code.is_null()) {
- info->SetCode(optimized_code);
- FinishOptimization(info->closure(), start);
- return true;
+ // The function being compiled may have bailed out due to an inline
+ // candidate bailing out. In such a case, we don't disable
+ // optimization on the shared_info.
+ ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
+ if (graph_ == NULL) {
+ if (graph_builder_->inline_bailout()) {
+ info_->AbortOptimization();
+ return SetLastStatus(BAILED_OUT);
+ } else {
+ return AbortOptimization();
+ }
+ }
+
+ return SetLastStatus(SUCCEEDED);
+}
+
+OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
+ AssertNoAllocation no_gc;
+ NoHandleAllocation no_handles;
+
+ ASSERT(last_status() == SUCCEEDED);
+ Timer t(this, &time_taken_to_optimize_);
+ ASSERT(graph_ != NULL);
+ SmartArrayPointer<char> bailout_reason;
+ if (!graph_->Optimize(&bailout_reason)) {
+ if (!bailout_reason.is_empty()) graph_builder_->Bailout(*bailout_reason);
+ return SetLastStatus(BAILED_OUT);
+ } else {
+ chunk_ = LChunk::NewChunk(graph_);
+ if (chunk_ == NULL) {
+ return SetLastStatus(BAILED_OUT);
}
}
+ return SetLastStatus(SUCCEEDED);
+}
+
- // Keep using the shared code.
- info->AbortOptimization();
- if (!builder.inline_bailout()) {
- // Mark the shared code as unoptimizable unless it was an inlined
- // function that bailed out.
- info->shared_info()->DisableOptimization();
+OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() {
+ ASSERT(last_status() == SUCCEEDED);
+ Timer timer(this, &time_taken_to_codegen_);
+ ASSERT(chunk_ != NULL);
+ ASSERT(graph_ != NULL);
+ Handle<Code> optimized_code = chunk_->Codegen();
+ if (optimized_code.is_null()) {
+ info()->set_bailout_reason("code generation failed");
+ return AbortOptimization();
}
- // True indicates the compilation pipeline is still going, not necessarily
- // that we optimized the code.
- return true;
+ info()->SetCode(optimized_code);
+ RecordOptimizationStats();
+ return SetLastStatus(SUCCEEDED);
}
static bool GenerateCode(CompilationInfo* info) {
- return info->IsCompilingForDebugging() || !V8::UseCrankshaft() ?
- FullCodeGenerator::MakeCode(info) :
- MakeCrankshaftCode(info);
+ bool is_optimizing = V8::UseCrankshaft() &&
+ !info->IsCompilingForDebugging() &&
+ info->IsOptimizing();
+ if (is_optimizing) {
+ return MakeCrankshaftCode(info);
+ } else {
+ if (info->IsOptimizing()) {
+ // Have the CompilationInfo decide if the compilation should be
+ // BASE or NONOPT.
+ info->DisableOptimization();
+ }
+ return FullCodeGenerator::MakeCode(info);
+ }
}
@@ -346,7 +416,8 @@ bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
// the compilation info is set if compilation succeeded.
bool succeeded = MakeCode(info);
if (!info->shared_info().is_null()) {
- Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
+ Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
+ info->zone());
info->shared_info()->set_scope_info(*scope_info);
}
return succeeded;
@@ -356,12 +427,12 @@ bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
Isolate* isolate = info->isolate();
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
+ ZoneScope zone_scope(info->zone(), DELETE_ON_EXIT);
PostponeInterruptsScope postpone(isolate);
- ASSERT(!isolate->global_context().is_null());
+ ASSERT(!isolate->native_context().is_null());
Handle<Script> script = info->script();
- script->set_context_data((*isolate->global_context())->data());
+ script->set_context_data((*isolate->native_context())->data());
#ifdef ENABLE_DEBUGGER_SUPPORT
if (info->is_eval()) {
@@ -420,7 +491,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
lit->name(),
lit->materialized_literal_count(),
info->code(),
- ScopeInfo::Create(info->scope()));
+ ScopeInfo::Create(info->scope(), info->zone()));
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
Compiler::SetFunctionInfo(result, lit, true, script);
@@ -462,7 +533,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
script, Debugger::NO_AFTER_COMPILE_FLAGS);
#endif
- live_edit_tracker.RecordFunctionInfo(result, lit);
+ live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
return result;
}
@@ -472,6 +543,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
Handle<Object> script_name,
int line_offset,
int column_offset,
+ Handle<Context> context,
v8::Extension* extension,
ScriptDataImpl* pre_data,
Handle<Object> script_data,
@@ -493,7 +565,8 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
result = compilation_cache->LookupScript(source,
script_name,
line_offset,
- column_offset);
+ column_offset,
+ context);
}
if (result.is_null()) {
@@ -521,22 +594,24 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
: *script_data);
// Compile the function and add it to the cache.
- CompilationInfo info(script);
+ CompilationInfoWithZone info(script);
info.MarkAsGlobal();
info.SetExtension(extension);
info.SetPreParseData(pre_data);
+ info.SetContext(context);
if (FLAG_use_strict) {
info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
}
if (compile_flags & v8::Script::QmlMode) info.MarkAsQmlMode();
result = MakeFunctionInfo(&info);
- if (extension == NULL && !result.is_null()) {
- compilation_cache->PutScript(source, result);
+ if (extension == NULL && !result.is_null() && !result->dont_cache()) {
+ compilation_cache->PutScript(source, context, result);
}
} else {
if (result->ic_age() != HEAP->global_ic_age()) {
result->ResetForNewContext(HEAP->global_ic_age());
}
+ result->code()->MakeYoung();
}
if (result.is_null()) isolate->ReportPendingMessages();
@@ -571,17 +646,17 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
if (result.is_null()) {
// Create a script object describing the script to be compiled.
Handle<Script> script = isolate->factory()->NewScript(source);
- CompilationInfo info(script);
+ CompilationInfoWithZone info(script);
info.MarkAsEval();
if (is_global) info.MarkAsGlobal();
info.SetLanguageMode(language_mode);
if (qml_mode) info.MarkAsQmlMode();
- info.SetCallingContext(context);
+ info.SetContext(context);
result = MakeFunctionInfo(&info);
if (!result.is_null()) {
// Explicitly disable optimization for eval code. We're not yet prepared
// to handle eval-code in the optimizing compiler.
- result->DisableOptimization();
+ result->DisableOptimization("eval");
// If caller is strict mode, the result must be in strict mode or
// extended mode as well, but not the other way around. Consider:
@@ -591,23 +666,133 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
// extended mode.
ASSERT(language_mode != EXTENDED_MODE ||
result->is_extended_mode());
- compilation_cache->PutEval(
- source, context, is_global, result, scope_position);
+ if (!result->dont_cache()) {
+ compilation_cache->PutEval(
+ source, context, is_global, result, scope_position);
+ }
}
} else {
if (result->ic_age() != HEAP->global_ic_age()) {
result->ResetForNewContext(HEAP->global_ic_age());
}
+ result->code()->MakeYoung();
}
return result;
}
+static bool InstallFullCode(CompilationInfo* info) {
+ // Update the shared function info with the compiled code and the
+ // scope info. Please note, that the order of the shared function
+ // info initialization is important since set_scope_info might
+ // trigger a GC, causing the ASSERT below to be invalid if the code
+ // was flushed. By setting the code object last we avoid this.
+ Handle<SharedFunctionInfo> shared = info->shared_info();
+ Handle<Code> code = info->code();
+ Handle<JSFunction> function = info->closure();
+ Handle<ScopeInfo> scope_info =
+ ScopeInfo::Create(info->scope(), info->zone());
+ shared->set_scope_info(*scope_info);
+ shared->set_code(*code);
+ if (!function.is_null()) {
+ function->ReplaceCode(*code);
+ ASSERT(!function->IsOptimized());
+ }
+
+ // Set the expected number of properties for instances.
+ FunctionLiteral* lit = info->function();
+ int expected = lit->expected_property_count();
+ SetExpectedNofPropertiesFromEstimate(shared, expected);
+
+ // Set the optimization hints after performing lazy compilation, as
+ // these are not set when the function is set up as a lazily
+ // compiled function.
+ shared->SetThisPropertyAssignmentsInfo(
+ lit->has_only_simple_this_property_assignments(),
+ *lit->this_property_assignments());
+
+ // Check the function has compiled code.
+ ASSERT(shared->is_compiled());
+ shared->set_code_age(0);
+ shared->set_dont_optimize(lit->flags()->Contains(kDontOptimize));
+ shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+ shared->set_ast_node_count(lit->ast_node_count());
+
+ if (V8::UseCrankshaft() &&
+ !function.is_null() &&
+ !shared->optimization_disabled()) {
+ // If we're asked to always optimize, we compile the optimized
+ // version of the function right away - unless the debugger is
+ // active as it makes no sense to compile optimized code then.
+ if (FLAG_always_opt &&
+ !Isolate::Current()->DebuggerHasBreakPoints()) {
+ CompilationInfoWithZone optimized(function);
+ optimized.SetOptimizing(BailoutId::None());
+ return Compiler::CompileLazy(&optimized);
+ }
+ }
+ return true;
+}
+
+
+static void InstallCodeCommon(CompilationInfo* info) {
+ Handle<SharedFunctionInfo> shared = info->shared_info();
+ Handle<Code> code = info->code();
+ ASSERT(!code.is_null());
+
+ // Set optimizable to false if this is disallowed by the shared
+ // function info, e.g., we might have flushed the code and must
+ // reset this bit when lazy compiling the code again.
+ if (shared->optimization_disabled()) code->set_optimizable(false);
+
+ Compiler::RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
+}
+
+
+static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
+ Handle<Code> code = info->code();
+ if (FLAG_cache_optimized_code &&
+ info->osr_ast_id().IsNone() &&
+ code->kind() == Code::OPTIMIZED_FUNCTION) {
+ Handle<JSFunction> function = info->closure();
+ Handle<SharedFunctionInfo> shared(function->shared());
+ Handle<FixedArray> literals(function->literals());
+ Handle<Context> native_context(function->context()->native_context());
+ SharedFunctionInfo::AddToOptimizedCodeMap(
+ shared, native_context, code, literals);
+ }
+}
+
+
+static bool InstallCodeFromOptimizedCodeMap(CompilationInfo* info) {
+ if (FLAG_cache_optimized_code &&
+ info->osr_ast_id().IsNone() &&
+ info->IsOptimizing()) {
+ Handle<SharedFunctionInfo> shared = info->shared_info();
+ Handle<JSFunction> function = info->closure();
+ ASSERT(!function.is_null());
+ Handle<Context> native_context(function->context()->native_context());
+ int index = shared->SearchOptimizedCodeMap(*native_context);
+ if (index > 0) {
+ if (FLAG_trace_opt) {
+ PrintF("[found optimized code for: ");
+ function->PrintName();
+ PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(*function));
+ }
+ // Caching of optimized code enabled and optimized code found.
+ shared->InstallFromOptimizedCodeMap(*function, index);
+ return true;
+ }
+ }
+ return false;
+}
+
+
bool Compiler::CompileLazy(CompilationInfo* info) {
Isolate* isolate = info->isolate();
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
+ ZoneScope zone_scope(info->zone(), DELETE_ON_EXIT);
// The VM is in the COMPILER state until exiting this function.
VMState state(isolate, COMPILER);
@@ -618,6 +803,8 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
int compiled_size = shared->end_position() - shared->start_position();
isolate->counters()->total_compile_size()->Increment(compiled_size);
+ if (InstallCodeFromOptimizedCodeMap(info)) return true;
+
// Generate the AST for the lazily compiled function.
if (ParserApi::Parse(info, kNoParsingFlags)) {
// Measure how long it takes to do the lazy compilation; only take the
@@ -642,68 +829,17 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
isolate->StackOverflow();
}
} else {
- ASSERT(!info->code().is_null());
- Handle<Code> code = info->code();
- // Set optimizable to false if this is disallowed by the shared
- // function info, e.g., we might have flushed the code and must
- // reset this bit when lazy compiling the code again.
- if (shared->optimization_disabled()) code->set_optimizable(false);
-
- Handle<JSFunction> function = info->closure();
- RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
+ InstallCodeCommon(info);
if (info->IsOptimizing()) {
+ Handle<Code> code = info->code();
ASSERT(shared->scope_info() != ScopeInfo::Empty());
- function->ReplaceCode(*code);
+ info->closure()->ReplaceCode(*code);
+ InsertCodeIntoOptimizedCodeMap(info);
+ return true;
} else {
- // Update the shared function info with the compiled code and the
- // scope info. Please note, that the order of the shared function
- // info initialization is important since set_scope_info might
- // trigger a GC, causing the ASSERT below to be invalid if the code
- // was flushed. By setting the code object last we avoid this.
- Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
- shared->set_scope_info(*scope_info);
- shared->set_code(*code);
- if (!function.is_null()) {
- function->ReplaceCode(*code);
- ASSERT(!function->IsOptimized());
- }
-
- // Set the expected number of properties for instances.
- FunctionLiteral* lit = info->function();
- int expected = lit->expected_property_count();
- SetExpectedNofPropertiesFromEstimate(shared, expected);
-
- // Set the optimization hints after performing lazy compilation, as
- // these are not set when the function is set up as a lazily
- // compiled function.
- shared->SetThisPropertyAssignmentsInfo(
- lit->has_only_simple_this_property_assignments(),
- *lit->this_property_assignments());
-
- // Check the function has compiled code.
- ASSERT(shared->is_compiled());
- shared->set_code_age(0);
- shared->set_dont_optimize(lit->flags()->Contains(kDontOptimize));
- shared->set_dont_inline(lit->flags()->Contains(kDontInline));
- shared->set_ast_node_count(lit->ast_node_count());
-
- if (V8::UseCrankshaft()&&
- !function.is_null() &&
- !shared->optimization_disabled()) {
- // If we're asked to always optimize, we compile the optimized
- // version of the function right away - unless the debugger is
- // active as it makes no sense to compile optimized code then.
- if (FLAG_always_opt &&
- !Isolate::Current()->DebuggerHasBreakPoints()) {
- CompilationInfo optimized(function);
- optimized.SetOptimizing(AstNode::kNoNumber);
- return CompileLazy(&optimized);
- }
- }
+ return InstallFullCode(info);
}
-
- return true;
}
}
@@ -712,10 +848,97 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
}
+void Compiler::RecompileParallel(Handle<JSFunction> closure) {
+ if (closure->IsInRecompileQueue()) return;
+ ASSERT(closure->IsMarkedForParallelRecompilation());
+
+ Isolate* isolate = closure->GetIsolate();
+ if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
+ if (FLAG_trace_parallel_recompilation) {
+ PrintF(" ** Compilation queue, will retry opting on next run.\n");
+ }
+ return;
+ }
+
+ SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
+ VMState state(isolate, PARALLEL_COMPILER_PROLOGUE);
+ PostponeInterruptsScope postpone(isolate);
+
+ Handle<SharedFunctionInfo> shared = info->shared_info();
+ int compiled_size = shared->end_position() - shared->start_position();
+ isolate->counters()->total_compile_size()->Increment(compiled_size);
+ info->SetOptimizing(BailoutId::None());
+
+ {
+ CompilationHandleScope handle_scope(*info);
+
+ if (InstallCodeFromOptimizedCodeMap(*info)) return;
+
+ if (ParserApi::Parse(*info, kNoParsingFlags)) {
+ LanguageMode language_mode = info->function()->language_mode();
+ info->SetLanguageMode(language_mode);
+ shared->set_language_mode(language_mode);
+ info->SaveHandles();
+
+ if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) {
+ OptimizingCompiler* compiler =
+ new(info->zone()) OptimizingCompiler(*info);
+ OptimizingCompiler::Status status = compiler->CreateGraph();
+ if (status == OptimizingCompiler::SUCCEEDED) {
+ isolate->optimizing_compiler_thread()->QueueForOptimization(compiler);
+ shared->code()->set_profiler_ticks(0);
+ closure->ReplaceCode(isolate->builtins()->builtin(
+ Builtins::kInRecompileQueue));
+ info.Detach();
+ } else if (status == OptimizingCompiler::BAILED_OUT) {
+ isolate->clear_pending_exception();
+ InstallFullCode(*info);
+ }
+ }
+ }
+ }
+
+ if (isolate->has_pending_exception()) {
+ isolate->clear_pending_exception();
+ }
+}
+
+
+void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) {
+ SmartPointer<CompilationInfo> info(optimizing_compiler->info());
+ // If crankshaft succeeded, install the optimized code else install
+ // the unoptimized code.
+ OptimizingCompiler::Status status = optimizing_compiler->last_status();
+ if (status != OptimizingCompiler::SUCCEEDED) {
+ optimizing_compiler->info()->set_bailout_reason(
+ "failed/bailed out last time");
+ status = optimizing_compiler->AbortOptimization();
+ } else {
+ status = optimizing_compiler->GenerateAndInstallCode();
+ ASSERT(status == OptimizingCompiler::SUCCEEDED ||
+ status == OptimizingCompiler::BAILED_OUT);
+ }
+
+ InstallCodeCommon(*info);
+ if (status == OptimizingCompiler::SUCCEEDED) {
+ Handle<Code> code = info->code();
+ ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty());
+ info->closure()->ReplaceCode(*code);
+ if (info->shared_info()->SearchOptimizedCodeMap(
+ info->closure()->context()->native_context()) == -1) {
+ InsertCodeIntoOptimizedCodeMap(*info);
+ }
+ } else {
+ info->SetCode(Handle<Code>(info->shared_info()->code()));
+ InstallFullCode(*info);
+ }
+}
+
+
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
Handle<Script> script) {
// Precondition: code has been parsed and scopes have been analyzed.
- CompilationInfo info(script);
+ CompilationInfoWithZone info(script);
info.SetFunction(literal);
info.SetScope(literal->scope());
info.SetLanguageMode(literal->scope()->language_mode());
@@ -726,19 +949,24 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
// builtins cannot be handled lazily by the parser, since we have to know
// if a function uses the special natives syntax, which is something the
// parser records.
+ // If the debugger requests compilation for break points, we cannot be
+ // aggressive about lazy compilation, because it might trigger compilation
+ // of functions without an outer context when setting a breakpoint through
+ // Debug::FindSharedFunctionInfoInScript.
+ bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
bool allow_lazy = literal->AllowsLazyCompilation() &&
- !LiveEditFunctionTracker::IsActive(info.isolate());
+ !LiveEditFunctionTracker::IsActive(info.isolate()) &&
+ (!info.isolate()->DebuggerHasBreakPoints() || allow_lazy_without_ctx);
Handle<ScopeInfo> scope_info(ScopeInfo::Empty());
// Generate code
- if (FLAG_lazy && allow_lazy) {
+ if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
Handle<Code> code = info.isolate()->builtins()->LazyCompile();
info.SetCode(code);
- } else if ((V8::UseCrankshaft() && MakeCrankshaftCode(&info)) ||
- (!V8::UseCrankshaft() && FullCodeGenerator::MakeCode(&info))) {
+ } else if (GenerateCode(&info)) {
ASSERT(!info.code().is_null());
- scope_info = ScopeInfo::Create(info.scope());
+ scope_info = ScopeInfo::Create(info.scope(), info.zone());
} else {
return Handle<SharedFunctionInfo>::null();
}
@@ -752,12 +980,13 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
SetFunctionInfo(result, literal, false, script);
RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
result->set_allows_lazy_compilation(allow_lazy);
+ result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
// Set the expected number of properties for instances and return
// the resulting function.
SetExpectedNofPropertiesFromEstimate(result,
literal->expected_property_count());
- live_edit_tracker.RecordFunctionInfo(result, literal);
+ live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
return result;
}
@@ -784,6 +1013,8 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
lit->has_only_simple_this_property_assignments(),
*lit->this_property_assignments());
function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
+ function_info->set_allows_lazy_compilation_without_context(
+ lit->AllowsLazyCompilationWithoutContext());
function_info->set_language_mode(lit->language_mode());
function_info->set_qml_mode(lit->qml_mode());
function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
@@ -792,6 +1023,7 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
function_info->set_is_function(lit->is_function());
function_info->set_dont_optimize(lit->flags()->Contains(kDontOptimize));
function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
+ function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
}
@@ -804,7 +1036,7 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
// Log the code generation. If source information is available include
// script name and line number. Check explicitly whether logging is
// enabled as finding the line number is not free.
- if (info->isolate()->logger()->is_logging() ||
+ if (info->isolate()->logger()->is_logging_code_events() ||
CpuProfiler::is_profiling(info->isolate())) {
Handle<Script> script = info->script();
Handle<Code> code = info->code();
diff --git a/src/3rdparty/v8/src/compiler.h b/src/3rdparty/v8/src/compiler.h
index 09583c0..b119775 100644
--- a/src/3rdparty/v8/src/compiler.h
+++ b/src/3rdparty/v8/src/compiler.h
@@ -39,16 +39,21 @@ class ScriptDataImpl;
// CompilationInfo encapsulates some information known at compile time. It
// is constructed based on the resources available at compile-time.
-class CompilationInfo BASE_EMBEDDED {
+class CompilationInfo {
public:
- explicit CompilationInfo(Handle<Script> script);
- explicit CompilationInfo(Handle<SharedFunctionInfo> shared_info);
- explicit CompilationInfo(Handle<JSFunction> closure);
+ CompilationInfo(Handle<Script> script, Zone* zone);
+ CompilationInfo(Handle<SharedFunctionInfo> shared_info, Zone* zone);
+ CompilationInfo(Handle<JSFunction> closure, Zone* zone);
+
+ virtual ~CompilationInfo();
Isolate* isolate() {
ASSERT(Isolate::Current() == isolate_);
return isolate_;
}
+ Zone* zone() {
+ return zone_;
+ }
bool is_lazy() const { return IsLazy::decode(flags_); }
bool is_eval() const { return IsEval::decode(flags_); }
bool is_global() const { return IsGlobal::decode(flags_); }
@@ -68,8 +73,8 @@ class CompilationInfo BASE_EMBEDDED {
Handle<Script> script() const { return script_; }
v8::Extension* extension() const { return extension_; }
ScriptDataImpl* pre_parse_data() const { return pre_parse_data_; }
- Handle<Context> calling_context() const { return calling_context_; }
- int osr_ast_id() const { return osr_ast_id_; }
+ Handle<Context> context() const { return context_; }
+ BailoutId osr_ast_id() const { return osr_ast_id_; }
void MarkAsEval() {
ASSERT(!is_lazy());
@@ -119,13 +124,8 @@ class CompilationInfo BASE_EMBEDDED {
ASSERT(!is_lazy());
pre_parse_data_ = pre_parse_data;
}
- void SetCallingContext(Handle<Context> context) {
- ASSERT(is_eval());
- calling_context_ = context;
- }
- void SetOsrAstId(int osr_ast_id) {
- ASSERT(IsOptimizing());
- osr_ast_id_ = osr_ast_id;
+ void SetContext(Handle<Context> context) {
+ context_ = context;
}
void MarkCompilingForDebugging(Handle<Code> current_code) {
ASSERT(mode_ != OPTIMIZE);
@@ -142,17 +142,18 @@ class CompilationInfo BASE_EMBEDDED {
}
bool has_global_object() const {
- return !closure().is_null() && (closure()->context()->global() != NULL);
+ return !closure().is_null() &&
+ (closure()->context()->global_object() != NULL);
}
GlobalObject* global_object() const {
- return has_global_object() ? closure()->context()->global() : NULL;
+ return has_global_object() ? closure()->context()->global_object() : NULL;
}
// Accessors for the different compilation modes.
bool IsOptimizing() const { return mode_ == OPTIMIZE; }
bool IsOptimizable() const { return mode_ == BASE; }
- void SetOptimizing(int osr_ast_id) {
+ void SetOptimizing(BailoutId osr_ast_id) {
SetMode(OPTIMIZE);
osr_ast_id_ = osr_ast_id;
}
@@ -174,6 +175,21 @@ class CompilationInfo BASE_EMBEDDED {
// current compilation pipeline.
void AbortOptimization();
+ void set_deferred_handles(DeferredHandles* deferred_handles) {
+ ASSERT(deferred_handles_ == NULL);
+ deferred_handles_ = deferred_handles;
+ }
+
+ void SaveHandles() {
+ SaveHandle(&closure_);
+ SaveHandle(&shared_info_);
+ SaveHandle(&context_);
+ SaveHandle(&script_);
+ }
+
+ const char* bailout_reason() const { return bailout_reason_; }
+ void set_bailout_reason(const char* reason) { bailout_reason_ = reason; }
+
private:
Isolate* isolate_;
@@ -188,8 +204,6 @@ class CompilationInfo BASE_EMBEDDED {
NONOPT
};
- CompilationInfo() : function_(NULL) {}
-
void Initialize(Mode mode) {
mode_ = V8::UseCrankshaft() ? mode : NONOPT;
ASSERT(!script_.is_null());
@@ -203,6 +217,7 @@ class CompilationInfo BASE_EMBEDDED {
if (!shared_info_.is_null() && shared_info_->qml_mode()) {
MarkAsQmlMode();
}
+ set_bailout_reason("unknown");
}
void SetMode(Mode mode) {
@@ -254,18 +269,148 @@ class CompilationInfo BASE_EMBEDDED {
v8::Extension* extension_;
ScriptDataImpl* pre_parse_data_;
- // The context of the caller is needed for eval code, and will be a null
- // handle otherwise.
- Handle<Context> calling_context_;
+ // The context of the caller for eval code, and the global context for a
+ // global script. Will be a null handle otherwise.
+ Handle<Context> context_;
// Compilation mode flag and whether deoptimization is allowed.
Mode mode_;
- int osr_ast_id_;
+ BailoutId osr_ast_id_;
+
+ // The zone from which the compilation pipeline working on this
+ // CompilationInfo allocates.
+ Zone* zone_;
+
+ DeferredHandles* deferred_handles_;
+
+ template<typename T>
+ void SaveHandle(Handle<T> *object) {
+ if (!object->is_null()) {
+ Handle<T> handle(*(*object));
+ *object = handle;
+ }
+ }
+
+ const char* bailout_reason_;
DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};
+// Exactly like a CompilationInfo, except also creates and enters a
+// Zone on construction and deallocates it on exit.
+class CompilationInfoWithZone: public CompilationInfo {
+ public:
+ explicit CompilationInfoWithZone(Handle<Script> script)
+ : CompilationInfo(script, &zone_),
+ zone_(script->GetIsolate()),
+ zone_scope_(&zone_, DELETE_ON_EXIT) {}
+ explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
+ : CompilationInfo(shared_info, &zone_),
+ zone_(shared_info->GetIsolate()),
+ zone_scope_(&zone_, DELETE_ON_EXIT) {}
+ explicit CompilationInfoWithZone(Handle<JSFunction> closure)
+ : CompilationInfo(closure, &zone_),
+ zone_(closure->GetIsolate()),
+ zone_scope_(&zone_, DELETE_ON_EXIT) {}
+
+ private:
+ Zone zone_;
+ ZoneScope zone_scope_;
+};
+
+
+// A wrapper around a CompilationInfo that detaches the Handles from
+// the underlying DeferredHandleScope and stores them in info_ on
+// destruction.
+class CompilationHandleScope BASE_EMBEDDED {
+ public:
+ explicit CompilationHandleScope(CompilationInfo* info)
+ : deferred_(info->isolate()), info_(info) {}
+ ~CompilationHandleScope() {
+ info_->set_deferred_handles(deferred_.Detach());
+ }
+
+ private:
+ DeferredHandleScope deferred_;
+ CompilationInfo* info_;
+};
+
+
+class HGraph;
+class HGraphBuilder;
+class LChunk;
+
+// A helper class that calls the three compilation phases in
+// Crankshaft and keeps track of its state. The three phases
+// CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
+// fail, bail-out to the full code generator or succeed. Apart from
+// their return value, the status of the phase last run can be checked
+// using last_status().
+class OptimizingCompiler: public ZoneObject {
+ public:
+ explicit OptimizingCompiler(CompilationInfo* info)
+ : info_(info),
+ oracle_(NULL),
+ graph_builder_(NULL),
+ graph_(NULL),
+ chunk_(NULL),
+ time_taken_to_create_graph_(0),
+ time_taken_to_optimize_(0),
+ time_taken_to_codegen_(0),
+ last_status_(FAILED) { }
+
+ enum Status {
+ FAILED, BAILED_OUT, SUCCEEDED
+ };
+
+ MUST_USE_RESULT Status CreateGraph();
+ MUST_USE_RESULT Status OptimizeGraph();
+ MUST_USE_RESULT Status GenerateAndInstallCode();
+
+ Status last_status() const { return last_status_; }
+ CompilationInfo* info() const { return info_; }
+
+ MUST_USE_RESULT Status AbortOptimization() {
+ info_->AbortOptimization();
+ info_->shared_info()->DisableOptimization(info_->bailout_reason());
+ return SetLastStatus(BAILED_OUT);
+ }
+
+ private:
+ CompilationInfo* info_;
+ TypeFeedbackOracle* oracle_;
+ HGraphBuilder* graph_builder_;
+ HGraph* graph_;
+ LChunk* chunk_;
+ int64_t time_taken_to_create_graph_;
+ int64_t time_taken_to_optimize_;
+ int64_t time_taken_to_codegen_;
+ Status last_status_;
+
+ MUST_USE_RESULT Status SetLastStatus(Status status) {
+ last_status_ = status;
+ return last_status_;
+ }
+ void RecordOptimizationStats();
+
+ struct Timer {
+ Timer(OptimizingCompiler* compiler, int64_t* location)
+ : compiler_(compiler),
+ start_(OS::Ticks()),
+ location_(location) { }
+
+ ~Timer() {
+ *location_ += (OS::Ticks() - start_);
+ }
+
+ OptimizingCompiler* compiler_;
+ int64_t start_;
+ int64_t* location_;
+ };
+};
+
+
// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function w/o
@@ -279,10 +424,6 @@ class CompilationInfo BASE_EMBEDDED {
class Compiler : public AllStatic {
public:
- // Default maximum number of function optimization attempts before we
- // give up.
- static const int kDefaultMaxOptCount = 10;
-
static const int kMaxInliningLevels = 3;
// Call count before primitive functions trigger their own optimization.
@@ -297,6 +438,7 @@ class Compiler : public AllStatic {
Handle<Object> script_name,
int line_offset,
int column_offset,
+ Handle<Context> context,
v8::Extension* extension,
ScriptDataImpl* pre_data,
Handle<Object> script_data,
@@ -315,6 +457,8 @@ class Compiler : public AllStatic {
// success and false if the compilation resulted in a stack overflow.
static bool CompileLazy(CompilationInfo* info);
+ static void RecompileParallel(Handle<JSFunction> function);
+
// Compile a shared function info object (the function is possibly lazily
// compiled).
static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
@@ -326,6 +470,8 @@ class Compiler : public AllStatic {
bool is_toplevel,
Handle<Script> script);
+ static void InstallOptimizedCode(OptimizingCompiler* info);
+
#ifdef ENABLE_DEBUGGER_SUPPORT
static bool MakeCodeForLiveEdit(CompilationInfo* info);
#endif
diff --git a/src/3rdparty/v8/src/contexts.cc b/src/3rdparty/v8/src/contexts.cc
index cf07cc6..662e326 100644
--- a/src/3rdparty/v8/src/contexts.cc
+++ b/src/3rdparty/v8/src/contexts.cc
@@ -36,7 +36,7 @@ namespace internal {
Context* Context::declaration_context() {
Context* current = this;
- while (!current->IsFunctionContext() && !current->IsGlobalContext()) {
+ while (!current->IsFunctionContext() && !current->IsNativeContext()) {
current = current->previous();
ASSERT(current->closure() == closure());
}
@@ -45,7 +45,7 @@ Context* Context::declaration_context() {
JSBuiltinsObject* Context::builtins() {
- GlobalObject* object = global();
+ GlobalObject* object = global_object();
if (object->IsJSGlobalObject()) {
return JSGlobalObject::cast(object)->builtins();
} else {
@@ -55,19 +55,19 @@ JSBuiltinsObject* Context::builtins() {
}
-Context* Context::global_context() {
+Context* Context::native_context() {
// Fast case: the global object for this context has been set. In
// that case, the global object has a direct pointer to the global
// context.
- if (global()->IsGlobalObject()) {
- return global()->global_context();
+ if (global_object()->IsGlobalObject()) {
+ return global_object()->native_context();
}
// During bootstrapping, the global object might not be set and we
- // have to search the context chain to find the global context.
+ // have to search the context chain to find the native context.
ASSERT(Isolate::Current()->bootstrapper()->IsActive());
Context* current = this;
- while (!current->IsGlobalContext()) {
+ while (!current->IsNativeContext()) {
JSFunction* closure = JSFunction::cast(current->closure());
current = Context::cast(closure->context());
}
@@ -76,11 +76,11 @@ Context* Context::global_context() {
JSObject* Context::global_proxy() {
- return global_context()->global_proxy_object();
+ return native_context()->global_proxy_object();
}
void Context::set_global_proxy(JSObject* object) {
- global_context()->set_global_proxy_object(object);
+ native_context()->set_global_proxy_object(object);
}
@@ -109,17 +109,17 @@ Handle<Object> Context::Lookup(Handle<String> name,
do {
if (FLAG_trace_contexts) {
PrintF(" - looking in context %p", reinterpret_cast<void*>(*context));
- if (context->IsGlobalContext()) PrintF(" (global context)");
+ if (context->IsNativeContext()) PrintF(" (native context)");
PrintF("\n");
}
- if (qml_global.is_null() && !context->qml_global()->IsUndefined()) {
- qml_global = Handle<JSObject>(context->qml_global(), isolate);
- qml_global_global = Handle<JSObject>(context->global(), isolate);
+ if (qml_global.is_null() && !context->qml_global_object()->IsUndefined()) {
+ qml_global = Handle<JSObject>(context->qml_global_object(), isolate);
+ qml_global_global = Handle<JSObject>(context->global_object(), isolate);
}
// 1. Check global objects, subjects of with, and extension objects.
- if (context->IsGlobalContext() ||
+ if (context->IsNativeContext() ||
context->IsWithContext() ||
(context->IsFunctionContext() && context->has_extension())) {
Handle<JSObject> object(JSObject::cast(context->extension()), isolate);
@@ -234,7 +234,7 @@ Handle<Object> Context::Lookup(Handle<String> name,
}
// 3. Prepare to continue with the previous (next outermost) context.
- if (context->IsGlobalContext()) {
+ if (context->IsNativeContext()) {
follow_context_chain = false;
} else {
context = Handle<Context>(context->previous(), isolate);
@@ -276,19 +276,19 @@ Handle<Object> Context::Lookup(Handle<String> name,
void Context::AddOptimizedFunction(JSFunction* function) {
- ASSERT(IsGlobalContext());
+ ASSERT(IsNativeContext());
#ifdef DEBUG
- Object* element = get(OPTIMIZED_FUNCTIONS_LIST);
- while (!element->IsUndefined()) {
- CHECK(element != function);
- element = JSFunction::cast(element)->next_function_link();
+ if (FLAG_enable_slow_asserts) {
+ Object* element = get(OPTIMIZED_FUNCTIONS_LIST);
+ while (!element->IsUndefined()) {
+ CHECK(element != function);
+ element = JSFunction::cast(element)->next_function_link();
+ }
}
- CHECK(function->next_function_link()->IsUndefined());
-
- // Check that the context belongs to the weak global contexts list.
+ // Check that the context belongs to the weak native contexts list.
bool found = false;
- Object* context = GetHeap()->global_contexts_list();
+ Object* context = GetHeap()->native_contexts_list();
while (!context->IsUndefined()) {
if (context == this) {
found = true;
@@ -298,13 +298,23 @@ void Context::AddOptimizedFunction(JSFunction* function) {
}
CHECK(found);
#endif
+
+ // If the function link field is already used then the function was
+ // enqueued as a code flushing candidate and we remove it now.
+ if (!function->next_function_link()->IsUndefined()) {
+ CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
+ flusher->EvictCandidate(function);
+ }
+
+ ASSERT(function->next_function_link()->IsUndefined());
+
function->set_next_function_link(get(OPTIMIZED_FUNCTIONS_LIST));
set(OPTIMIZED_FUNCTIONS_LIST, function);
}
void Context::RemoveOptimizedFunction(JSFunction* function) {
- ASSERT(IsGlobalContext());
+ ASSERT(IsNativeContext());
Object* element = get(OPTIMIZED_FUNCTIONS_LIST);
JSFunction* prev = NULL;
while (!element->IsUndefined()) {
@@ -328,7 +338,7 @@ void Context::RemoveOptimizedFunction(JSFunction* function) {
Object* Context::OptimizedFunctionsListHead() {
- ASSERT(IsGlobalContext());
+ ASSERT(IsNativeContext());
return get(OPTIMIZED_FUNCTIONS_LIST);
}
@@ -338,11 +348,28 @@ void Context::ClearOptimizedFunctions() {
}
+Handle<Object> Context::ErrorMessageForCodeGenerationFromStrings() {
+ Handle<Object> result(error_message_for_code_gen_from_strings());
+ if (result->IsUndefined()) {
+ const char* error =
+ "Code generation from strings disallowed for this context";
+ Isolate* isolate = Isolate::Current();
+ result = isolate->factory()->NewStringFromAscii(i::CStrVector(error));
+ }
+ return result;
+}
+
+
#ifdef DEBUG
-bool Context::IsBootstrappingOrContext(Object* object) {
+bool Context::IsBootstrappingOrValidParentContext(
+ Object* object, Context* child) {
// During bootstrapping we allow all objects to pass as
// contexts. This is necessary to fix circular dependencies.
- return Isolate::Current()->bootstrapper()->IsActive() || object->IsContext();
+ if (Isolate::Current()->bootstrapper()->IsActive()) return true;
+ if (!object->IsContext()) return false;
+ Context* context = Context::cast(object);
+ return context->IsNativeContext() || context->IsGlobalContext() ||
+ context->IsModuleContext() || !child->IsModuleContext();
}
diff --git a/src/3rdparty/v8/src/contexts.h b/src/3rdparty/v8/src/contexts.h
index 18f73d9..61e6c66 100644
--- a/src/3rdparty/v8/src/contexts.h
+++ b/src/3rdparty/v8/src/contexts.h
@@ -96,7 +96,7 @@ enum BindingFlags {
// must always be allocated via Heap::AllocateContext() or
// Factory::NewContext.
-#define GLOBAL_CONTEXT_FIELDS(V) \
+#define NATIVE_CONTEXT_FIELDS(V) \
V(GLOBAL_PROXY_INDEX, JSObject, global_proxy_object) \
V(SECURITY_TOKEN_INDEX, Object, security_token) \
V(BOOLEAN_FUNCTION_INDEX, JSFunction, boolean_function) \
@@ -106,9 +106,7 @@ enum BindingFlags {
V(OBJECT_FUNCTION_INDEX, JSFunction, object_function) \
V(INTERNAL_ARRAY_FUNCTION_INDEX, JSFunction, internal_array_function) \
V(ARRAY_FUNCTION_INDEX, JSFunction, array_function) \
- V(SMI_JS_ARRAY_MAP_INDEX, Object, smi_js_array_map) \
- V(DOUBLE_JS_ARRAY_MAP_INDEX, Object, double_js_array_map) \
- V(OBJECT_JS_ARRAY_MAP_INDEX, Object, object_js_array_map) \
+ V(JS_ARRAY_MAPS_INDEX, Object, js_array_maps) \
V(DATE_FUNCTION_INDEX, JSFunction, date_function) \
V(JSON_OBJECT_INDEX, JSObject, json_object) \
V(REGEXP_FUNCTION_INDEX, JSFunction, regexp_function) \
@@ -156,12 +154,16 @@ enum BindingFlags {
V(MAP_CACHE_INDEX, Object, map_cache) \
V(CONTEXT_DATA_INDEX, Object, data) \
V(ALLOW_CODE_GEN_FROM_STRINGS_INDEX, Object, allow_code_gen_from_strings) \
+ V(ERROR_MESSAGE_FOR_CODE_GEN_FROM_STRINGS_INDEX, Object, \
+ error_message_for_code_gen_from_strings) \
V(TO_COMPLETE_PROPERTY_DESCRIPTOR_INDEX, JSFunction, \
to_complete_property_descriptor) \
V(DERIVED_HAS_TRAP_INDEX, JSFunction, derived_has_trap) \
V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap) \
V(DERIVED_SET_TRAP_INDEX, JSFunction, derived_set_trap) \
- V(PROXY_ENUMERATE, JSFunction, proxy_enumerate) \
+ V(PROXY_ENUMERATE_INDEX, JSFunction, proxy_enumerate) \
+ V(OBSERVERS_NOTIFY_CHANGE_INDEX, JSFunction, observers_notify_change) \
+ V(OBSERVERS_DELIVER_CHANGES_INDEX, JSFunction, observers_deliver_changes) \
V(RANDOM_SEED_INDEX, ByteArray, random_seed)
// JSFunctions are pairs (context, function code), sometimes also called
@@ -192,16 +194,19 @@ enum BindingFlags {
// Dynamically declared variables/functions are also added
// to lazily allocated extension object. Context::Lookup
// searches the extension object for properties.
+// For global and block contexts, contains the respective
+// ScopeInfo.
+// For module contexts, points back to the respective JSModule.
//
-// [ global ] A pointer to the global object. Provided for quick
+// [ global_object ] A pointer to the global object. Provided for quick
// access to the global object from inside the code (since
// we always have a context pointer).
//
// In addition, function contexts may have statically allocated context slots
// to store local variables/functions that are accessed from inner functions
// (via static context addresses) or through 'eval' (dynamic context lookups).
-// Finally, the global context contains additional slots for fast access to
-// global properties.
+// Finally, the native context contains additional slots for fast access to
+// native properties.
class Context: public FixedArray {
public:
@@ -219,16 +224,16 @@ class Context: public FixedArray {
// The extension slot is used for either the global object (in global
// contexts), eval extension object (function contexts), subject of with
// (with contexts), or the variable name (catch contexts), the serialized
- // scope info (block contexts).
+ // scope info (block contexts), or the module instance (module contexts).
EXTENSION_INDEX,
- QML_GLOBAL_INDEX,
- GLOBAL_INDEX,
+ QML_GLOBAL_OBJECT_INDEX,
+ GLOBAL_OBJECT_INDEX,
MIN_CONTEXT_SLOTS,
// This slot holds the thrown value in catch contexts.
THROWN_OBJECT_INDEX = MIN_CONTEXT_SLOTS,
- // These slots are only in global contexts.
+ // These slots are only in native contexts.
GLOBAL_PROXY_INDEX = MIN_CONTEXT_SLOTS,
SECURITY_TOKEN_INDEX,
ARGUMENTS_BOILERPLATE_INDEX,
@@ -249,9 +254,7 @@ class Context: public FixedArray {
OBJECT_FUNCTION_INDEX,
INTERNAL_ARRAY_FUNCTION_INDEX,
ARRAY_FUNCTION_INDEX,
- SMI_JS_ARRAY_MAP_INDEX,
- DOUBLE_JS_ARRAY_MAP_INDEX,
- OBJECT_JS_ARRAY_MAP_INDEX,
+ JS_ARRAY_MAPS_INDEX,
DATE_FUNCTION_INDEX,
JSON_OBJECT_INDEX,
REGEXP_FUNCTION_INDEX,
@@ -283,11 +286,14 @@ class Context: public FixedArray {
OUT_OF_MEMORY_INDEX,
CONTEXT_DATA_INDEX,
ALLOW_CODE_GEN_FROM_STRINGS_INDEX,
+ ERROR_MESSAGE_FOR_CODE_GEN_FROM_STRINGS_INDEX,
TO_COMPLETE_PROPERTY_DESCRIPTOR_INDEX,
DERIVED_HAS_TRAP_INDEX,
DERIVED_GET_TRAP_INDEX,
DERIVED_SET_TRAP_INDEX,
- PROXY_ENUMERATE,
+ PROXY_ENUMERATE_INDEX,
+ OBSERVERS_NOTIFY_CHANGE_INDEX,
+ OBSERVERS_DELIVER_CHANGES_INDEX,
RANDOM_SEED_INDEX,
// Properties from here are treated as weak references by the full GC.
@@ -297,7 +303,7 @@ class Context: public FixedArray {
NEXT_CONTEXT_LINK, // Weak.
// Total number of slots.
- GLOBAL_CONTEXT_SLOTS,
+ NATIVE_CONTEXT_SLOTS,
FIRST_WEAK_SLOT = OPTIMIZED_FUNCTIONS_LIST
};
@@ -308,7 +314,7 @@ class Context: public FixedArray {
Context* previous() {
Object* result = unchecked_previous();
- ASSERT(IsBootstrappingOrContext(result));
+ ASSERT(IsBootstrappingOrValidParentContext(result, this));
return reinterpret_cast<Context*>(result);
}
void set_previous(Context* context) { set(PREVIOUS_INDEX, context); }
@@ -317,19 +323,28 @@ class Context: public FixedArray {
Object* extension() { return get(EXTENSION_INDEX); }
void set_extension(Object* object) { set(EXTENSION_INDEX, object); }
+ JSModule* module() { return JSModule::cast(get(EXTENSION_INDEX)); }
+ void set_module(JSModule* module) { set(EXTENSION_INDEX, module); }
+
// Get the context where var declarations will be hoisted to, which
// may be the context itself.
Context* declaration_context();
- GlobalObject* global() {
- Object* result = get(GLOBAL_INDEX);
+ GlobalObject* global_object() {
+ Object* result = get(GLOBAL_OBJECT_INDEX);
ASSERT(IsBootstrappingOrGlobalObject(result));
return reinterpret_cast<GlobalObject*>(result);
}
- void set_global(GlobalObject* global) { set(GLOBAL_INDEX, global); }
+ void set_global_object(GlobalObject* object) {
+ set(GLOBAL_OBJECT_INDEX, object);
+ }
- JSObject *qml_global() { return reinterpret_cast<JSObject *>(get(QML_GLOBAL_INDEX)); }
- void set_qml_global(JSObject *qml_global) { set(QML_GLOBAL_INDEX, qml_global); }
+ JSObject* qml_global_object() {
+ return reinterpret_cast<JSObject *>(get(QML_GLOBAL_OBJECT_INDEX));
+ }
+ void set_qml_global_object(JSObject *qml_global) {
+ set(QML_GLOBAL_OBJECT_INDEX, qml_global);
+ }
// Returns a JSGlobalProxy object or null.
JSObject* global_proxy();
@@ -338,11 +353,11 @@ class Context: public FixedArray {
// The builtins object.
JSBuiltinsObject* builtins();
- // Compute the global context by traversing the context chain.
- Context* global_context();
+ // Compute the native context by traversing the context chain.
+ Context* native_context();
- // Predicates for context types. IsGlobalContext is defined on Object
- // because we frequently have to know if arbitrary objects are global
+ // Predicates for context types. IsNativeContext is defined on Object
+ // because we frequently have to know if arbitrary objects are natives
// contexts.
bool IsFunctionContext() {
Map* map = this->map();
@@ -364,42 +379,36 @@ class Context: public FixedArray {
Map* map = this->map();
return map == map->GetHeap()->module_context_map();
}
+ bool IsGlobalContext() {
+ Map* map = this->map();
+ return map == map->GetHeap()->global_context_map();
+ }
- // Tells whether the global context is marked with out of memory.
+ // Tells whether the native context is marked with out of memory.
inline bool has_out_of_memory();
- // Mark the global context with out of memory.
+ // Mark the native context with out of memory.
inline void mark_out_of_memory();
- // A global context hold a list of all functions which have been optimized.
+ // A native context hold a list of all functions which have been optimized.
void AddOptimizedFunction(JSFunction* function);
void RemoveOptimizedFunction(JSFunction* function);
Object* OptimizedFunctionsListHead();
void ClearOptimizedFunctions();
- static int GetContextMapIndexFromElementsKind(
- ElementsKind elements_kind) {
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- return Context::DOUBLE_JS_ARRAY_MAP_INDEX;
- } else if (elements_kind == FAST_ELEMENTS) {
- return Context::OBJECT_JS_ARRAY_MAP_INDEX;
- } else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- return Context::SMI_JS_ARRAY_MAP_INDEX;
- }
- }
+ Handle<Object> ErrorMessageForCodeGenerationFromStrings();
-#define GLOBAL_CONTEXT_FIELD_ACCESSORS(index, type, name) \
+#define NATIVE_CONTEXT_FIELD_ACCESSORS(index, type, name) \
void set_##name(type* value) { \
- ASSERT(IsGlobalContext()); \
+ ASSERT(IsNativeContext()); \
set(index, value); \
} \
type* name() { \
- ASSERT(IsGlobalContext()); \
+ ASSERT(IsNativeContext()); \
return type::cast(get(index)); \
}
- GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSORS)
-#undef GLOBAL_CONTEXT_FIELD_ACCESSORS
+ NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSORS)
+#undef NATIVE_CONTEXT_FIELD_ACCESSORS
// Lookup the slot called name, starting with the current context.
// There are three possibilities:
@@ -429,7 +438,7 @@ class Context: public FixedArray {
return kHeaderSize + index * kPointerSize - kHeapObjectTag;
}
- static const int kSize = kHeaderSize + GLOBAL_CONTEXT_SLOTS * kPointerSize;
+ static const int kSize = kHeaderSize + NATIVE_CONTEXT_SLOTS * kPointerSize;
// GC support.
typedef FixedBodyDescriptor<
@@ -446,7 +455,7 @@ class Context: public FixedArray {
#ifdef DEBUG
// Bootstrapping-aware type checks.
- static bool IsBootstrappingOrContext(Object* object);
+ static bool IsBootstrappingOrValidParentContext(Object* object, Context* kid);
static bool IsBootstrappingOrGlobalObject(Object* object);
#endif
};
diff --git a/src/3rdparty/v8/src/conversions-inl.h b/src/3rdparty/v8/src/conversions-inl.h
index 77b260f..e272fe6 100644
--- a/src/3rdparty/v8/src/conversions-inl.h
+++ b/src/3rdparty/v8/src/conversions-inl.h
@@ -51,6 +51,11 @@ inline double JunkStringValue() {
}
+inline double SignedZero(bool negative) {
+ return negative ? uint64_to_double(Double::kSignMask) : 0.0;
+}
+
+
// The fast double-to-unsigned-int conversion routine does not guarantee
// rounding towards zero, or any reasonable value if the argument is larger
// than what fits in an unsigned 32-bit integer.
@@ -263,6 +268,7 @@ double InternalStringToInt(UnicodeCache* unicode_cache,
if (radix == 0) {
// Radix detection.
+ radix = 10;
if (*current == '0') {
++current;
if (current == end) return SignedZero(negative);
@@ -271,11 +277,8 @@ double InternalStringToInt(UnicodeCache* unicode_cache,
++current;
if (current == end) return JunkStringValue();
} else {
- radix = 8;
leading_zero = true;
}
- } else {
- radix = 10;
}
} else if (radix == 16) {
if (*current == '0') {
@@ -459,16 +462,23 @@ double InternalStringToDouble(UnicodeCache* unicode_cache,
int insignificant_digits = 0;
bool nonzero_digit_dropped = false;
- bool negative = false;
+ enum Sign {
+ NONE,
+ NEGATIVE,
+ POSITIVE
+ };
+
+ Sign sign = NONE;
if (*current == '+') {
// Ignore leading sign.
++current;
if (current == end) return JunkStringValue();
+ sign = POSITIVE;
} else if (*current == '-') {
++current;
if (current == end) return JunkStringValue();
- negative = true;
+ sign = NEGATIVE;
}
static const char kInfinitySymbol[] = "Infinity";
@@ -483,34 +493,34 @@ double InternalStringToDouble(UnicodeCache* unicode_cache,
}
ASSERT(buffer_pos == 0);
- return negative ? -V8_INFINITY : V8_INFINITY;
+ return (sign == NEGATIVE) ? -V8_INFINITY : V8_INFINITY;
}
bool leading_zero = false;
if (*current == '0') {
++current;
- if (current == end) return SignedZero(negative);
+ if (current == end) return SignedZero(sign == NEGATIVE);
leading_zero = true;
// It could be hexadecimal value.
if ((flags & ALLOW_HEX) && (*current == 'x' || *current == 'X')) {
++current;
- if (current == end || !isDigit(*current, 16)) {
+ if (current == end || !isDigit(*current, 16) || sign != NONE) {
return JunkStringValue(); // "0x".
}
return InternalStringToIntDouble<4>(unicode_cache,
current,
end,
- negative,
+ false,
allow_trailing_junk);
}
// Ignore leading zeros in the integer part.
while (*current == '0') {
++current;
- if (current == end) return SignedZero(negative);
+ if (current == end) return SignedZero(sign == NEGATIVE);
}
}
@@ -555,7 +565,7 @@ double InternalStringToDouble(UnicodeCache* unicode_cache,
// leading zeros (if any).
while (*current == '0') {
++current;
- if (current == end) return SignedZero(negative);
+ if (current == end) return SignedZero(sign == NEGATIVE);
exponent--; // Move this 0 into the exponent.
}
}
@@ -647,7 +657,7 @@ double InternalStringToDouble(UnicodeCache* unicode_cache,
return InternalStringToIntDouble<3>(unicode_cache,
buffer,
buffer + buffer_pos,
- negative,
+ sign == NEGATIVE,
allow_trailing_junk);
}
@@ -660,7 +670,7 @@ double InternalStringToDouble(UnicodeCache* unicode_cache,
buffer[buffer_pos] = '\0';
double converted = Strtod(Vector<const char>(buffer, buffer_pos), exponent);
- return negative ? -converted : converted;
+ return (sign == NEGATIVE) ? -converted : converted;
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/conversions.h b/src/3rdparty/v8/src/conversions.h
index 70559c9..1fbb5f1 100644
--- a/src/3rdparty/v8/src/conversions.h
+++ b/src/3rdparty/v8/src/conversions.h
@@ -52,8 +52,13 @@ inline bool isDigit(int x, int radix) {
}
-inline double SignedZero(bool negative) {
- return negative ? -0.0 : 0.0;
+// The fast double-to-(unsigned-)int conversion routine does not guarantee
+// rounding towards zero.
+// For NaN and values outside the int range, return INT_MIN or INT_MAX.
+inline int FastD2IChecked(double x) {
+ if (!(x >= INT_MIN)) return INT_MIN; // Negation to catch NaNs.
+ if (x > INT_MAX) return INT_MAX;
+ return static_cast<int>(x);
}
@@ -62,8 +67,6 @@ inline double SignedZero(bool negative) {
// The result is unspecified if x is infinite or NaN, or if the rounded
// integer value is outside the range of type int.
inline int FastD2I(double x) {
- // The static_cast convertion from double to int used to be slow, but
- // as new benchmarks show, now it is much faster than lrint().
return static_cast<int>(x);
}
diff --git a/src/3rdparty/v8/src/counters.cc b/src/3rdparty/v8/src/counters.cc
index faad6d4..811c0aa 100644
--- a/src/3rdparty/v8/src/counters.cc
+++ b/src/3rdparty/v8/src/counters.cc
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -64,9 +64,20 @@ void StatsCounterTimer::Stop() {
counter_.Increment(milliseconds);
}
+void Histogram::AddSample(int sample) {
+ if (Enabled()) {
+ Isolate::Current()->stats_table()->AddHistogramSample(histogram_, sample);
+ }
+}
+
+void* Histogram::CreateHistogram() const {
+ return Isolate::Current()->stats_table()->
+ CreateHistogram(name_, min_, max_, num_buckets_);
+}
+
// Start the timer.
void HistogramTimer::Start() {
- if (GetHistogram() != NULL) {
+ if (histogram_.Enabled()) {
stop_time_ = 0;
start_time_ = OS::Ticks();
}
@@ -74,20 +85,13 @@ void HistogramTimer::Start() {
// Stop the timer and record the results.
void HistogramTimer::Stop() {
- if (histogram_ != NULL) {
+ if (histogram_.Enabled()) {
stop_time_ = OS::Ticks();
// Compute the delta between start and stop, in milliseconds.
int milliseconds = static_cast<int>(stop_time_ - start_time_) / 1000;
- Isolate::Current()->stats_table()->
- AddHistogramSample(histogram_, milliseconds);
+ histogram_.AddSample(milliseconds);
}
}
-
-void* HistogramTimer::CreateHistogram() const {
- return Isolate::Current()->stats_table()->
- CreateHistogram(name_, 0, 10000, 50);
-}
-
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/counters.h b/src/3rdparty/v8/src/counters.h
index 6498a02..577280f 100644
--- a/src/3rdparty/v8/src/counters.h
+++ b/src/3rdparty/v8/src/counters.h
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -169,8 +169,7 @@ struct StatsCounter {
protected:
// Returns the cached address of this counter location.
int* GetPtr() {
- if (lookup_done_)
- return ptr_;
+ if (lookup_done_) return ptr_;
lookup_done_ = true;
ptr_ = FindLocationInStatsTable();
return ptr_;
@@ -199,25 +198,30 @@ struct StatsCounterTimer {
}
};
-// A HistogramTimer allows distributions of results to be created
-// HistogramTimer t = { L"foo", NULL, false, 0, 0 };
-struct HistogramTimer {
+// A Histogram represents a dynamically created histogram in the StatsTable.
+//
+// This class is designed to be POD initialized. It will be registered with
+// the histogram system on first use. For example:
+// Histogram h = { "myhist", 0, 10000, 50, NULL, false };
+struct Histogram {
const char* name_;
+ int min_;
+ int max_;
+ int num_buckets_;
void* histogram_;
bool lookup_done_;
- int64_t start_time_;
- int64_t stop_time_;
-
- // Start the timer.
- void Start();
+ // Add a single sample to this histogram.
+ void AddSample(int sample);
- // Stop the timer and record the results.
- void Stop();
+ // Returns true if this histogram is enabled.
+ bool Enabled() {
+ return GetHistogram() != NULL;
+ }
- // Returns true if the timer is running.
- bool Running() {
- return (histogram_ != NULL) && (start_time_ != 0) && (stop_time_ == 0);
+ // Reset the cached internal pointer.
+ void Reset() {
+ lookup_done_ = false;
}
protected:
@@ -234,6 +238,30 @@ struct HistogramTimer {
void* CreateHistogram() const;
};
+// A HistogramTimer allows distributions of results to be created
+// HistogramTimer t = { {L"foo", 0, 10000, 50, NULL, false}, 0, 0 };
+struct HistogramTimer {
+ Histogram histogram_;
+
+ int64_t start_time_;
+ int64_t stop_time_;
+
+ // Start the timer.
+ void Start();
+
+ // Stop the timer and record the results.
+ void Stop();
+
+ // Returns true if the timer is running.
+ bool Running() {
+ return histogram_.Enabled() && (start_time_ != 0) && (stop_time_ == 0);
+ }
+
+ void Reset() {
+ histogram_.Reset();
+ }
+};
+
// Helper class for scoping a HistogramTimer.
class HistogramTimerScope BASE_EMBEDDED {
public:
diff --git a/src/3rdparty/v8/src/cpu-profiler.h b/src/3rdparty/v8/src/cpu-profiler.h
index 6e2e771..9cd4484 100644
--- a/src/3rdparty/v8/src/cpu-profiler.h
+++ b/src/3rdparty/v8/src/cpu-profiler.h
@@ -188,7 +188,7 @@ class ProfilerEventsProcessor : public Thread {
#define PROFILE(isolate, Call) \
- LOG(isolate, Call); \
+ LOG_CODE_EVENT(isolate, Call); \
do { \
if (v8::internal::CpuProfiler::is_profiling(isolate)) { \
v8::internal::CpuProfiler::Call; \
diff --git a/src/3rdparty/v8/src/d8.cc b/src/3rdparty/v8/src/d8.cc
index ddd4100..b3b1bb8 100644
--- a/src/3rdparty/v8/src/d8.cc
+++ b/src/3rdparty/v8/src/d8.cc
@@ -200,7 +200,13 @@ Handle<Value> Shell::Write(const Arguments& args) {
if (i != 0) {
printf(" ");
}
- v8::String::Utf8Value str(args[i]);
+
+ // Explicitly catch potential exceptions in toString().
+ v8::TryCatch try_catch;
+ Handle<String> str_obj = args[i]->ToString();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ v8::String::Utf8Value str(str_obj);
int n = static_cast<int>(fwrite(*str, sizeof(**str), str.length(), stdout));
if (n != str.length()) {
printf("Error in fwrite\n");
@@ -284,9 +290,9 @@ Handle<Value> Shell::Load(const Arguments& args) {
return Undefined();
}
-static size_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
- if (value_in->IsUint32()) {
- return value_in->Uint32Value();
+static int32_t convertToInt(Local<Value> value_in, TryCatch* try_catch) {
+ if (value_in->IsInt32()) {
+ return value_in->Int32Value();
}
Local<Value> number = value_in->ToNumber();
@@ -296,7 +302,15 @@ static size_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
Local<Int32> int32 = number->ToInt32();
if (try_catch->HasCaught() || int32.IsEmpty()) return 0;
- int32_t raw_value = int32->Int32Value();
+ int32_t value = int32->Int32Value();
+ if (try_catch->HasCaught()) return 0;
+
+ return value;
+}
+
+
+static int32_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
+ int32_t raw_value = convertToInt(value_in, try_catch);
if (try_catch->HasCaught()) return 0;
if (raw_value < 0) {
@@ -312,182 +326,463 @@ static size_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
ThrowException(
String::New("Array length exceeds maximum length."));
}
- return static_cast<size_t>(raw_value);
+ return raw_value;
}
-const char kArrayBufferMarkerPropName[] = "_is_array_buffer_";
-const char kArrayBufferReferencePropName[] = "_array_buffer_ref_";
+// TODO(rossberg): should replace these by proper uses of HasInstance,
+// once we figure out a good way to make the templates global.
+const char kArrayBufferMarkerPropName[] = "d8::_is_array_buffer_";
+const char kArrayMarkerPropName[] = "d8::_is_typed_array_";
-static const int kExternalArrayAllocationHeaderSize = 2;
-Handle<Value> Shell::CreateExternalArray(const Arguments& args,
- ExternalArrayType type,
- size_t element_size) {
- TryCatch try_catch;
- bool is_array_buffer_construct = element_size == 0;
- if (is_array_buffer_construct) {
- type = v8::kExternalByteArray;
- element_size = 1;
+Handle<Value> Shell::CreateExternalArrayBuffer(Handle<Object> buffer,
+ int32_t length) {
+ static const int32_t kMaxSize = 0x7fffffff;
+ // Make sure the total size fits into a (signed) int.
+ if (length < 0 || length > kMaxSize) {
+ return ThrowException(String::New("ArrayBuffer exceeds maximum size (2G)"));
+ }
+ uint8_t* data = new uint8_t[length];
+ if (data == NULL) {
+ return ThrowException(String::New("Memory allocation failed"));
}
- ASSERT(element_size == 1 || element_size == 2 || element_size == 4 ||
- element_size == 8);
+ memset(data, 0, length);
+
+ buffer->SetHiddenValue(String::New(kArrayBufferMarkerPropName), True());
+ Persistent<Object> persistent_array = Persistent<Object>::New(buffer);
+ persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
+ persistent_array.MarkIndependent();
+ V8::AdjustAmountOfExternalAllocatedMemory(length);
+
+ buffer->SetIndexedPropertiesToExternalArrayData(
+ data, v8::kExternalByteArray, length);
+ buffer->Set(String::New("byteLength"), Int32::New(length), ReadOnly);
+
+ return buffer;
+}
+
+
+Handle<Value> Shell::ArrayBuffer(const Arguments& args) {
+ if (!args.IsConstructCall()) {
+ Handle<Value>* rec_args = new Handle<Value>[args.Length()];
+ for (int i = 0; i < args.Length(); ++i) rec_args[i] = args[i];
+ Handle<Value> result = args.Callee()->NewInstance(args.Length(), rec_args);
+ delete[] rec_args;
+ return result;
+ }
+
if (args.Length() == 0) {
return ThrowException(
- String::New("Array constructor must have at least one "
- "parameter."));
- }
- bool first_arg_is_array_buffer =
- args[0]->IsObject() &&
- args[0]->ToObject()->Get(
- String::New(kArrayBufferMarkerPropName))->IsTrue();
- // Currently, only the following constructors are supported:
+ String::New("ArrayBuffer constructor must have one argument"));
+ }
+ TryCatch try_catch;
+ int32_t length = convertToUint(args[0], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ return CreateExternalArrayBuffer(args.This(), length);
+}
+
+
+Handle<Object> Shell::CreateExternalArray(Handle<Object> array,
+ Handle<Object> buffer,
+ ExternalArrayType type,
+ int32_t length,
+ int32_t byteLength,
+ int32_t byteOffset,
+ int32_t element_size) {
+ ASSERT(element_size == 1 || element_size == 2 ||
+ element_size == 4 || element_size == 8);
+ ASSERT(byteLength == length * element_size);
+
+ void* data = buffer->GetIndexedPropertiesExternalArrayData();
+ ASSERT(data != NULL);
+
+ array->SetIndexedPropertiesToExternalArrayData(
+ static_cast<uint8_t*>(data) + byteOffset, type, length);
+ array->SetHiddenValue(String::New(kArrayMarkerPropName), Int32::New(type));
+ array->Set(String::New("byteLength"), Int32::New(byteLength), ReadOnly);
+ array->Set(String::New("byteOffset"), Int32::New(byteOffset), ReadOnly);
+ array->Set(String::New("length"), Int32::New(length), ReadOnly);
+ array->Set(String::New("BYTES_PER_ELEMENT"), Int32::New(element_size));
+ array->Set(String::New("buffer"), buffer, ReadOnly);
+
+ return array;
+}
+
+
+Handle<Value> Shell::CreateExternalArray(const Arguments& args,
+ ExternalArrayType type,
+ int32_t element_size) {
+ if (!args.IsConstructCall()) {
+ Handle<Value>* rec_args = new Handle<Value>[args.Length()];
+ for (int i = 0; i < args.Length(); ++i) rec_args[i] = args[i];
+ Handle<Value> result = args.Callee()->NewInstance(args.Length(), rec_args);
+ delete[] rec_args;
+ return result;
+ }
+
+ TryCatch try_catch;
+ ASSERT(element_size == 1 || element_size == 2 ||
+ element_size == 4 || element_size == 8);
+
+ // All of the following constructors are supported:
// TypedArray(unsigned long length)
+ // TypedArray(type[] array)
+ // TypedArray(TypedArray array)
// TypedArray(ArrayBuffer buffer,
// optional unsigned long byteOffset,
// optional unsigned long length)
- if (args.Length() > 3) {
+ Handle<Object> buffer;
+ int32_t length;
+ int32_t byteLength;
+ int32_t byteOffset;
+ bool init_from_array = false;
+ if (args.Length() == 0) {
return ThrowException(
- String::New("Array constructor from ArrayBuffer must "
- "have 1-3 parameters."));
- }
-
- Local<Value> length_value = (args.Length() < 3)
- ? (first_arg_is_array_buffer
- ? args[0]->ToObject()->Get(String::New("byteLength"))
- : args[0])
- : args[2];
- size_t byteLength = convertToUint(length_value, &try_catch);
- size_t length = byteLength;
- if (try_catch.HasCaught()) return try_catch.Exception();
-
- void* data = NULL;
- size_t offset = 0;
-
- Handle<Object> array = Object::New();
- if (first_arg_is_array_buffer) {
- Handle<Object> derived_from = args[0]->ToObject();
- data = derived_from->GetIndexedPropertiesExternalArrayData();
-
- size_t array_buffer_length = convertToUint(
- derived_from->Get(String::New("byteLength")),
- &try_catch);
- if (try_catch.HasCaught()) return try_catch.Exception();
-
- if (data == NULL && array_buffer_length != 0) {
- return ThrowException(
- String::New("ArrayBuffer doesn't have data"));
+ String::New("Array constructor must have at least one argument"));
+ }
+ if (args[0]->IsObject() &&
+ !args[0]->ToObject()->GetHiddenValue(
+ String::New(kArrayBufferMarkerPropName)).IsEmpty()) {
+ // Construct from ArrayBuffer.
+ buffer = args[0]->ToObject();
+ int32_t bufferLength =
+ convertToUint(buffer->Get(String::New("byteLength")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ if (args.Length() < 2 || args[1]->IsUndefined()) {
+ byteOffset = 0;
+ } else {
+ byteOffset = convertToUint(args[1], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ if (byteOffset > bufferLength) {
+ return ThrowException(String::New("byteOffset out of bounds"));
+ }
+ if (byteOffset % element_size != 0) {
+ return ThrowException(
+ String::New("byteOffset must be multiple of element size"));
+ }
}
- if (args.Length() > 1) {
- offset = convertToUint(args[1], &try_catch);
- if (try_catch.HasCaught()) return try_catch.Exception();
-
- // The given byteOffset must be a multiple of the element size of the
- // specific type, otherwise an exception is raised.
- if (offset % element_size != 0) {
+ if (args.Length() < 3 || args[2]->IsUndefined()) {
+ byteLength = bufferLength - byteOffset;
+ length = byteLength / element_size;
+ if (byteLength % element_size != 0) {
return ThrowException(
- String::New("offset must be multiple of element_size"));
+ String::New("buffer size must be multiple of element size"));
+ }
+ } else {
+ length = convertToUint(args[2], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ byteLength = length * element_size;
+ if (byteOffset + byteLength > bufferLength) {
+ return ThrowException(String::New("length out of bounds"));
}
}
-
- if (offset > array_buffer_length) {
- return ThrowException(
- String::New("byteOffset must be less than ArrayBuffer length."));
+ } else {
+ if (args[0]->IsObject() &&
+ args[0]->ToObject()->Has(String::New("length"))) {
+ // Construct from array.
+ length = convertToUint(
+ args[0]->ToObject()->Get(String::New("length")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ init_from_array = true;
+ } else {
+ // Construct from size.
+ length = convertToUint(args[0], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
}
+ byteLength = length * element_size;
+ byteOffset = 0;
+
+ Handle<Object> global = Context::GetCurrent()->Global();
+ Handle<Value> array_buffer = global->Get(String::New("ArrayBuffer"));
+ ASSERT(!try_catch.HasCaught() && array_buffer->IsFunction());
+ Handle<Value> buffer_args[] = { Uint32::New(byteLength) };
+ Handle<Value> result = Handle<Function>::Cast(array_buffer)->NewInstance(
+ 1, buffer_args);
+ if (try_catch.HasCaught()) return result;
+ buffer = result->ToObject();
+ }
- if (args.Length() == 2) {
- // If length is not explicitly specified, the length of the ArrayBuffer
- // minus the byteOffset must be a multiple of the element size of the
- // specific type, or an exception is raised.
- length = array_buffer_length - offset;
- }
+ Handle<Object> array = CreateExternalArray(
+ args.This(), buffer, type, length, byteLength, byteOffset, element_size);
- if (args.Length() != 3) {
- if (length % element_size != 0) {
- return ThrowException(
- String::New("ArrayBuffer length minus the byteOffset must be a "
- "multiple of the element size"));
- }
- length /= element_size;
- }
+ if (init_from_array) {
+ Handle<Object> init = args[0]->ToObject();
+ for (int i = 0; i < length; ++i) array->Set(i, init->Get(i));
+ }
+
+ return array;
+}
- // If a given byteOffset and length references an area beyond the end of
- // the ArrayBuffer an exception is raised.
- if (offset + (length * element_size) > array_buffer_length) {
- return ThrowException(
- String::New("length references an area beyond the end of the "
- "ArrayBuffer"));
- }
- // Hold a reference to the ArrayBuffer so its buffer doesn't get collected.
- array->Set(String::New(kArrayBufferReferencePropName), args[0], ReadOnly);
+Handle<Value> Shell::ArrayBufferSlice(const Arguments& args) {
+ TryCatch try_catch;
+
+ if (!args.This()->IsObject()) {
+ return ThrowException(
+ String::New("'slice' invoked on non-object receiver"));
}
- if (is_array_buffer_construct) {
- array->Set(String::New(kArrayBufferMarkerPropName), True(), ReadOnly);
+ Local<Object> self = args.This();
+ Local<Value> marker =
+ self->GetHiddenValue(String::New(kArrayBufferMarkerPropName));
+ if (marker.IsEmpty()) {
+ return ThrowException(
+ String::New("'slice' invoked on wrong receiver type"));
}
- Persistent<Object> persistent_array = Persistent<Object>::New(array);
- if (data == NULL && length != 0) {
- // Make sure the total size fits into a (signed) int.
- static const int kMaxSize = 0x7fffffff;
- if (length > (kMaxSize - sizeof(size_t)) / element_size) {
- return ThrowException(String::New("Array exceeds maximum size (2G)"));
- }
- // Prepend the size of the allocated chunk to the data itself.
- int total_size = length * element_size +
- kExternalArrayAllocationHeaderSize * sizeof(size_t);
- data = malloc(total_size);
- if (data == NULL) {
- return ThrowException(String::New("Memory allocation failed."));
- }
- *reinterpret_cast<size_t*>(data) = total_size;
- data = reinterpret_cast<size_t*>(data) + kExternalArrayAllocationHeaderSize;
- memset(data, 0, length * element_size);
- V8::AdjustAmountOfExternalAllocatedMemory(total_size);
+ int32_t length =
+ convertToUint(self->Get(String::New("byteLength")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ if (args.Length() == 0) {
+ return ThrowException(
+ String::New("'slice' must have at least one argument"));
+ }
+ int32_t begin = convertToInt(args[0], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ if (begin < 0) begin += length;
+ if (begin < 0) begin = 0;
+ if (begin > length) begin = length;
+
+ int32_t end;
+ if (args.Length() < 2 || args[1]->IsUndefined()) {
+ end = length;
+ } else {
+ end = convertToInt(args[1], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ if (end < 0) end += length;
+ if (end < 0) end = 0;
+ if (end > length) end = length;
+ if (end < begin) end = begin;
}
- persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
- persistent_array.MarkIndependent();
- array->SetIndexedPropertiesToExternalArrayData(
- reinterpret_cast<uint8_t*>(data) + offset, type,
- static_cast<int>(length));
- array->Set(String::New("byteLength"),
- Int32::New(static_cast<int32_t>(byteLength)), ReadOnly);
- if (!is_array_buffer_construct) {
- array->Set(String::New("length"),
- Int32::New(static_cast<int32_t>(length)), ReadOnly);
- array->Set(String::New("byteOffset"),
- Int32::New(static_cast<int32_t>(offset)), ReadOnly);
- array->Set(String::New("BYTES_PER_ELEMENT"),
- Int32::New(static_cast<int32_t>(element_size)));
- // We currently support 'buffer' property only if constructed from a buffer.
- if (first_arg_is_array_buffer) {
- array->Set(String::New("buffer"), args[0], ReadOnly);
- }
+ Local<Function> constructor = Local<Function>::Cast(self->GetConstructor());
+ Handle<Value> new_args[] = { Uint32::New(end - begin) };
+ Handle<Value> result = constructor->NewInstance(1, new_args);
+ if (try_catch.HasCaught()) return result;
+ Handle<Object> buffer = result->ToObject();
+ uint8_t* dest =
+ static_cast<uint8_t*>(buffer->GetIndexedPropertiesExternalArrayData());
+ uint8_t* src = begin + static_cast<uint8_t*>(
+ self->GetIndexedPropertiesExternalArrayData());
+ memcpy(dest, src, end - begin);
+
+ return buffer;
+}
+
+
+Handle<Value> Shell::ArraySubArray(const Arguments& args) {
+ TryCatch try_catch;
+
+ if (!args.This()->IsObject()) {
+ return ThrowException(
+ String::New("'subarray' invoked on non-object receiver"));
}
- return array;
+
+ Local<Object> self = args.This();
+ Local<Value> marker = self->GetHiddenValue(String::New(kArrayMarkerPropName));
+ if (marker.IsEmpty()) {
+ return ThrowException(
+ String::New("'subarray' invoked on wrong receiver type"));
+ }
+
+ Handle<Object> buffer = self->Get(String::New("buffer"))->ToObject();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t length =
+ convertToUint(self->Get(String::New("length")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t byteOffset =
+ convertToUint(self->Get(String::New("byteOffset")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t element_size =
+ convertToUint(self->Get(String::New("BYTES_PER_ELEMENT")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ if (args.Length() == 0) {
+ return ThrowException(
+ String::New("'subarray' must have at least one argument"));
+ }
+ int32_t begin = convertToInt(args[0], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ if (begin < 0) begin += length;
+ if (begin < 0) begin = 0;
+ if (begin > length) begin = length;
+
+ int32_t end;
+ if (args.Length() < 2 || args[1]->IsUndefined()) {
+ end = length;
+ } else {
+ end = convertToInt(args[1], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ if (end < 0) end += length;
+ if (end < 0) end = 0;
+ if (end > length) end = length;
+ if (end < begin) end = begin;
+ }
+
+ length = end - begin;
+ byteOffset += begin * element_size;
+
+ Local<Function> constructor = Local<Function>::Cast(self->GetConstructor());
+ Handle<Value> construct_args[] = {
+ buffer, Uint32::New(byteOffset), Uint32::New(length)
+ };
+ return constructor->NewInstance(3, construct_args);
}
-void Shell::ExternalArrayWeakCallback(Persistent<Value> object, void* data) {
- HandleScope scope;
- Handle<String> prop_name = String::New(kArrayBufferReferencePropName);
- Handle<Object> converted_object = object->ToObject();
- Local<Value> prop_value = converted_object->Get(prop_name);
- if (data != NULL && !prop_value->IsObject()) {
- data = reinterpret_cast<size_t*>(data) - kExternalArrayAllocationHeaderSize;
- V8::AdjustAmountOfExternalAllocatedMemory(
- -static_cast<int>(*reinterpret_cast<size_t*>(data)));
- free(data);
+Handle<Value> Shell::ArraySet(const Arguments& args) {
+ TryCatch try_catch;
+
+ if (!args.This()->IsObject()) {
+ return ThrowException(
+ String::New("'set' invoked on non-object receiver"));
}
- object.Dispose();
+
+ Local<Object> self = args.This();
+ Local<Value> marker = self->GetHiddenValue(String::New(kArrayMarkerPropName));
+ if (marker.IsEmpty()) {
+ return ThrowException(
+ String::New("'set' invoked on wrong receiver type"));
+ }
+ int32_t length =
+ convertToUint(self->Get(String::New("length")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t element_size =
+ convertToUint(self->Get(String::New("BYTES_PER_ELEMENT")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ if (args.Length() == 0) {
+ return ThrowException(
+ String::New("'set' must have at least one argument"));
+ }
+ if (!args[0]->IsObject() ||
+ !args[0]->ToObject()->Has(String::New("length"))) {
+ return ThrowException(
+ String::New("'set' invoked with non-array argument"));
+ }
+ Handle<Object> source = args[0]->ToObject();
+ int32_t source_length =
+ convertToUint(source->Get(String::New("length")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ int32_t offset;
+ if (args.Length() < 2 || args[1]->IsUndefined()) {
+ offset = 0;
+ } else {
+ offset = convertToUint(args[1], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ }
+ if (offset + source_length > length) {
+ return ThrowException(String::New("offset or source length out of bounds"));
+ }
+
+ int32_t source_element_size;
+ if (source->GetHiddenValue(String::New(kArrayMarkerPropName)).IsEmpty()) {
+ source_element_size = 0;
+ } else {
+ source_element_size =
+ convertToUint(source->Get(String::New("BYTES_PER_ELEMENT")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ }
+
+ if (element_size == source_element_size &&
+ self->GetConstructor()->StrictEquals(source->GetConstructor())) {
+ // Use memmove on the array buffers.
+ Handle<Object> buffer = self->Get(String::New("buffer"))->ToObject();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ Handle<Object> source_buffer =
+ source->Get(String::New("buffer"))->ToObject();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t byteOffset =
+ convertToUint(self->Get(String::New("byteOffset")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t source_byteOffset =
+ convertToUint(source->Get(String::New("byteOffset")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ uint8_t* dest = byteOffset + offset * element_size + static_cast<uint8_t*>(
+ buffer->GetIndexedPropertiesExternalArrayData());
+ uint8_t* src = source_byteOffset + static_cast<uint8_t*>(
+ source_buffer->GetIndexedPropertiesExternalArrayData());
+ memmove(dest, src, source_length * element_size);
+ } else if (source_element_size == 0) {
+ // Source is not a typed array, copy element-wise sequentially.
+ for (int i = 0; i < source_length; ++i) {
+ self->Set(offset + i, source->Get(i));
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ }
+ } else {
+ // Need to copy element-wise to make the right conversions.
+ Handle<Object> buffer = self->Get(String::New("buffer"))->ToObject();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ Handle<Object> source_buffer =
+ source->Get(String::New("buffer"))->ToObject();
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ if (buffer->StrictEquals(source_buffer)) {
+ // Same backing store, need to handle overlap correctly.
+ // This gets a bit tricky in the case of different element sizes
+ // (which, of course, is extremely unlikely to ever occur in practice).
+ int32_t byteOffset =
+ convertToUint(self->Get(String::New("byteOffset")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+ int32_t source_byteOffset =
+ convertToUint(source->Get(String::New("byteOffset")), &try_catch);
+ if (try_catch.HasCaught()) return try_catch.ReThrow();
+
+ // Copy as much as we can from left to right.
+ int i = 0;
+ int32_t next_dest_offset = byteOffset + (offset + 1) * element_size;
+ int32_t next_src_offset = source_byteOffset + source_element_size;
+ while (i < length && next_dest_offset <= next_src_offset) {
+ self->Set(offset + i, source->Get(i));
+ ++i;
+ next_dest_offset += element_size;
+ next_src_offset += source_element_size;
+ }
+ // Of what's left, copy as much as we can from right to left.
+ int j = length - 1;
+ int32_t dest_offset = byteOffset + (offset + j) * element_size;
+ int32_t src_offset = source_byteOffset + j * source_element_size;
+ while (j >= i && dest_offset >= src_offset) {
+ self->Set(offset + j, source->Get(j));
+ --j;
+ dest_offset -= element_size;
+ src_offset -= source_element_size;
+ }
+ // There can be at most 8 entries left in the middle that need buffering
+ // (because the largest element_size is 8 times the smallest).
+ ASSERT(j+1 - i <= 8);
+ Handle<Value> temp[8];
+ for (int k = i; k <= j; ++k) {
+ temp[k - i] = source->Get(k);
+ }
+ for (int k = i; k <= j; ++k) {
+ self->Set(offset + k, temp[k - i]);
+ }
+ } else {
+ // Different backing stores, safe to copy element-wise sequentially.
+ for (int i = 0; i < source_length; ++i)
+ self->Set(offset + i, source->Get(i));
+ }
+ }
+
+ return Undefined();
}
-Handle<Value> Shell::ArrayBuffer(const Arguments& args) {
- return CreateExternalArray(args, v8::kExternalByteArray, 0);
+void Shell::ExternalArrayWeakCallback(Persistent<Value> object, void* data) {
+ HandleScope scope;
+ int32_t length =
+ object->ToObject()->Get(String::New("byteLength"))->Uint32Value();
+ V8::AdjustAmountOfExternalAllocatedMemory(-length);
+ delete[] static_cast<uint8_t*>(data);
+ object.Dispose();
}
@@ -507,8 +802,8 @@ Handle<Value> Shell::Int16Array(const Arguments& args) {
Handle<Value> Shell::Uint16Array(const Arguments& args) {
- return CreateExternalArray(args, kExternalUnsignedShortArray,
- sizeof(uint16_t));
+ return CreateExternalArray(
+ args, kExternalUnsignedShortArray, sizeof(uint16_t));
}
@@ -523,18 +818,18 @@ Handle<Value> Shell::Uint32Array(const Arguments& args) {
Handle<Value> Shell::Float32Array(const Arguments& args) {
- return CreateExternalArray(args, kExternalFloatArray,
- sizeof(float)); // NOLINT
+ return CreateExternalArray(
+ args, kExternalFloatArray, sizeof(float)); // NOLINT
}
Handle<Value> Shell::Float64Array(const Arguments& args) {
- return CreateExternalArray(args, kExternalDoubleArray,
- sizeof(double)); // NOLINT
+ return CreateExternalArray(
+ args, kExternalDoubleArray, sizeof(double)); // NOLINT
}
-Handle<Value> Shell::PixelArray(const Arguments& args) {
+Handle<Value> Shell::Uint8ClampedArray(const Arguments& args) {
return CreateExternalArray(args, kExternalPixelArray, sizeof(uint8_t));
}
@@ -764,7 +1059,7 @@ void Shell::InstallUtilityScript() {
i::Debug* debug = i::Isolate::Current()->debug();
debug->Load();
i::Handle<i::JSObject> js_debug
- = i::Handle<i::JSObject>(debug->debug_context()->global());
+ = i::Handle<i::JSObject>(debug->debug_context()->global_object());
utility_context_->Global()->Set(String::New("$debug"),
Utils::ToLocal(js_debug));
debug->debug_context()->set_security_token(HEAP->undefined_value());
@@ -829,13 +1124,32 @@ class BZip2Decompressor : public v8::StartupDataDecompressor {
};
#endif
+
+Handle<FunctionTemplate> Shell::CreateArrayBufferTemplate(
+ InvocationCallback fun) {
+ Handle<FunctionTemplate> buffer_template = FunctionTemplate::New(fun);
+ Local<Template> proto_template = buffer_template->PrototypeTemplate();
+ proto_template->Set(String::New("slice"),
+ FunctionTemplate::New(ArrayBufferSlice));
+ return buffer_template;
+}
+
+
+Handle<FunctionTemplate> Shell::CreateArrayTemplate(InvocationCallback fun) {
+ Handle<FunctionTemplate> array_template = FunctionTemplate::New(fun);
+ Local<Template> proto_template = array_template->PrototypeTemplate();
+ proto_template->Set(String::New("set"), FunctionTemplate::New(ArraySet));
+ proto_template->Set(String::New("subarray"),
+ FunctionTemplate::New(ArraySubArray));
+ return array_template;
+}
+
+
Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
Handle<ObjectTemplate> global_template = ObjectTemplate::New();
global_template->Set(String::New("print"), FunctionTemplate::New(Print));
global_template->Set(String::New("write"), FunctionTemplate::New(Write));
global_template->Set(String::New("read"), FunctionTemplate::New(Read));
- global_template->Set(String::New("readbinary"),
- FunctionTemplate::New(ReadBinary));
global_template->Set(String::New("readbuffer"),
FunctionTemplate::New(ReadBuffer));
global_template->Set(String::New("readline"),
@@ -849,26 +1163,28 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
FunctionTemplate::New(DisableProfiler));
// Bind the handlers for external arrays.
+ PropertyAttribute attr =
+ static_cast<PropertyAttribute>(ReadOnly | DontDelete);
global_template->Set(String::New("ArrayBuffer"),
- FunctionTemplate::New(ArrayBuffer));
+ CreateArrayBufferTemplate(ArrayBuffer), attr);
global_template->Set(String::New("Int8Array"),
- FunctionTemplate::New(Int8Array));
+ CreateArrayTemplate(Int8Array), attr);
global_template->Set(String::New("Uint8Array"),
- FunctionTemplate::New(Uint8Array));
+ CreateArrayTemplate(Uint8Array), attr);
global_template->Set(String::New("Int16Array"),
- FunctionTemplate::New(Int16Array));
+ CreateArrayTemplate(Int16Array), attr);
global_template->Set(String::New("Uint16Array"),
- FunctionTemplate::New(Uint16Array));
+ CreateArrayTemplate(Uint16Array), attr);
global_template->Set(String::New("Int32Array"),
- FunctionTemplate::New(Int32Array));
+ CreateArrayTemplate(Int32Array), attr);
global_template->Set(String::New("Uint32Array"),
- FunctionTemplate::New(Uint32Array));
+ CreateArrayTemplate(Uint32Array), attr);
global_template->Set(String::New("Float32Array"),
- FunctionTemplate::New(Float32Array));
+ CreateArrayTemplate(Float32Array), attr);
global_template->Set(String::New("Float64Array"),
- FunctionTemplate::New(Float64Array));
- global_template->Set(String::New("PixelArray"),
- FunctionTemplate::New(PixelArray));
+ CreateArrayTemplate(Float64Array), attr);
+ global_template->Set(String::New("Uint8ClampedArray"),
+ CreateArrayTemplate(Uint8ClampedArray), attr);
#ifdef LIVE_OBJECT_LIST
global_template->Set(String::New("lol_is_enabled"), True());
@@ -901,7 +1217,7 @@ void Shell::Initialize() {
// Set up counters
if (i::StrLength(i::FLAG_map_counters) != 0)
MapCounters(i::FLAG_map_counters);
- if (i::FLAG_dump_counters) {
+ if (i::FLAG_dump_counters || i::FLAG_track_gc_object_stats) {
V8::SetCounterFunction(LookupCounter);
V8::SetCreateHistogramFunction(CreateHistogram);
V8::SetAddHistogramSampleFunction(AddHistogramSample);
@@ -991,20 +1307,24 @@ void Shell::OnExit() {
counters[j].key = i.CurrentKey();
}
qsort(counters, number_of_counters, sizeof(counters[0]), CompareKeys);
- printf("+--------------------------------------------+-------------+\n");
- printf("| Name | Value |\n");
- printf("+--------------------------------------------+-------------+\n");
+ printf("+----------------------------------------------------------------+"
+ "-------------+\n");
+ printf("| Name |"
+ " Value |\n");
+ printf("+----------------------------------------------------------------+"
+ "-------------+\n");
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
if (counter->is_histogram()) {
- printf("| c:%-40s | %11i |\n", key, counter->count());
- printf("| t:%-40s | %11i |\n", key, counter->sample_total());
+ printf("| c:%-60s | %11i |\n", key, counter->count());
+ printf("| t:%-60s | %11i |\n", key, counter->sample_total());
} else {
- printf("| %-42s | %11i |\n", key, counter->count());
+ printf("| %-62s | %11i |\n", key, counter->count());
}
}
- printf("+--------------------------------------------+-------------+\n");
+ printf("+----------------------------------------------------------------+"
+ "-------------+\n");
delete [] counters;
}
delete counters_file_;
@@ -1056,45 +1376,29 @@ static char* ReadChars(const char* name, int* size_out) {
}
-Handle<Value> Shell::ReadBinary(const Arguments& args) {
- String::Utf8Value filename(args[0]);
- int size;
- if (*filename == NULL) {
- return ThrowException(String::New("Error loading file"));
- }
- char* chars = ReadChars(*filename, &size);
- if (chars == NULL) {
- return ThrowException(String::New("Error reading file"));
- }
- // We skip checking the string for UTF8 characters and use it raw as
- // backing store for the external string with 8-bit characters.
- BinaryResource* resource = new BinaryResource(chars, size);
- return String::NewExternal(resource);
-}
-
-
Handle<Value> Shell::ReadBuffer(const Arguments& args) {
+ ASSERT(sizeof(char) == sizeof(uint8_t)); // NOLINT
String::Utf8Value filename(args[0]);
int length;
if (*filename == NULL) {
return ThrowException(String::New("Error loading file"));
}
- char* data = ReadChars(*filename, &length);
+
+ uint8_t* data = reinterpret_cast<uint8_t*>(ReadChars(*filename, &length));
if (data == NULL) {
return ThrowException(String::New("Error reading file"));
}
-
Handle<Object> buffer = Object::New();
- buffer->Set(String::New(kArrayBufferMarkerPropName), True(), ReadOnly);
-
+ buffer->SetHiddenValue(String::New(kArrayBufferMarkerPropName), True());
Persistent<Object> persistent_buffer = Persistent<Object>::New(buffer);
persistent_buffer.MakeWeak(data, ExternalArrayWeakCallback);
persistent_buffer.MarkIndependent();
+ V8::AdjustAmountOfExternalAllocatedMemory(length);
buffer->SetIndexedPropertiesToExternalArrayData(
- reinterpret_cast<uint8_t*>(data), kExternalUnsignedByteArray, length);
+ data, kExternalUnsignedByteArray, length);
buffer->Set(String::New("byteLength"),
- Int32::New(static_cast<int32_t>(length)), ReadOnly);
+ Int32::New(static_cast<int32_t>(length)), ReadOnly);
return buffer;
}
@@ -1259,7 +1563,7 @@ void SourceGroup::Execute() {
Handle<String> SourceGroup::ReadFile(const char* name) {
int size;
- const char* chars = ReadChars(name, &size);
+ char* chars = ReadChars(name, &size);
if (chars == NULL) return Handle<String>();
Handle<String> result = String::New(chars, size);
delete[] chars;
@@ -1291,6 +1595,11 @@ void SourceGroup::ExecuteInThread() {
Execute();
}
context.Dispose();
+ if (Shell::options.send_idle_notification) {
+ const int kLongIdlePauseInMs = 1000;
+ V8::ContextDisposedNotification();
+ V8::IdleNotification(kLongIdlePauseInMs);
+ }
}
if (done_semaphore_ != NULL) done_semaphore_->Signal();
} while (!Shell::options.last_run);
@@ -1336,6 +1645,9 @@ bool Shell::SetOptions(int argc, char* argv[]) {
} else if (strcmp(argv[i], "--test") == 0) {
options.test_shell = true;
argv[i] = NULL;
+ } else if (strcmp(argv[i], "--send-idle-notification") == 0) {
+ options.send_idle_notification = true;
+ argv[i] = NULL;
} else if (strcmp(argv[i], "--preemption") == 0) {
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
@@ -1492,13 +1804,11 @@ int Shell::RunMain(int argc, char* argv[]) {
}
if (!options.last_run) {
context.Dispose();
-#if !defined(V8_SHARED)
- if (i::FLAG_send_idle_notification) {
+ if (options.send_idle_notification) {
const int kLongIdlePauseInMs = 1000;
V8::ContextDisposedNotification();
V8::IdleNotification(kLongIdlePauseInMs);
}
-#endif // !V8_SHARED
}
#ifndef V8_SHARED
diff --git a/src/3rdparty/v8/src/d8.h b/src/3rdparty/v8/src/d8.h
index 23fdebc..a62a81f 100644
--- a/src/3rdparty/v8/src/d8.h
+++ b/src/3rdparty/v8/src/d8.h
@@ -31,7 +31,7 @@
#ifndef V8_SHARED
#include "allocation.h"
#include "hashmap.h"
-#include "smart-array-pointer.h"
+#include "smart-pointers.h"
#include "v8.h"
#else
#include "../include/v8.h"
@@ -67,7 +67,7 @@ class CounterCollection {
CounterCollection();
Counter* GetNextCounter();
private:
- static const unsigned kMaxCounters = 256;
+ static const unsigned kMaxCounters = 512;
uint32_t magic_number_;
uint32_t max_counters_;
uint32_t max_name_size_;
@@ -227,6 +227,7 @@ class ShellOptions {
#endif // V8_SHARED
script_executed(false),
last_run(true),
+ send_idle_notification(false),
stress_opt(false),
stress_deopt(false),
interactive_shell(false),
@@ -249,6 +250,7 @@ class ShellOptions {
#endif // V8_SHARED
bool script_executed;
bool last_run;
+ bool send_idle_notification;
bool stress_opt;
bool stress_deopt;
bool interactive_shell;
@@ -307,7 +309,6 @@ class Shell : public i::AllStatic {
static Handle<Value> EnableProfiler(const Arguments& args);
static Handle<Value> DisableProfiler(const Arguments& args);
static Handle<Value> Read(const Arguments& args);
- static Handle<Value> ReadBinary(const Arguments& args);
static Handle<Value> ReadBuffer(const Arguments& args);
static Handle<String> ReadFromStdin();
static Handle<Value> ReadLine(const Arguments& args) {
@@ -323,7 +324,10 @@ class Shell : public i::AllStatic {
static Handle<Value> Uint32Array(const Arguments& args);
static Handle<Value> Float32Array(const Arguments& args);
static Handle<Value> Float64Array(const Arguments& args);
- static Handle<Value> PixelArray(const Arguments& args);
+ static Handle<Value> Uint8ClampedArray(const Arguments& args);
+ static Handle<Value> ArrayBufferSlice(const Arguments& args);
+ static Handle<Value> ArraySubArray(const Arguments& args);
+ static Handle<Value> ArraySet(const Arguments& args);
// The OS object on the global object contains methods for performing
// operating system calls:
//
@@ -384,9 +388,20 @@ class Shell : public i::AllStatic {
static void RunShell();
static bool SetOptions(int argc, char* argv[]);
static Handle<ObjectTemplate> CreateGlobalTemplate();
+ static Handle<FunctionTemplate> CreateArrayBufferTemplate(InvocationCallback);
+ static Handle<FunctionTemplate> CreateArrayTemplate(InvocationCallback);
+ static Handle<Value> CreateExternalArrayBuffer(Handle<Object> buffer,
+ int32_t size);
+ static Handle<Object> CreateExternalArray(Handle<Object> array,
+ Handle<Object> buffer,
+ ExternalArrayType type,
+ int32_t length,
+ int32_t byteLength,
+ int32_t byteOffset,
+ int32_t element_size);
static Handle<Value> CreateExternalArray(const Arguments& args,
ExternalArrayType type,
- size_t element_size);
+ int32_t element_size);
static void ExternalArrayWeakCallback(Persistent<Value> object, void* data);
};
diff --git a/src/3rdparty/v8/src/date.js b/src/3rdparty/v8/src/date.js
index d0e24ab..a54cb23 100644
--- a/src/3rdparty/v8/src/date.js
+++ b/src/3rdparty/v8/src/date.js
@@ -318,7 +318,6 @@ function DateNow() {
// ECMA 262 - 15.9.5.2
function DateToString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this)
if (NUMBER_IS_NAN(t)) return kInvalidDate;
var time_zone_string = LocalTimezoneString(this)
@@ -328,7 +327,6 @@ function DateToString() {
// ECMA 262 - 15.9.5.3
function DateToDateString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
return DateString(this);
@@ -337,7 +335,6 @@ function DateToDateString() {
// ECMA 262 - 15.9.5.4
function DateToTimeString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
var time_zone_string = LocalTimezoneString(this);
@@ -353,7 +350,6 @@ function DateToLocaleString() {
// ECMA 262 - 15.9.5.6
function DateToLocaleDateString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
return LongDateString(this);
@@ -362,7 +358,6 @@ function DateToLocaleDateString() {
// ECMA 262 - 15.9.5.7
function DateToLocaleTimeString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
return TimeString(this);
@@ -371,133 +366,114 @@ function DateToLocaleTimeString() {
// ECMA 262 - 15.9.5.8
function DateValueOf() {
- CHECK_DATE(this);
return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.9
function DateGetTime() {
- CHECK_DATE(this);
return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.10
function DateGetFullYear() {
- CHECK_DATE(this);
return LOCAL_YEAR(this);
}
// ECMA 262 - 15.9.5.11
function DateGetUTCFullYear() {
- CHECK_DATE(this);
return UTC_YEAR(this);
}
// ECMA 262 - 15.9.5.12
function DateGetMonth() {
- CHECK_DATE(this);
return LOCAL_MONTH(this);
}
// ECMA 262 - 15.9.5.13
function DateGetUTCMonth() {
- CHECK_DATE(this);
return UTC_MONTH(this);
}
// ECMA 262 - 15.9.5.14
function DateGetDate() {
- CHECK_DATE(this);
return LOCAL_DAY(this);
}
// ECMA 262 - 15.9.5.15
function DateGetUTCDate() {
- CHECK_DATE(this);
return UTC_DAY(this);
}
// ECMA 262 - 15.9.5.16
function DateGetDay() {
- CHECK_DATE(this);
return LOCAL_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.17
function DateGetUTCDay() {
- CHECK_DATE(this);
return UTC_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.18
function DateGetHours() {
- CHECK_DATE(this);
return LOCAL_HOUR(this);
}
// ECMA 262 - 15.9.5.19
function DateGetUTCHours() {
- CHECK_DATE(this);
return UTC_HOUR(this);
}
// ECMA 262 - 15.9.5.20
function DateGetMinutes() {
- CHECK_DATE(this);
return LOCAL_MIN(this);
}
// ECMA 262 - 15.9.5.21
function DateGetUTCMinutes() {
- CHECK_DATE(this);
return UTC_MIN(this);
}
// ECMA 262 - 15.9.5.22
function DateGetSeconds() {
- CHECK_DATE(this);
return LOCAL_SEC(this);
}
// ECMA 262 - 15.9.5.23
function DateGetUTCSeconds() {
- CHECK_DATE(this);
return UTC_SEC(this)
}
// ECMA 262 - 15.9.5.24
function DateGetMilliseconds() {
- CHECK_DATE(this);
return LOCAL_MS(this);
}
// ECMA 262 - 15.9.5.25
function DateGetUTCMilliseconds() {
- CHECK_DATE(this);
return UTC_MS(this);
}
// ECMA 262 - 15.9.5.26
function DateGetTimezoneOffset() {
- CHECK_DATE(this);
return TIMEZONE_OFFSET(this);
}
@@ -512,7 +488,6 @@ function DateSetTime(ms) {
// ECMA 262 - 15.9.5.28
function DateSetMilliseconds(ms) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
ms = ToNumber(ms);
var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), LOCAL_SEC(this), ms);
@@ -522,7 +497,6 @@ function DateSetMilliseconds(ms) {
// ECMA 262 - 15.9.5.29
function DateSetUTCMilliseconds(ms) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
ms = ToNumber(ms);
var time = MakeTime(UTC_HOUR(this),
@@ -535,7 +509,6 @@ function DateSetUTCMilliseconds(ms) {
// ECMA 262 - 15.9.5.30
function DateSetSeconds(sec, ms) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
sec = ToNumber(sec);
ms = %_ArgumentsLength() < 2 ? LOCAL_MS(this) : ToNumber(ms);
@@ -546,7 +519,6 @@ function DateSetSeconds(sec, ms) {
// ECMA 262 - 15.9.5.31
function DateSetUTCSeconds(sec, ms) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
sec = ToNumber(sec);
ms = %_ArgumentsLength() < 2 ? UTC_MS(this) : ToNumber(ms);
@@ -557,7 +529,6 @@ function DateSetUTCSeconds(sec, ms) {
// ECMA 262 - 15.9.5.33
function DateSetMinutes(min, sec, ms) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
@@ -570,7 +541,6 @@ function DateSetMinutes(min, sec, ms) {
// ECMA 262 - 15.9.5.34
function DateSetUTCMinutes(min, sec, ms) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
@@ -583,7 +553,6 @@ function DateSetUTCMinutes(min, sec, ms) {
// ECMA 262 - 15.9.5.35
function DateSetHours(hour, min, sec, ms) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
@@ -597,7 +566,6 @@ function DateSetHours(hour, min, sec, ms) {
// ECMA 262 - 15.9.5.34
function DateSetUTCHours(hour, min, sec, ms) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
@@ -611,7 +579,6 @@ function DateSetUTCHours(hour, min, sec, ms) {
// ECMA 262 - 15.9.5.36
function DateSetDate(date) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
date = ToNumber(date);
var day = MakeDay(LOCAL_YEAR(this), LOCAL_MONTH(this), date);
@@ -621,7 +588,6 @@ function DateSetDate(date) {
// ECMA 262 - 15.9.5.37
function DateSetUTCDate(date) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
date = ToNumber(date);
var day = MakeDay(UTC_YEAR(this), UTC_MONTH(this), date);
@@ -631,7 +597,6 @@ function DateSetUTCDate(date) {
// ECMA 262 - 15.9.5.38
function DateSetMonth(month, date) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
month = ToNumber(month);
date = %_ArgumentsLength() < 2 ? LOCAL_DAY(this) : ToNumber(date);
@@ -642,7 +607,6 @@ function DateSetMonth(month, date) {
// ECMA 262 - 15.9.5.39
function DateSetUTCMonth(month, date) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
month = ToNumber(month);
date = %_ArgumentsLength() < 2 ? UTC_DAY(this) : ToNumber(date);
@@ -653,7 +617,6 @@ function DateSetUTCMonth(month, date) {
// ECMA 262 - 15.9.5.40
function DateSetFullYear(year, month, date) {
- CHECK_DATE(this);
var t = LOCAL_DATE_VALUE(this);
year = ToNumber(year);
var argc = %_ArgumentsLength();
@@ -674,7 +637,6 @@ function DateSetFullYear(year, month, date) {
// ECMA 262 - 15.9.5.41
function DateSetUTCFullYear(year, month, date) {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
year = ToNumber(year);
var argc = %_ArgumentsLength();
@@ -695,7 +657,6 @@ function DateSetUTCFullYear(year, month, date) {
// ECMA 262 - 15.9.5.42
function DateToUTCString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
// Return UTC string of the form: Sat, 31 Jan 1970 23:00:00 GMT
@@ -709,7 +670,6 @@ function DateToUTCString() {
// ECMA 262 - B.2.4
function DateGetYear() {
- CHECK_DATE(this);
return LOCAL_YEAR(this) - 1900;
}
@@ -757,7 +717,6 @@ function PadInt(n, digits) {
// ECMA 262 - 15.9.5.43
function DateToISOString() {
- CHECK_DATE(this);
var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) throw MakeRangeError("invalid_time_value", []);
var year = this.getUTCFullYear();
diff --git a/src/3rdparty/v8/src/dateparser-inl.h b/src/3rdparty/v8/src/dateparser-inl.h
index 32f0f9e..3cb36fa 100644
--- a/src/3rdparty/v8/src/dateparser-inl.h
+++ b/src/3rdparty/v8/src/dateparser-inl.h
@@ -62,7 +62,8 @@ bool DateParser::Parse(Vector<Char> str,
// sss is in the range 000..999,
// hh is in the range 00..23,
// mm, ss, and sss default to 00 if missing, and
- // timezone defaults to Z if missing.
+ // timezone defaults to Z if missing
+ // (following Safari, ISO actually demands local time).
// Extensions:
// We also allow sss to have more or less than three digits (but at
// least one).
@@ -148,6 +149,9 @@ bool DateParser::Parse(Vector<Char> str,
} else {
// Garbage words are illegal if a number has been read.
if (has_read_number) return false;
+ // The first number has to be separated from garbage words by
+ // whitespace or other separators.
+ if (scanner.Peek().IsNumber()) return false;
}
} else if (token.IsAsciiSign() && (tz.IsUTC() || !time.IsEmpty())) {
// Parse UTC offset (only after UTC or time).
diff --git a/src/3rdparty/v8/src/debug-agent.cc b/src/3rdparty/v8/src/debug-agent.cc
index 10c0053..e856222 100644
--- a/src/3rdparty/v8/src/debug-agent.cc
+++ b/src/3rdparty/v8/src/debug-agent.cc
@@ -157,7 +157,9 @@ void DebuggerAgent::OnSessionClosed(DebuggerAgentSession* session) {
ScopedLock with(session_access_);
ASSERT(session == session_);
if (session == session_) {
- CloseSession();
+ session_->Shutdown();
+ delete session_;
+ session_ = NULL;
}
}
@@ -397,7 +399,7 @@ bool DebuggerAgentUtil::SendMessage(const Socket* conn,
uint16_t character = message[i];
buffer_position +=
unibrow::Utf8::Encode(buffer + buffer_position, character, previous);
- ASSERT(buffer_position < kBufferSize);
+ ASSERT(buffer_position <= kBufferSize);
// Send buffer if full or last character is encoded.
if (kBufferSize - buffer_position <
diff --git a/src/3rdparty/v8/src/debug-debugger.js b/src/3rdparty/v8/src/debug-debugger.js
index 5cdbf14..796d6aa 100644
--- a/src/3rdparty/v8/src/debug-debugger.js
+++ b/src/3rdparty/v8/src/debug-debugger.js
@@ -1454,6 +1454,8 @@ DebugCommandProcessor.prototype.processDebugJSONRequest = function(
this.profileRequest_(request, response);
} else if (request.command == 'changelive') {
this.changeLiveRequest_(request, response);
+ } else if (request.command == 'restartframe') {
+ this.restartFrameRequest_(request, response);
} else if (request.command == 'flags') {
this.debuggerFlagsRequest_(request, response);
} else if (request.command == 'v8flags') {
@@ -2081,7 +2083,7 @@ DebugCommandProcessor.prototype.evaluateRequest_ = function(request, response) {
// Global evaluate.
if (global) {
- // Evaluate in the global context.
+ // Evaluate in the native context.
response.body = this.exec_state_.evaluateGlobal(
expression, Boolean(disable_break), additional_context_object);
return;
@@ -2363,9 +2365,6 @@ DebugCommandProcessor.prototype.profileRequest_ = function(request, response) {
DebugCommandProcessor.prototype.changeLiveRequest_ = function(
request, response) {
- if (!Debug.LiveEdit) {
- return response.failed('LiveEdit feature is not supported');
- }
if (!request.arguments) {
return response.failed('Missing arguments');
}
@@ -2403,6 +2402,37 @@ DebugCommandProcessor.prototype.changeLiveRequest_ = function(
};
+DebugCommandProcessor.prototype.restartFrameRequest_ = function(
+ request, response) {
+ if (!request.arguments) {
+ return response.failed('Missing arguments');
+ }
+ var frame = request.arguments.frame;
+
+ // No frames to evaluate in frame.
+ if (this.exec_state_.frameCount() == 0) {
+ return response.failed('No frames');
+ }
+
+ var frame_mirror;
+ // Check whether a frame was specified.
+ if (!IS_UNDEFINED(frame)) {
+ var frame_number = %ToNumber(frame);
+ if (frame_number < 0 || frame_number >= this.exec_state_.frameCount()) {
+ return response.failed('Invalid frame "' + frame + '"');
+ }
+ // Restart specified frame.
+ frame_mirror = this.exec_state_.frame(frame_number);
+ } else {
+ // Restart selected frame.
+ frame_mirror = this.exec_state_.frame();
+ }
+
+ var result_description = Debug.LiveEdit.RestartFrame(frame_mirror);
+ response.body = {result: result_description};
+};
+
+
DebugCommandProcessor.prototype.debuggerFlagsRequest_ = function(request,
response) {
// Check for legal request.
diff --git a/src/3rdparty/v8/src/debug.cc b/src/3rdparty/v8/src/debug.cc
index 9efb5c3..ec25acc 100644
--- a/src/3rdparty/v8/src/debug.cc
+++ b/src/3rdparty/v8/src/debug.cc
@@ -97,8 +97,8 @@ static v8::Handle<v8::Context> GetDebugEventContext(Isolate* isolate) {
// Isolate::context() may have been NULL when "script collected" event
// occured.
if (context.is_null()) return v8::Local<v8::Context>();
- Handle<Context> global_context(context->global_context());
- return v8::Utils::ToLocal(global_context);
+ Handle<Context> native_context(context->native_context());
+ return v8::Utils::ToLocal(native_context);
}
@@ -261,8 +261,12 @@ void BreakLocationIterator::Reset() {
// Create relocation iterators for the two code objects.
if (reloc_iterator_ != NULL) delete reloc_iterator_;
if (reloc_iterator_original_ != NULL) delete reloc_iterator_original_;
- reloc_iterator_ = new RelocIterator(debug_info_->code());
- reloc_iterator_original_ = new RelocIterator(debug_info_->original_code());
+ reloc_iterator_ = new RelocIterator(
+ debug_info_->code(),
+ ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));
+ reloc_iterator_original_ = new RelocIterator(
+ debug_info_->original_code(),
+ ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));
// Position at the first break point.
break_point_ = -1;
@@ -698,7 +702,7 @@ void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
// We need to clear all breakpoints associated with the function to restore
// original code and avoid patching the code twice later because
// the function will live in the heap until next gc, and can be found by
- // Runtime::FindSharedFunctionInfoInScript.
+ // Debug::FindSharedFunctionInfoInScript.
BreakLocationIterator it(node->debug_info(), ALL_BREAK_LOCATIONS);
it.ClearAllDebugBreak();
debug->RemoveDebugInfo(node->debug_info());
@@ -745,12 +749,15 @@ bool Debug::CompileDebuggerScript(int index) {
isolate->bootstrapper()->NativesSourceLookup(index);
Vector<const char> name = Natives::GetScriptName(index);
Handle<String> script_name = factory->NewStringFromAscii(name);
+ Handle<Context> context = isolate->native_context();
// Compile the script.
Handle<SharedFunctionInfo> function_info;
function_info = Compiler::Compile(source_code,
script_name,
- 0, 0, NULL, NULL,
+ 0, 0,
+ context,
+ NULL, NULL,
Handle<String>::null(),
NATIVES_CODE);
@@ -762,13 +769,12 @@ bool Debug::CompileDebuggerScript(int index) {
}
// Execute the shared function in the debugger context.
- Handle<Context> context = isolate->global_context();
bool caught_exception;
Handle<JSFunction> function =
factory->NewFunctionFromSharedFunctionInfo(function_info, context);
Handle<Object> exception =
- Execution::TryCall(function, Handle<Object>(context->global()),
+ Execution::TryCall(function, Handle<Object>(context->global_object()),
0, NULL, &caught_exception);
// Check for caught exceptions.
@@ -829,7 +835,7 @@ bool Debug::Load() {
// Expose the builtins object in the debugger context.
Handle<String> key = isolate_->factory()->LookupAsciiSymbol("builtins");
- Handle<GlobalObject> global = Handle<GlobalObject>(context->global());
+ Handle<GlobalObject> global = Handle<GlobalObject>(context->global_object());
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate_,
JSReceiver::SetProperty(global, key, Handle<Object>(global->builtins()),
@@ -892,16 +898,6 @@ void Debug::Iterate(ObjectVisitor* v) {
}
-void Debug::PutValuesOnStackAndDie(int start,
- Address c_entry_fp,
- Address last_fp,
- Address larger_fp,
- int count,
- int end) {
- OS::Abort();
-}
-
-
Object* Debug::Break(Arguments args) {
Heap* heap = isolate_->heap();
HandleScope scope(isolate_);
@@ -999,41 +995,16 @@ Object* Debug::Break(Arguments args) {
it.Advance();
}
- // Catch the cases that would lead to crashes and capture
- // - C entry FP at which to start stack crawl.
- // - FP of the frame at which we plan to stop stepping out (last FP).
- // - current FP that's larger than last FP.
- // - Counter for the number of steps to step out.
- if (it.done()) {
- // We crawled the entire stack, never reaching last_fp_.
- PutValuesOnStackAndDie(0xBEEEEEEE,
- frame->fp(),
- thread_local_.last_fp_,
- NULL,
- count,
- 0xFEEEEEEE);
- } else if (it.frame()->fp() != thread_local_.last_fp_) {
- // We crawled over last_fp_, without getting a match.
- PutValuesOnStackAndDie(0xBEEEEEEE,
- frame->fp(),
- thread_local_.last_fp_,
- it.frame()->fp(),
- count,
- 0xFEEEEEEE);
+ // Check that we indeed found the frame we are looking for.
+ CHECK(!it.done() && (it.frame()->fp() == thread_local_.last_fp_));
+ if (step_count > 1) {
+ // Save old count and action to continue stepping after StepOut.
+ thread_local_.queued_step_count_ = step_count - 1;
}
- // If we found original frame
- if (it.frame()->fp() == thread_local_.last_fp_) {
- if (step_count > 1) {
- // Save old count and action to continue stepping after
- // StepOut
- thread_local_.queued_step_count_ = step_count - 1;
- }
-
- // Set up for StepOut to reach target frame
- step_action = StepOut;
- step_count = count;
- }
+ // Set up for StepOut to reach target frame.
+ step_action = StepOut;
+ step_count = count;
}
// Clear all current stepping setup.
@@ -1130,7 +1101,7 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) {
factory->LookupAsciiSymbol("IsBreakPointTriggered");
Handle<JSFunction> check_break_point =
Handle<JSFunction>(JSFunction::cast(
- debug_context()->global()->GetPropertyNoExceptionThrown(
+ debug_context()->global_object()->GetPropertyNoExceptionThrown(
*is_break_point_triggered_symbol)));
// Get the break id as an object.
@@ -1170,14 +1141,16 @@ Handle<DebugInfo> Debug::GetDebugInfo(Handle<SharedFunctionInfo> shared) {
}
-void Debug::SetBreakPoint(Handle<SharedFunctionInfo> shared,
+void Debug::SetBreakPoint(Handle<JSFunction> function,
Handle<Object> break_point_object,
int* source_position) {
HandleScope scope(isolate_);
PrepareForBreakPoints();
- if (!EnsureDebugInfo(shared)) {
+ // Make sure the function is compiled and has set up the debug info.
+ Handle<SharedFunctionInfo> shared(function->shared());
+ if (!EnsureDebugInfo(shared, function)) {
// Return if retrieving debug info failed.
return;
}
@@ -1198,6 +1171,50 @@ void Debug::SetBreakPoint(Handle<SharedFunctionInfo> shared,
}
+bool Debug::SetBreakPointForScript(Handle<Script> script,
+ Handle<Object> break_point_object,
+ int* source_position) {
+ HandleScope scope(isolate_);
+
+ PrepareForBreakPoints();
+
+ // Obtain shared function info for the function.
+ Object* result = FindSharedFunctionInfoInScript(script, *source_position);
+ if (result->IsUndefined()) return false;
+
+ // Make sure the function has set up the debug info.
+ Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result));
+ if (!EnsureDebugInfo(shared, Handle<JSFunction>::null())) {
+ // Return if retrieving debug info failed.
+ return false;
+ }
+
+ // Find position within function. The script position might be before the
+ // source position of the first function.
+ int position;
+ if (shared->start_position() > *source_position) {
+ position = 0;
+ } else {
+ position = *source_position - shared->start_position();
+ }
+
+ Handle<DebugInfo> debug_info = GetDebugInfo(shared);
+ // Source positions starts with zero.
+ ASSERT(position >= 0);
+
+ // Find the break point and change it.
+ BreakLocationIterator it(debug_info, SOURCE_BREAK_LOCATIONS);
+ it.FindBreakLocationFromPosition(position);
+ it.SetBreakPoint(break_point_object);
+
+ *source_position = it.position() + shared->start_position();
+
+ // At least one active break point now.
+ ASSERT(debug_info->GetBreakPointCount() > 0);
+ return true;
+}
+
+
void Debug::ClearBreakPoint(Handle<Object> break_point_object) {
HandleScope scope(isolate_);
@@ -1249,10 +1266,12 @@ void Debug::ClearAllBreakPoints() {
}
-void Debug::FloodWithOneShot(Handle<SharedFunctionInfo> shared) {
+void Debug::FloodWithOneShot(Handle<JSFunction> function) {
PrepareForBreakPoints();
- // Make sure the function has set up the debug info.
- if (!EnsureDebugInfo(shared)) {
+
+ // Make sure the function is compiled and has set up the debug info.
+ Handle<SharedFunctionInfo> shared(function->shared());
+ if (!EnsureDebugInfo(shared, function)) {
// Return if we failed to retrieve the debug info.
return;
}
@@ -1272,8 +1291,8 @@ void Debug::FloodBoundFunctionWithOneShot(Handle<JSFunction> function) {
if (!bindee.is_null() && bindee->IsJSFunction() &&
!JSFunction::cast(*bindee)->IsBuiltin()) {
- Handle<SharedFunctionInfo> shared_info(JSFunction::cast(*bindee)->shared());
- Debug::FloodWithOneShot(shared_info);
+ Handle<JSFunction> bindee_function(JSFunction::cast(*bindee));
+ Debug::FloodWithOneShot(bindee_function);
}
}
@@ -1288,11 +1307,9 @@ void Debug::FloodHandlerWithOneShot() {
for (JavaScriptFrameIterator it(isolate_, id); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (frame->HasHandler()) {
- Handle<SharedFunctionInfo> shared =
- Handle<SharedFunctionInfo>(
- JSFunction::cast(frame->function())->shared());
// Flood the function with the catch block with break points
- FloodWithOneShot(shared);
+ JSFunction* function = JSFunction::cast(frame->function());
+ FloodWithOneShot(Handle<JSFunction>(function));
return;
}
}
@@ -1359,14 +1376,14 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
frames_it.Advance();
// Fill the function to return to with one-shot break points.
JSFunction* function = JSFunction::cast(frames_it.frame()->function());
- FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared()));
+ FloodWithOneShot(Handle<JSFunction>(function));
return;
}
// Get the debug info (create it if it does not exist).
- Handle<SharedFunctionInfo> shared =
- Handle<SharedFunctionInfo>(JSFunction::cast(frame->function())->shared());
- if (!EnsureDebugInfo(shared)) {
+ Handle<JSFunction> function(JSFunction::cast(frame->function()));
+ Handle<SharedFunctionInfo> shared(function->shared());
+ if (!EnsureDebugInfo(shared, function)) {
// Return if ensuring debug info failed.
return;
}
@@ -1436,7 +1453,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
if (!frames_it.done()) {
// Fill the function to return to with one-shot break points.
JSFunction* function = JSFunction::cast(frames_it.frame()->function());
- FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared()));
+ FloodWithOneShot(Handle<JSFunction>(function));
// Set target frame pointer.
ActivateStepOut(frames_it.frame());
}
@@ -1446,21 +1463,19 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// Step next or step min.
// Fill the current function with one-shot break points.
- FloodWithOneShot(shared);
+ FloodWithOneShot(function);
// Remember source position and frame to handle step next.
thread_local_.last_statement_position_ =
debug_info->code()->SourceStatementPosition(frame->pc());
- thread_local_.last_fp_ = frame->fp();
+ thread_local_.last_fp_ = frame->UnpaddedFP();
} else {
// If there's restarter frame on top of the stack, just get the pointer
// to function which is going to be restarted.
if (is_at_restarted_function) {
Handle<JSFunction> restarted_function(
JSFunction::cast(*thread_local_.restarter_frame_function_pointer_));
- Handle<SharedFunctionInfo> restarted_shared(
- restarted_function->shared());
- FloodWithOneShot(restarted_shared);
+ FloodWithOneShot(restarted_function);
} else if (!call_function_stub.is_null()) {
// If it's CallFunction stub ensure target function is compiled and flood
// it with one shot breakpoints.
@@ -1502,7 +1517,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
} else if (!js_function->IsBuiltin()) {
// Don't step into builtins.
// It will also compile target function if it's not compiled yet.
- FloodWithOneShot(Handle<SharedFunctionInfo>(js_function->shared()));
+ FloodWithOneShot(js_function);
}
}
}
@@ -1511,7 +1526,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// a call target as the function called might be a native function for
// which step in will not stop. It also prepares for stepping in
// getters/setters.
- FloodWithOneShot(shared);
+ FloodWithOneShot(function);
if (is_load_or_store) {
// Remember source position and frame to handle step in getter/setter. If
@@ -1520,7 +1535,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// propagated on the next Debug::Break.
thread_local_.last_statement_position_ =
debug_info->code()->SourceStatementPosition(frame->pc());
- thread_local_.last_fp_ = frame->fp();
+ thread_local_.last_fp_ = frame->UnpaddedFP();
}
// Step in or Step in min
@@ -1555,7 +1570,7 @@ bool Debug::StepNextContinue(BreakLocationIterator* break_location_iterator,
// Continue if we are still on the same frame and in the same statement.
int current_statement_position =
break_location_iterator->code()->SourceStatementPosition(frame->pc());
- return thread_local_.last_fp_ == frame->fp() &&
+ return thread_local_.last_fp_ == frame->UnpaddedFP() &&
thread_local_.last_statement_position_ == current_statement_position;
}
@@ -1711,12 +1726,11 @@ void Debug::HandleStepIn(Handle<JSFunction> function,
// function.
if (!holder.is_null() && holder->IsJSFunction() &&
!JSFunction::cast(*holder)->IsBuiltin()) {
- Handle<SharedFunctionInfo> shared_info(
- JSFunction::cast(*holder)->shared());
- Debug::FloodWithOneShot(shared_info);
+ Handle<JSFunction> js_function = Handle<JSFunction>::cast(holder);
+ Debug::FloodWithOneShot(js_function);
}
} else {
- Debug::FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared()));
+ Debug::FloodWithOneShot(function);
}
}
}
@@ -1756,7 +1770,7 @@ void Debug::ClearOneShot() {
void Debug::ActivateStepIn(StackFrame* frame) {
ASSERT(!StepOutActive());
- thread_local_.step_into_fp_ = frame->fp();
+ thread_local_.step_into_fp_ = frame->UnpaddedFP();
}
@@ -1767,7 +1781,7 @@ void Debug::ClearStepIn() {
void Debug::ActivateStepOut(StackFrame* frame) {
ASSERT(!StepInActive());
- thread_local_.step_out_fp_ = frame->fp();
+ thread_local_.step_out_fp_ = frame->UnpaddedFP();
}
@@ -1784,20 +1798,19 @@ void Debug::ClearStepNext() {
// Helper function to compile full code for debugging. This code will
-// have debug break slots and deoptimization
-// information. Deoptimization information is required in case that an
-// optimized version of this function is still activated on the
-// stack. It will also make sure that the full code is compiled with
-// the same flags as the previous version - that is flags which can
-// change the code generated. The current method of mapping from
-// already compiled full code without debug break slots to full code
-// with debug break slots depends on the generated code is otherwise
-// exactly the same.
-static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
+// have debug break slots and deoptimization information. Deoptimization
+// information is required in case that an optimized version of this
+// function is still activated on the stack. It will also make sure that
+// the full code is compiled with the same flags as the previous version,
+// that is flags which can change the code generated. The current method
+// of mapping from already compiled full code without debug break slots
+// to full code with debug break slots depends on the generated code is
+// otherwise exactly the same.
+static bool CompileFullCodeForDebugging(Handle<JSFunction> function,
Handle<Code> current_code) {
ASSERT(!current_code->has_debug_break_slots());
- CompilationInfo info(shared);
+ CompilationInfoWithZone info(function);
info.MarkCompilingForDebugging(current_code);
ASSERT(!info.shared_info()->is_compiled());
ASSERT(!info.isolate()->has_pending_exception());
@@ -1809,7 +1822,7 @@ static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
info.isolate()->clear_pending_exception();
#if DEBUG
if (result) {
- Handle<Code> new_code(shared->code());
+ Handle<Code> new_code(function->shared()->code());
ASSERT(new_code->has_debug_break_slots());
ASSERT(current_code->is_compiled_optimizable() ==
new_code->is_compiled_optimizable());
@@ -1869,29 +1882,48 @@ static void RedirectActivationsToRecompiledCodeOnThread(
continue;
}
- intptr_t delta = frame->pc() - frame_code->instruction_start();
- int debug_break_slot_count = 0;
- int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
+ // Iterate over the RelocInfo in the original code to compute the sum of the
+ // constant pools sizes. (See Assembler::CheckConstPool())
+ // Note that this is only useful for architectures using constant pools.
+ int constpool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL);
+ int frame_const_pool_size = 0;
+ for (RelocIterator it(*frame_code, constpool_mask); !it.done(); it.next()) {
+ RelocInfo* info = it.rinfo();
+ if (info->pc() >= frame->pc()) break;
+ frame_const_pool_size += static_cast<int>(info->data());
+ }
+ intptr_t frame_offset =
+ frame->pc() - frame_code->instruction_start() - frame_const_pool_size;
+
+ // Iterate over the RelocInfo for new code to find the number of bytes
+ // generated for debug slots and constant pools.
+ int debug_break_slot_bytes = 0;
+ int new_code_const_pool_size = 0;
+ int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+ RelocInfo::ModeMask(RelocInfo::CONST_POOL);
for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
// Check if the pc in the new code with debug break
// slots is before this slot.
RelocInfo* info = it.rinfo();
- int debug_break_slot_bytes =
- debug_break_slot_count * Assembler::kDebugBreakSlotLength;
- intptr_t new_delta =
- info->pc() -
- new_code->instruction_start() -
- debug_break_slot_bytes;
- if (new_delta > delta) {
+ intptr_t new_offset = info->pc() - new_code->instruction_start() -
+ new_code_const_pool_size - debug_break_slot_bytes;
+ if (new_offset >= frame_offset) {
break;
}
- // Passed a debug break slot in the full code with debug
- // break slots.
- debug_break_slot_count++;
+ if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
+ debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
+ } else {
+ ASSERT(RelocInfo::IsConstPool(info->rmode()));
+ // The size of the constant pool is encoded in the data.
+ new_code_const_pool_size += static_cast<int>(info->data());
+ }
}
- int debug_break_slot_bytes =
- debug_break_slot_count * Assembler::kDebugBreakSlotLength;
+
+ // Compute the equivalent pc in the new code.
+ byte* new_pc = new_code->instruction_start() + frame_offset +
+ debug_break_slot_bytes + new_code_const_pool_size;
+
if (FLAG_trace_deopt) {
PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
"with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
@@ -1908,14 +1940,12 @@ static void RedirectActivationsToRecompiledCodeOnThread(
new_code->instruction_size(),
new_code->instruction_size(),
reinterpret_cast<intptr_t>(frame->pc()),
- reinterpret_cast<intptr_t>(new_code->instruction_start()) +
- delta + debug_break_slot_bytes);
+ reinterpret_cast<intptr_t>(new_pc));
}
// Patch the return address to return into the code with
// debug break slots.
- frame->set_pc(
- new_code->instruction_start() + delta + debug_break_slot_bytes);
+ frame->set_pc(new_pc);
}
}
@@ -1957,6 +1987,9 @@ void Debug::PrepareForBreakPoints() {
Handle<Code> lazy_compile =
Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
+ // There will be at least one break point when we are done.
+ has_break_points_ = true;
+
// Keep the list of activated functions in a handlified list as it
// is used both in GC and non-GC code.
List<Handle<JSFunction> > active_functions(100);
@@ -2013,6 +2046,7 @@ void Debug::PrepareForBreakPoints() {
// patch the return address to run in the new compiled code.
for (int i = 0; i < active_functions.length(); i++) {
Handle<JSFunction> function = active_functions[i];
+ Handle<SharedFunctionInfo> shared(function->shared());
if (function->code()->kind() == Code::FUNCTION &&
function->code()->has_debug_break_slots()) {
@@ -2020,7 +2054,6 @@ void Debug::PrepareForBreakPoints() {
continue;
}
- Handle<SharedFunctionInfo> shared(function->shared());
// If recompilation is not possible just skip it.
if (shared->is_toplevel() ||
!shared->allows_lazy_compilation() ||
@@ -2034,13 +2067,12 @@ void Debug::PrepareForBreakPoints() {
// Try to compile the full code with debug break slots. If it
// fails just keep the current code.
Handle<Code> current_code(function->shared()->code());
- ZoneScope zone_scope(isolate_, DELETE_ON_EXIT);
shared->set_code(*lazy_compile);
bool prev_force_debugger_active =
isolate_->debugger()->force_debugger_active();
isolate_->debugger()->set_force_debugger_active(true);
ASSERT(current_code->kind() == Code::FUNCTION);
- CompileFullCodeForDebugging(shared, current_code);
+ CompileFullCodeForDebugging(function, current_code);
isolate_->debugger()->set_force_debugger_active(
prev_force_debugger_active);
if (!shared->is_compiled()) {
@@ -2063,16 +2095,130 @@ void Debug::PrepareForBreakPoints() {
}
+Object* Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
+ int position) {
+ // Iterate the heap looking for SharedFunctionInfo generated from the
+ // script. The inner most SharedFunctionInfo containing the source position
+ // for the requested break point is found.
+ // NOTE: This might require several heap iterations. If the SharedFunctionInfo
+ // which is found is not compiled it is compiled and the heap is iterated
+ // again as the compilation might create inner functions from the newly
+ // compiled function and the actual requested break point might be in one of
+ // these functions.
+ // NOTE: The below fix-point iteration depends on all functions that cannot be
+ // compiled lazily without a context to not be compiled at all. Compilation
+ // will be triggered at points where we do not need a context.
+ bool done = false;
+ // The current candidate for the source position:
+ int target_start_position = RelocInfo::kNoPosition;
+ Handle<JSFunction> target_function;
+ Handle<SharedFunctionInfo> target;
+ while (!done) {
+ { // Extra scope for iterator and no-allocation.
+ isolate_->heap()->EnsureHeapIsIterable();
+ AssertNoAllocation no_alloc_during_heap_iteration;
+ HeapIterator iterator;
+ for (HeapObject* obj = iterator.next();
+ obj != NULL; obj = iterator.next()) {
+ bool found_next_candidate = false;
+ Handle<JSFunction> function;
+ Handle<SharedFunctionInfo> shared;
+ if (obj->IsJSFunction()) {
+ function = Handle<JSFunction>(JSFunction::cast(obj));
+ shared = Handle<SharedFunctionInfo>(function->shared());
+ ASSERT(shared->allows_lazy_compilation() || shared->is_compiled());
+ found_next_candidate = true;
+ } else if (obj->IsSharedFunctionInfo()) {
+ shared = Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(obj));
+ // Skip functions that we cannot compile lazily without a context,
+ // which is not available here, because there is no closure.
+ found_next_candidate = shared->is_compiled() ||
+ shared->allows_lazy_compilation_without_context();
+ }
+ if (!found_next_candidate) continue;
+ if (shared->script() == *script) {
+ // If the SharedFunctionInfo found has the requested script data and
+ // contains the source position it is a candidate.
+ int start_position = shared->function_token_position();
+ if (start_position == RelocInfo::kNoPosition) {
+ start_position = shared->start_position();
+ }
+ if (start_position <= position &&
+ position <= shared->end_position()) {
+ // If there is no candidate or this function is within the current
+ // candidate this is the new candidate.
+ if (target.is_null()) {
+ target_start_position = start_position;
+ target_function = function;
+ target = shared;
+ } else {
+ if (target_start_position == start_position &&
+ shared->end_position() == target->end_position()) {
+ // If a top-level function contains only one function
+ // declaration the source for the top-level and the function
+ // is the same. In that case prefer the non top-level function.
+ if (!shared->is_toplevel()) {
+ target_start_position = start_position;
+ target_function = function;
+ target = shared;
+ }
+ } else if (target_start_position <= start_position &&
+ shared->end_position() <= target->end_position()) {
+ // This containment check includes equality as a function
+ // inside a top-level function can share either start or end
+ // position with the top-level function.
+ target_start_position = start_position;
+ target_function = function;
+ target = shared;
+ }
+ }
+ }
+ }
+ } // End for loop.
+ } // End no-allocation scope.
+
+ if (target.is_null()) {
+ return isolate_->heap()->undefined_value();
+ }
+
+ // There will be at least one break point when we are done.
+ has_break_points_ = true;
+
+ // If the candidate found is compiled we are done.
+ done = target->is_compiled();
+ if (!done) {
+ // If the candidate is not compiled, compile it to reveal any inner
+ // functions which might contain the requested source position. This
+ // will compile all inner functions that cannot be compiled without a
+ // context, because Compiler::BuildFunctionInfo checks whether the
+ // debugger is active.
+ if (target_function.is_null()) {
+ SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION);
+ } else {
+ JSFunction::CompileLazy(target_function, KEEP_EXCEPTION);
+ }
+ }
+ } // End while loop.
+
+ return *target;
+}
+
+
// Ensures the debug information is present for shared.
-bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) {
+bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
+ Handle<JSFunction> function) {
// Return if we already have the debug info for shared.
if (HasDebugInfo(shared)) {
ASSERT(shared->is_compiled());
return true;
}
- // Ensure shared in compiled. Return false if this failed.
- if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+ // There will be at least one break point when we are done.
+ has_break_points_ = true;
+
+ // Ensure function is compiled. Return false if this failed.
+ if (!function.is_null() &&
+ !JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION)) {
return false;
}
@@ -2084,9 +2230,6 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) {
node->set_next(debug_info_list_);
debug_info_list_ = node;
- // Now there is at least one break point.
- has_break_points_ = true;
-
return true;
}
@@ -2128,9 +2271,9 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
PrepareForBreakPoints();
// Get the executing function in which the debug break occurred.
- Handle<SharedFunctionInfo> shared =
- Handle<SharedFunctionInfo>(JSFunction::cast(frame->function())->shared());
- if (!EnsureDebugInfo(shared)) {
+ Handle<JSFunction> function(JSFunction::cast(frame->function()));
+ Handle<SharedFunctionInfo> shared(function->shared());
+ if (!EnsureDebugInfo(shared, function)) {
// Return if we failed to retrieve the debug info.
return;
}
@@ -2146,7 +2289,7 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
// Find the call address in the running code. This address holds the call to
// either a DebugBreakXXX or to the debug break return entry code if the
// break point is still active after processing the break point.
- Address addr = frame->pc() - Assembler::kCallTargetAddressOffset;
+ Address addr = frame->pc() - Assembler::kPatchDebugBreakSlotReturnOffset;
// Check if the location is at JS exit or debug break slot.
bool at_js_return = false;
@@ -2220,9 +2363,9 @@ bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
PrepareForBreakPoints();
// Get the executing function in which the debug break occurred.
- Handle<SharedFunctionInfo> shared =
- Handle<SharedFunctionInfo>(JSFunction::cast(frame->function())->shared());
- if (!EnsureDebugInfo(shared)) {
+ Handle<JSFunction> function(JSFunction::cast(frame->function()));
+ Handle<SharedFunctionInfo> shared(function->shared());
+ if (!EnsureDebugInfo(shared, function)) {
// Return if we failed to retrieve the debug info.
return false;
}
@@ -2235,7 +2378,7 @@ bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
#endif
// Find the call address in the running code.
- Address addr = frame->pc() - Assembler::kCallTargetAddressOffset;
+ Address addr = frame->pc() - Assembler::kPatchDebugBreakSlotReturnOffset;
// Check if the location is at JS return.
RelocIterator it(debug_info->code());
@@ -2253,7 +2396,9 @@ bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
void Debug::FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
FrameDropMode mode,
Object** restarter_frame_function_pointer) {
- thread_local_.frame_drop_mode_ = mode;
+ if (mode != CURRENTLY_SET_MODE) {
+ thread_local_.frame_drop_mode_ = mode;
+ }
thread_local_.break_frame_id_ = new_break_frame_id;
thread_local_.restarter_frame_function_pointer_ =
restarter_frame_function_pointer;
@@ -2268,7 +2413,7 @@ const int Debug::FramePaddingLayout::kPaddingValue = kInitialSize + 1;
bool Debug::IsDebugGlobal(GlobalObject* global) {
- return IsLoaded() && global == debug_context()->global();
+ return IsLoaded() && global == debug_context()->global_object();
}
@@ -2280,12 +2425,13 @@ void Debug::ClearMirrorCache() {
// Clear the mirror cache.
Handle<String> function_name =
isolate_->factory()->LookupSymbol(CStrVector("ClearMirrorCache"));
- Handle<Object> fun(Isolate::Current()->global()->GetPropertyNoExceptionThrown(
+ Handle<Object> fun(
+ Isolate::Current()->global_object()->GetPropertyNoExceptionThrown(
*function_name));
ASSERT(fun->IsJSFunction());
bool caught_exception;
Execution::TryCall(Handle<JSFunction>::cast(fun),
- Handle<JSObject>(Debug::debug_context()->global()),
+ Handle<JSObject>(Debug::debug_context()->global_object()),
0, NULL, &caught_exception);
}
@@ -2372,6 +2518,7 @@ Debugger::Debugger(Isolate* isolate)
event_listener_data_(Handle<Object>()),
compiling_natives_(false),
is_loading_debugger_(false),
+ live_edit_enabled_(true),
never_unload_debugger_(false),
force_debugger_active_(false),
message_handler_(NULL),
@@ -2407,7 +2554,8 @@ Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name,
Handle<String> constructor_str =
isolate_->factory()->LookupSymbol(constructor_name);
Handle<Object> constructor(
- isolate_->global()->GetPropertyNoExceptionThrown(*constructor_str));
+ isolate_->global_object()->GetPropertyNoExceptionThrown(
+ *constructor_str));
ASSERT(constructor->IsJSFunction());
if (!constructor->IsJSFunction()) {
*caught_exception = true;
@@ -2415,7 +2563,7 @@ Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name,
}
Handle<Object> js_object = Execution::TryCall(
Handle<JSFunction>::cast(constructor),
- Handle<JSObject>(isolate_->debug()->debug_context()->global()),
+ Handle<JSObject>(isolate_->debug()->debug_context()->global_object()),
argc,
argv,
caught_exception);
@@ -2637,7 +2785,7 @@ void Debugger::OnAfterCompile(Handle<Script> script,
Handle<String> update_script_break_points_symbol =
isolate_->factory()->LookupAsciiSymbol("UpdateScriptBreakPoints");
Handle<Object> update_script_break_points =
- Handle<Object>(debug->debug_context()->global()->
+ Handle<Object>(debug->debug_context()->global_object()->
GetPropertyNoExceptionThrown(*update_script_break_points_symbol));
if (!update_script_break_points->IsJSFunction()) {
return;
@@ -2682,6 +2830,7 @@ void Debugger::OnScriptCollected(int id) {
HandleScope scope(isolate_);
// No more to do if not debugging.
+ if (isolate_->debug()->InDebugger()) return;
if (!IsDebuggerActive()) return;
if (!Debugger::EventActive(v8::ScriptCollected)) return;
@@ -2793,7 +2942,7 @@ void Debugger::CallJSEventCallback(v8::DebugEvent event,
event_listener_data_ };
bool caught_exception;
Execution::TryCall(fun,
- isolate_->global(),
+ isolate_->global_object(),
ARRAY_SIZE(argv),
argv,
&caught_exception);
diff --git a/src/3rdparty/v8/src/debug.h b/src/3rdparty/v8/src/debug.h
index d9c966c..150e29e 100644
--- a/src/3rdparty/v8/src/debug.h
+++ b/src/3rdparty/v8/src/debug.h
@@ -232,19 +232,16 @@ class Debug {
void PreemptionWhileInDebugger();
void Iterate(ObjectVisitor* v);
- NO_INLINE(void PutValuesOnStackAndDie(int start,
- Address c_entry_fp,
- Address last_fp,
- Address larger_fp,
- int count,
- int end));
Object* Break(Arguments args);
- void SetBreakPoint(Handle<SharedFunctionInfo> shared,
+ void SetBreakPoint(Handle<JSFunction> function,
Handle<Object> break_point_object,
int* source_position);
+ bool SetBreakPointForScript(Handle<Script> script,
+ Handle<Object> break_point_object,
+ int* source_position);
void ClearBreakPoint(Handle<Object> break_point_object);
void ClearAllBreakPoints();
- void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
+ void FloodWithOneShot(Handle<JSFunction> function);
void FloodBoundFunctionWithOneShot(Handle<JSFunction> function);
void FloodHandlerWithOneShot();
void ChangeBreakOnException(ExceptionBreakType type, bool enable);
@@ -260,8 +257,14 @@ class Debug {
void PrepareForBreakPoints();
- // Returns whether the operation succeeded.
- bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared);
+ // This function is used in FunctionNameUsing* tests.
+ Object* FindSharedFunctionInfoInScript(Handle<Script> script, int position);
+
+ // Returns whether the operation succeeded. Compilation can only be triggered
+ // if a valid closure is passed as the second argument, otherwise the shared
+ // function needs to be compiled already.
+ bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
+ Handle<JSFunction> function);
// Returns true if the current stub call is patched to call the debugger.
static bool IsDebugBreak(Address addr);
@@ -440,7 +443,8 @@ class Debug {
// The top JS frame had been calling some C++ function. The return address
// gets patched automatically.
FRAME_DROPPED_IN_DIRECT_CALL,
- FRAME_DROPPED_IN_RETURN_CALL
+ FRAME_DROPPED_IN_RETURN_CALL,
+ CURRENTLY_SET_MODE
};
void FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
@@ -789,7 +793,6 @@ class Debugger {
};
void OnAfterCompile(Handle<Script> script,
AfterCompileFlags after_compile_flags);
- void OnNewFunction(Handle<JSFunction> fun);
void OnScriptCollected(int id);
void ProcessDebugEvent(v8::DebugEvent event,
Handle<JSObject> event_data,
@@ -871,6 +874,8 @@ class Debugger {
bool compiling_natives() const { return compiling_natives_; }
void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
bool is_loading_debugger() const { return is_loading_debugger_; }
+ void set_live_edit_enabled(bool v) { live_edit_enabled_ = v; }
+ bool live_edit_enabled() const { return live_edit_enabled_; }
void set_force_debugger_active(bool force_debugger_active) {
force_debugger_active_ = force_debugger_active;
}
@@ -899,6 +904,7 @@ class Debugger {
Handle<Object> event_listener_data_;
bool compiling_natives_; // Are we compiling natives?
bool is_loading_debugger_; // Are we loading the debugger?
+ bool live_edit_enabled_; // Enable LiveEdit.
bool never_unload_debugger_; // Can we unload the debugger?
bool force_debugger_active_; // Activate debugger without event listeners.
v8::Debug::MessageHandler2 message_handler_;
diff --git a/src/3rdparty/v8/src/deoptimizer.cc b/src/3rdparty/v8/src/deoptimizer.cc
index 2a30ddd..9d16211 100644
--- a/src/3rdparty/v8/src/deoptimizer.cc
+++ b/src/3rdparty/v8/src/deoptimizer.cc
@@ -27,6 +27,7 @@
#include "v8.h"
+#include "accessors.h"
#include "codegen.h"
#include "deoptimizer.h"
#include "disasm.h"
@@ -40,8 +41,11 @@ namespace v8 {
namespace internal {
DeoptimizerData::DeoptimizerData() {
- eager_deoptimization_entry_code_ = NULL;
- lazy_deoptimization_entry_code_ = NULL;
+ eager_deoptimization_entry_code_entries_ = -1;
+ lazy_deoptimization_entry_code_entries_ = -1;
+ size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize();
+ eager_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
+ lazy_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
current_ = NULL;
deoptimizing_code_list_ = NULL;
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -51,16 +55,18 @@ DeoptimizerData::DeoptimizerData() {
DeoptimizerData::~DeoptimizerData() {
- if (eager_deoptimization_entry_code_ != NULL) {
- Isolate::Current()->memory_allocator()->Free(
- eager_deoptimization_entry_code_);
- eager_deoptimization_entry_code_ = NULL;
- }
- if (lazy_deoptimization_entry_code_ != NULL) {
- Isolate::Current()->memory_allocator()->Free(
- lazy_deoptimization_entry_code_);
- lazy_deoptimization_entry_code_ = NULL;
+ delete eager_deoptimization_entry_code_;
+ eager_deoptimization_entry_code_ = NULL;
+ delete lazy_deoptimization_entry_code_;
+ lazy_deoptimization_entry_code_ = NULL;
+
+ DeoptimizingCodeListNode* current = deoptimizing_code_list_;
+ while (current != NULL) {
+ DeoptimizingCodeListNode* prev = current;
+ current = current->next();
+ delete prev;
}
+ deoptimizing_code_list_ = NULL;
}
@@ -95,6 +101,20 @@ Deoptimizer* Deoptimizer::New(JSFunction* function,
}
+// No larger than 2K on all platforms
+static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
+
+
+size_t Deoptimizer::GetMaxDeoptTableSize() {
+ int entries_size =
+ Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
+ int commit_page_size = static_cast<int>(OS::CommitPageSize());
+ int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
+ commit_page_size) + 1;
+ return static_cast<size_t>(commit_page_size * page_count);
+}
+
+
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
Deoptimizer* result = isolate->deoptimizer_data()->current_;
@@ -268,20 +288,29 @@ void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
Context* context, OptimizedFunctionVisitor* visitor) {
+ Isolate* isolate = context->GetIsolate();
+ ZoneScope zone_scope(isolate->runtime_zone(), DELETE_ON_EXIT);
AssertNoAllocation no_allocation;
- ASSERT(context->IsGlobalContext());
+ ASSERT(context->IsNativeContext());
visitor->EnterContext(context);
- // Run through the list of optimized functions and deoptimize them.
+
+ // Create a snapshot of the optimized functions list. This is needed because
+ // visitors might remove more than one link from the list at once.
+ ZoneList<JSFunction*> snapshot(1, isolate->runtime_zone());
Object* element = context->OptimizedFunctionsListHead();
while (!element->IsUndefined()) {
JSFunction* element_function = JSFunction::cast(element);
- // Get the next link before deoptimizing as deoptimizing will clear the
- // next link.
+ snapshot.Add(element_function, isolate->runtime_zone());
element = element_function->next_function_link();
- visitor->VisitFunction(element_function);
}
+
+ // Run through the snapshot of optimized functions and visit them.
+ for (int i = 0; i < snapshot.length(); ++i) {
+ visitor->VisitFunction(snapshot.at(i));
+ }
+
visitor->LeaveContext(context);
}
@@ -294,10 +323,10 @@ void Deoptimizer::VisitAllOptimizedFunctionsForGlobalObject(
Object* proto = object->GetPrototype();
ASSERT(proto->IsJSGlobalObject());
VisitAllOptimizedFunctionsForContext(
- GlobalObject::cast(proto)->global_context(), visitor);
+ GlobalObject::cast(proto)->native_context(), visitor);
} else if (object->IsGlobalObject()) {
VisitAllOptimizedFunctionsForContext(
- GlobalObject::cast(object)->global_context(), visitor);
+ GlobalObject::cast(object)->native_context(), visitor);
}
}
@@ -306,12 +335,12 @@ void Deoptimizer::VisitAllOptimizedFunctions(
OptimizedFunctionVisitor* visitor) {
AssertNoAllocation no_allocation;
- // Run through the list of all global contexts and deoptimize.
- Object* context = Isolate::Current()->heap()->global_contexts_list();
+ // Run through the list of all native contexts and deoptimize.
+ Object* context = Isolate::Current()->heap()->native_contexts_list();
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the global field of the context can be undefined.
- Object* global = Context::cast(context)->get(Context::GLOBAL_INDEX);
+ Object* global = Context::cast(context)->get(Context::GLOBAL_OBJECT_INDEX);
if (!global->IsUndefined()) {
VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global),
visitor);
@@ -354,10 +383,13 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
bailout_type_(type),
from_(from),
fp_to_sp_delta_(fp_to_sp_delta),
+ has_alignment_padding_(0),
input_(NULL),
output_count_(0),
jsframe_count_(0),
output_(NULL),
+ deferred_arguments_objects_values_(0),
+ deferred_arguments_objects_(0),
deferred_heap_numbers_(0) {
if (FLAG_trace_deopt && type != OSR) {
if (type == DEBUGGER) {
@@ -378,6 +410,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
reinterpret_cast<intptr_t>(from),
fp_to_sp_delta - (2 * kPointerSize));
}
+ function->shared()->increment_deopt_count();
// Find the optimized code.
if (type == EAGER) {
ASSERT(from == NULL);
@@ -440,61 +473,61 @@ void Deoptimizer::DeleteFrameDescriptions() {
}
-Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
+Address Deoptimizer::GetDeoptimizationEntry(int id,
+ BailoutType type,
+ GetEntryMode mode) {
ASSERT(id >= 0);
- if (id >= kNumberOfEntries) return NULL;
- MemoryChunk* base = NULL;
+ if (id >= kMaxNumberOfEntries) return NULL;
+ VirtualMemory* base = NULL;
+ if (mode == ENSURE_ENTRY_CODE) {
+ EnsureCodeForDeoptimizationEntry(type, id);
+ } else {
+ ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
+ }
DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
if (type == EAGER) {
- if (data->eager_deoptimization_entry_code_ == NULL) {
- data->eager_deoptimization_entry_code_ = CreateCode(type);
- }
base = data->eager_deoptimization_entry_code_;
} else {
- if (data->lazy_deoptimization_entry_code_ == NULL) {
- data->lazy_deoptimization_entry_code_ = CreateCode(type);
- }
base = data->lazy_deoptimization_entry_code_;
}
return
- static_cast<Address>(base->area_start()) + (id * table_entry_size_);
+ static_cast<Address>(base->address()) + (id * table_entry_size_);
}
int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
- MemoryChunk* base = NULL;
+ VirtualMemory* base = NULL;
DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
if (type == EAGER) {
base = data->eager_deoptimization_entry_code_;
} else {
base = data->lazy_deoptimization_entry_code_;
}
+ Address base_casted = reinterpret_cast<Address>(base->address());
if (base == NULL ||
- addr < base->area_start() ||
- addr >= base->area_start() +
- (kNumberOfEntries * table_entry_size_)) {
+ addr < base->address() ||
+ addr >= base_casted + (kMaxNumberOfEntries * table_entry_size_)) {
return kNotDeoptimizationEntry;
}
ASSERT_EQ(0,
- static_cast<int>(addr - base->area_start()) % table_entry_size_);
- return static_cast<int>(addr - base->area_start()) / table_entry_size_;
+ static_cast<int>(addr - base_casted) % table_entry_size_);
+ return static_cast<int>(addr - base_casted) / table_entry_size_;
}
int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
- unsigned id,
+ BailoutId id,
SharedFunctionInfo* shared) {
// TODO(kasperl): For now, we do a simple linear search for the PC
// offset associated with the given node id. This should probably be
// changed to a binary search.
int length = data->DeoptPoints();
- Smi* smi_id = Smi::FromInt(id);
for (int i = 0; i < length; i++) {
- if (data->AstId(i) == smi_id) {
+ if (data->AstId(i) == id) {
return data->PcAndState(i)->value();
}
}
- PrintF("[couldn't find pc offset for node=%u]\n", id);
+ PrintF("[couldn't find pc offset for node=%d]\n", id.ToInt());
PrintF("[method: %s]\n", *shared->DebugName()->ToCString());
// Print the source code if available.
HeapStringAllocator string_allocator;
@@ -541,7 +574,7 @@ void Deoptimizer::DoComputeOutputFrames() {
// described by the input data.
DeoptimizationInputData* input_data =
DeoptimizationInputData::cast(optimized_code_->deoptimization_data());
- unsigned node_id = input_data->AstId(bailout_id_)->value();
+ BailoutId node_id = input_data->AstId(bailout_id_);
ByteArray* translations = input_data->TranslationByteArray();
unsigned translation_index =
input_data->TranslationIndex(bailout_id_)->value();
@@ -579,7 +612,24 @@ void Deoptimizer::DoComputeOutputFrames() {
case Translation::CONSTRUCT_STUB_FRAME:
DoComputeConstructStubFrame(&iterator, i);
break;
- default:
+ case Translation::GETTER_STUB_FRAME:
+ DoComputeAccessorStubFrame(&iterator, i, false);
+ break;
+ case Translation::SETTER_STUB_FRAME:
+ DoComputeAccessorStubFrame(&iterator, i, true);
+ break;
+ case Translation::BEGIN:
+ case Translation::REGISTER:
+ case Translation::INT32_REGISTER:
+ case Translation::UINT32_REGISTER:
+ case Translation::DOUBLE_REGISTER:
+ case Translation::STACK_SLOT:
+ case Translation::INT32_STACK_SLOT:
+ case Translation::UINT32_STACK_SLOT:
+ case Translation::DOUBLE_STACK_SLOT:
+ case Translation::LITERAL:
+ case Translation::ARGUMENTS_OBJECT:
+ case Translation::DUPLICATE:
UNREACHABLE();
break;
}
@@ -593,19 +643,34 @@ void Deoptimizer::DoComputeOutputFrames() {
PrintF("[deoptimizing: end 0x%08" V8PRIxPTR " ",
reinterpret_cast<intptr_t>(function));
function->PrintName();
- PrintF(" => node=%u, pc=0x%08" V8PRIxPTR ", state=%s, took %0.3f ms]\n",
- node_id,
+ PrintF(" => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
+ " took %0.3f ms]\n",
+ node_id.ToInt(),
output_[index]->GetPc(),
FullCodeGenerator::State2String(
static_cast<FullCodeGenerator::State>(
output_[index]->GetState()->value())),
+ has_alignment_padding_ ? "with padding" : "no padding",
ms);
}
}
-void Deoptimizer::MaterializeHeapNumbers() {
+void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
ASSERT_NE(DEBUGGER, bailout_type_);
+
+ // Handlify all argument object values before triggering any allocation.
+ List<Handle<Object> > values(deferred_arguments_objects_values_.length());
+ for (int i = 0; i < deferred_arguments_objects_values_.length(); ++i) {
+ values.Add(Handle<Object>(deferred_arguments_objects_values_[i]));
+ }
+
+ // Play it safe and clear all unhandlified values before we continue.
+ deferred_arguments_objects_values_.Clear();
+
+ // Materialize all heap numbers before looking at arguments because when the
+ // output frames are used to materialize arguments objects later on they need
+ // to already contain valid heap numbers.
for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
Handle<Object> num = isolate_->factory()->NewNumber(d.value());
@@ -615,9 +680,55 @@ void Deoptimizer::MaterializeHeapNumbers() {
d.value(),
d.slot_address());
}
-
Memory::Object_at(d.slot_address()) = *num;
}
+
+ // Materialize arguments objects one frame at a time.
+ for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
+ if (frame_index != 0) it->Advance();
+ JavaScriptFrame* frame = it->frame();
+ Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate_);
+ Handle<JSObject> arguments;
+ for (int i = frame->ComputeExpressionsCount() - 1; i >= 0; --i) {
+ if (frame->GetExpression(i) == isolate_->heap()->arguments_marker()) {
+ ArgumentsObjectMaterializationDescriptor descriptor =
+ deferred_arguments_objects_.RemoveLast();
+ const int length = descriptor.arguments_length();
+ if (arguments.is_null()) {
+ if (frame->has_adapted_arguments()) {
+ // Use the arguments adapter frame we just built to materialize the
+ // arguments object. FunctionGetArguments can't throw an exception,
+ // so cast away the doubt with an assert.
+ arguments = Handle<JSObject>(JSObject::cast(
+ Accessors::FunctionGetArguments(*function,
+ NULL)->ToObjectUnchecked()));
+ values.RewindBy(length);
+ } else {
+ // Construct an arguments object and copy the parameters to a newly
+ // allocated arguments object backing store.
+ arguments =
+ isolate_->factory()->NewArgumentsObject(function, length);
+ Handle<FixedArray> array =
+ isolate_->factory()->NewFixedArray(length);
+ ASSERT(array->length() == length);
+ for (int i = length - 1; i >= 0 ; --i) {
+ array->set(i, *values.RemoveLast());
+ }
+ arguments->set_elements(*array);
+ }
+ }
+ frame->SetExpression(i, *arguments);
+ ASSERT_EQ(Memory::Object_at(descriptor.slot_address()), *arguments);
+ if (FLAG_trace_deopt) {
+ PrintF("Materializing %sarguments object for %p: ",
+ frame->has_adapted_arguments() ? "(adapted) " : "",
+ reinterpret_cast<void*>(descriptor.slot_address()));
+ arguments->ShortPrint();
+ PrintF("\n");
+ }
+ }
+ }
+ }
}
@@ -696,6 +807,8 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
case Translation::CONSTRUCT_STUB_FRAME:
+ case Translation::GETTER_STUB_FRAME:
+ case Translation::SETTER_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE();
return;
@@ -744,6 +857,34 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
return;
}
+ case Translation::UINT32_REGISTER: {
+ int input_reg = iterator->Next();
+ uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
+ bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
+ if (FLAG_trace_deopt) {
+ PrintF(
+ " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
+ " ; uint %s (%s)\n",
+ output_[frame_index]->GetTop() + output_offset,
+ output_offset,
+ value,
+ converter.NameOfCPURegister(input_reg),
+ is_smi ? "smi" : "heap number");
+ }
+ if (is_smi) {
+ intptr_t tagged_value =
+ reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
+ output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+ } else {
+ // We save the untagged value on the side and store a GC-safe
+ // temporary placeholder in the frame.
+ AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
+ static_cast<double>(static_cast<uint32_t>(value)));
+ output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
+ }
+ return;
+ }
+
case Translation::DOUBLE_REGISTER: {
int input_reg = iterator->Next();
double value = input_->GetDoubleRegister(input_reg);
@@ -769,7 +910,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": ",
output_[frame_index]->GetTop() + output_offset);
- PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d] ",
+ PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
output_offset,
input_value,
input_offset);
@@ -789,7 +930,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": ",
output_[frame_index]->GetTop() + output_offset);
- PrintF("[top + %d] <- %" V8PRIdPTR " ; [esp + %d] (%s)\n",
+ PrintF("[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
output_offset,
value,
input_offset,
@@ -809,13 +950,43 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
return;
}
+ case Translation::UINT32_STACK_SLOT: {
+ int input_slot_index = iterator->Next();
+ unsigned input_offset =
+ input_->GetOffsetFromSlotIndex(input_slot_index);
+ uintptr_t value =
+ static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
+ bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": ",
+ output_[frame_index]->GetTop() + output_offset);
+ PrintF("[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
+ output_offset,
+ value,
+ input_offset,
+ is_smi ? "smi" : "heap number");
+ }
+ if (is_smi) {
+ intptr_t tagged_value =
+ reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
+ output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+ } else {
+ // We save the untagged value on the side and store a GC-safe
+ // temporary placeholder in the frame.
+ AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
+ static_cast<double>(static_cast<uint32_t>(value)));
+ output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
+ }
+ return;
+ }
+
case Translation::DOUBLE_STACK_SLOT: {
int input_slot_index = iterator->Next();
unsigned input_offset =
input_->GetOffsetFromSlotIndex(input_slot_index);
double value = input_->GetDoubleFrameSlot(input_offset);
if (FLAG_trace_deopt) {
- PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [esp + %d]\n",
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
output_[frame_index]->GetTop() + output_offset,
output_offset,
value,
@@ -843,8 +1014,8 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
}
case Translation::ARGUMENTS_OBJECT: {
- // Use the arguments marker value as a sentinel and fill in the arguments
- // object after the deoptimized frame is built.
+ int args_index = iterator->Next() + 1; // Skip receiver.
+ int args_length = iterator->Next() - 1; // Skip receiver.
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ",
output_[frame_index]->GetTop() + output_offset,
@@ -852,15 +1023,76 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
isolate_->heap()->arguments_marker()->ShortPrint();
PrintF(" ; arguments object\n");
}
+ // Use the arguments marker value as a sentinel and fill in the arguments
+ // object after the deoptimized frame is built.
intptr_t value = reinterpret_cast<intptr_t>(
isolate_->heap()->arguments_marker());
+ AddArgumentsObject(
+ output_[frame_index]->GetTop() + output_offset, args_length);
output_[frame_index]->SetFrameSlot(output_offset, value);
+ // We save the tagged argument values on the side and materialize the
+ // actual arguments object after the deoptimized frame is built.
+ for (int i = 0; i < args_length; i++) {
+ unsigned input_offset = input_->GetOffsetFromSlotIndex(args_index + i);
+ intptr_t input_value = input_->GetFrameSlot(input_offset);
+ AddArgumentsObjectValue(input_value);
+ }
return;
}
}
}
+static bool ObjectToInt32(Object* obj, int32_t* value) {
+ if (obj->IsSmi()) {
+ *value = Smi::cast(obj)->value();
+ return true;
+ }
+
+ if (obj->IsHeapNumber()) {
+ double num = HeapNumber::cast(obj)->value();
+ if (FastI2D(FastD2I(num)) != num) {
+ if (FLAG_trace_osr) {
+ PrintF("**** %g could not be converted to int32 ****\n",
+ HeapNumber::cast(obj)->value());
+ }
+ return false;
+ }
+
+ *value = FastD2I(num);
+ return true;
+ }
+
+ return false;
+}
+
+
+static bool ObjectToUint32(Object* obj, uint32_t* value) {
+ if (obj->IsSmi()) {
+ if (Smi::cast(obj)->value() < 0) return false;
+
+ *value = static_cast<uint32_t>(Smi::cast(obj)->value());
+ return true;
+ }
+
+ if (obj->IsHeapNumber()) {
+ double num = HeapNumber::cast(obj)->value();
+ if ((num < 0) || (FastUI2D(FastD2UI(num)) != num)) {
+ if (FLAG_trace_osr) {
+ PrintF("**** %g could not be converted to uint32 ****\n",
+ HeapNumber::cast(obj)->value());
+ }
+ return false;
+ }
+
+ *value = FastD2UI(num);
+ return true;
+ }
+
+ return false;
+}
+
+
bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
int* input_offset) {
disasm::NameConverter converter;
@@ -883,6 +1115,8 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
case Translation::CONSTRUCT_STUB_FRAME:
+ case Translation::GETTER_STUB_FRAME:
+ case Translation::SETTER_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE(); // Malformed input.
return false;
@@ -900,22 +1134,10 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
}
case Translation::INT32_REGISTER: {
- // Abort OSR if we don't have a number.
- if (!input_object->IsNumber()) return false;
+ int32_t int32_value = 0;
+ if (!ObjectToInt32(input_object, &int32_value)) return false;
int output_reg = iterator->Next();
- int int32_value = input_object->IsSmi()
- ? Smi::cast(input_object)->value()
- : FastD2I(input_object->Number());
- // Abort the translation if the conversion lost information.
- if (!input_object->IsSmi() &&
- FastI2D(int32_value) != input_object->Number()) {
- if (FLAG_trace_osr) {
- PrintF("**** %g could not be converted to int32 ****\n",
- input_object->Number());
- }
- return false;
- }
if (FLAG_trace_osr) {
PrintF(" %s <- %d (int32) ; [sp + %d]\n",
converter.NameOfCPURegister(output_reg),
@@ -926,6 +1148,21 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
break;
}
+ case Translation::UINT32_REGISTER: {
+ uint32_t uint32_value = 0;
+ if (!ObjectToUint32(input_object, &uint32_value)) return false;
+
+ int output_reg = iterator->Next();
+ if (FLAG_trace_osr) {
+ PrintF(" %s <- %u (uint32) ; [sp + %d]\n",
+ converter.NameOfCPURegister(output_reg),
+ uint32_value,
+ *input_offset);
+ }
+ output->SetRegister(output_reg, static_cast<int32_t>(uint32_value));
+ }
+
+
case Translation::DOUBLE_REGISTER: {
// Abort OSR if we don't have a number.
if (!input_object->IsNumber()) return false;
@@ -959,24 +1196,12 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
}
case Translation::INT32_STACK_SLOT: {
- // Abort OSR if we don't have a number.
- if (!input_object->IsNumber()) return false;
+ int32_t int32_value = 0;
+ if (!ObjectToInt32(input_object, &int32_value)) return false;
int output_index = iterator->Next();
unsigned output_offset =
output->GetOffsetFromSlotIndex(output_index);
- int int32_value = input_object->IsSmi()
- ? Smi::cast(input_object)->value()
- : DoubleToInt32(input_object->Number());
- // Abort the translation if the conversion lost information.
- if (!input_object->IsSmi() &&
- FastI2D(int32_value) != input_object->Number()) {
- if (FLAG_trace_osr) {
- PrintF("**** %g could not be converted to int32 ****\n",
- input_object->Number());
- }
- return false;
- }
if (FLAG_trace_osr) {
PrintF(" [sp + %d] <- %d (int32) ; [sp + %d]\n",
output_offset,
@@ -987,6 +1212,23 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
break;
}
+ case Translation::UINT32_STACK_SLOT: {
+ uint32_t uint32_value = 0;
+ if (!ObjectToUint32(input_object, &uint32_value)) return false;
+
+ int output_index = iterator->Next();
+ unsigned output_offset =
+ output->GetOffsetFromSlotIndex(output_index);
+ if (FLAG_trace_osr) {
+ PrintF(" [sp + %d] <- %u (uint32) ; [sp + %d]\n",
+ output_offset,
+ uint32_value,
+ *input_offset);
+ }
+ output->SetFrameSlot(output_offset, static_cast<int32_t>(uint32_value));
+ break;
+ }
+
case Translation::DOUBLE_STACK_SLOT: {
static const int kLowerOffset = 0 * kPointerSize;
static const int kUpperOffset = 1 * kPointerSize;
@@ -1136,39 +1378,63 @@ Object* Deoptimizer::ComputeLiteral(int index) const {
}
-void Deoptimizer::AddDoubleValue(intptr_t slot_address,
- double value) {
+void Deoptimizer::AddArgumentsObject(intptr_t slot_address, int argc) {
+ ArgumentsObjectMaterializationDescriptor object_desc(
+ reinterpret_cast<Address>(slot_address), argc);
+ deferred_arguments_objects_.Add(object_desc);
+}
+
+
+void Deoptimizer::AddArgumentsObjectValue(intptr_t value) {
+ deferred_arguments_objects_values_.Add(reinterpret_cast<Object*>(value));
+}
+
+
+void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
HeapNumberMaterializationDescriptor value_desc(
reinterpret_cast<Address>(slot_address), value);
deferred_heap_numbers_.Add(value_desc);
}
-MemoryChunk* Deoptimizer::CreateCode(BailoutType type) {
+void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type,
+ int max_entry_id) {
// We cannot run this if the serializer is enabled because this will
// cause us to emit relocation information for the external
// references. This is fine because the deoptimizer's code section
// isn't meant to be serialized at all.
ASSERT(!Serializer::enabled());
+ ASSERT(type == EAGER || type == LAZY);
+ DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+ int entry_count = (type == EAGER)
+ ? data->eager_deoptimization_entry_code_entries_
+ : data->lazy_deoptimization_entry_code_entries_;
+ if (max_entry_id < entry_count) return;
+ entry_count = Min(Max(entry_count * 2, Deoptimizer::kMinNumberOfEntries),
+ Deoptimizer::kMaxNumberOfEntries);
+
MacroAssembler masm(Isolate::Current(), NULL, 16 * KB);
masm.set_emit_debug_code(false);
- GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type);
+ GenerateDeoptimizationEntries(&masm, entry_count, type);
CodeDesc desc;
masm.GetCode(&desc);
ASSERT(desc.reloc_size == 0);
- MemoryChunk* chunk =
- Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size,
- EXECUTABLE,
- NULL);
- ASSERT(chunk->area_size() >= desc.instr_size);
- if (chunk == NULL) {
- V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table");
+ VirtualMemory* memory = type == EAGER
+ ? data->eager_deoptimization_entry_code_
+ : data->lazy_deoptimization_entry_code_;
+ size_t table_size = Deoptimizer::GetMaxDeoptTableSize();
+ ASSERT(static_cast<int>(table_size) >= desc.instr_size);
+ memory->Commit(memory->address(), table_size, true);
+ memcpy(memory->address(), desc.buffer, desc.instr_size);
+ CPU::FlushICache(memory->address(), desc.instr_size);
+
+ if (type == EAGER) {
+ data->eager_deoptimization_entry_code_entries_ = entry_count;
+ } else {
+ data->lazy_deoptimization_entry_code_entries_ = entry_count;
}
- memcpy(chunk->area_start(), desc.buffer, desc.instr_size);
- CPU::FlushICache(chunk->area_start(), desc.instr_size);
- return chunk;
}
@@ -1210,6 +1476,54 @@ void Deoptimizer::RemoveDeoptimizingCode(Code* code) {
}
+static Object* CutOutRelatedFunctionsList(Context* context,
+ Code* code,
+ Object* undefined) {
+ Object* result_list_head = undefined;
+ Object* head;
+ Object* current;
+ current = head = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
+ JSFunction* prev = NULL;
+ while (current != undefined) {
+ JSFunction* func = JSFunction::cast(current);
+ current = func->next_function_link();
+ if (func->code() == code) {
+ func->set_next_function_link(result_list_head);
+ result_list_head = func;
+ if (prev) {
+ prev->set_next_function_link(current);
+ } else {
+ head = current;
+ }
+ } else {
+ prev = func;
+ }
+ }
+ if (head != context->get(Context::OPTIMIZED_FUNCTIONS_LIST)) {
+ context->set(Context::OPTIMIZED_FUNCTIONS_LIST, head);
+ }
+ return result_list_head;
+}
+
+
+void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
+ Code* code) {
+ Context* context = function->context()->native_context();
+
+ SharedFunctionInfo* shared = function->shared();
+
+ Object* undefined = Isolate::Current()->heap()->undefined_value();
+ Object* current = CutOutRelatedFunctionsList(context, code, undefined);
+
+ while (current != undefined) {
+ JSFunction* func = JSFunction::cast(current);
+ current = func->next_function_link();
+ func->set_code(shared->code());
+ func->set_next_function_link(undefined);
+ }
+}
+
+
FrameDescription::FrameDescription(uint32_t frame_size,
JSFunction* function)
: frame_size_(frame_size),
@@ -1290,7 +1604,7 @@ Object* FrameDescription::GetExpression(int index) {
}
-void TranslationBuffer::Add(int32_t value) {
+void TranslationBuffer::Add(int32_t value, Zone* zone) {
// Encode the sign bit in the least significant bit.
bool is_negative = (value < 0);
uint32_t bits = ((is_negative ? -value : value) << 1) |
@@ -1299,7 +1613,7 @@ void TranslationBuffer::Add(int32_t value) {
// each byte to indicate whether or not more bytes follow.
do {
uint32_t next = bits >> 7;
- contents_.Add(((bits << 1) & 0xFF) | (next != 0));
+ contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
bits = next;
} while (bits != 0);
}
@@ -1332,95 +1646,127 @@ Handle<ByteArray> TranslationBuffer::CreateByteArray() {
void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
- buffer_->Add(CONSTRUCT_STUB_FRAME);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+ buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
+}
+
+
+void Translation::BeginGetterStubFrame(int literal_id) {
+ buffer_->Add(GETTER_STUB_FRAME, zone());
+ buffer_->Add(literal_id, zone());
+}
+
+
+void Translation::BeginSetterStubFrame(int literal_id) {
+ buffer_->Add(SETTER_STUB_FRAME, zone());
+ buffer_->Add(literal_id, zone());
}
void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
- buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+ buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
}
-void Translation::BeginJSFrame(int node_id, int literal_id, unsigned height) {
- buffer_->Add(JS_FRAME);
- buffer_->Add(node_id);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+void Translation::BeginJSFrame(BailoutId node_id,
+ int literal_id,
+ unsigned height) {
+ buffer_->Add(JS_FRAME, zone());
+ buffer_->Add(node_id.ToInt(), zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
}
void Translation::StoreRegister(Register reg) {
- buffer_->Add(REGISTER);
- buffer_->Add(reg.code());
+ buffer_->Add(REGISTER, zone());
+ buffer_->Add(reg.code(), zone());
}
void Translation::StoreInt32Register(Register reg) {
- buffer_->Add(INT32_REGISTER);
- buffer_->Add(reg.code());
+ buffer_->Add(INT32_REGISTER, zone());
+ buffer_->Add(reg.code(), zone());
+}
+
+
+void Translation::StoreUint32Register(Register reg) {
+ buffer_->Add(UINT32_REGISTER, zone());
+ buffer_->Add(reg.code(), zone());
}
void Translation::StoreDoubleRegister(DoubleRegister reg) {
- buffer_->Add(DOUBLE_REGISTER);
- buffer_->Add(DoubleRegister::ToAllocationIndex(reg));
+ buffer_->Add(DOUBLE_REGISTER, zone());
+ buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
}
void Translation::StoreStackSlot(int index) {
- buffer_->Add(STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreInt32StackSlot(int index) {
- buffer_->Add(INT32_STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(INT32_STACK_SLOT, zone());
+ buffer_->Add(index, zone());
+}
+
+
+void Translation::StoreUint32StackSlot(int index) {
+ buffer_->Add(UINT32_STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreDoubleStackSlot(int index) {
- buffer_->Add(DOUBLE_STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(DOUBLE_STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreLiteral(int literal_id) {
- buffer_->Add(LITERAL);
- buffer_->Add(literal_id);
+ buffer_->Add(LITERAL, zone());
+ buffer_->Add(literal_id, zone());
}
-void Translation::StoreArgumentsObject() {
- buffer_->Add(ARGUMENTS_OBJECT);
+void Translation::StoreArgumentsObject(int args_index, int args_length) {
+ buffer_->Add(ARGUMENTS_OBJECT, zone());
+ buffer_->Add(args_index, zone());
+ buffer_->Add(args_length, zone());
}
void Translation::MarkDuplicate() {
- buffer_->Add(DUPLICATE);
+ buffer_->Add(DUPLICATE, zone());
}
int Translation::NumberOfOperandsFor(Opcode opcode) {
switch (opcode) {
- case ARGUMENTS_OBJECT:
case DUPLICATE:
return 0;
+ case GETTER_STUB_FRAME:
+ case SETTER_STUB_FRAME:
case REGISTER:
case INT32_REGISTER:
+ case UINT32_REGISTER:
case DOUBLE_REGISTER:
case STACK_SLOT:
case INT32_STACK_SLOT:
+ case UINT32_STACK_SLOT:
case DOUBLE_STACK_SLOT:
case LITERAL:
return 1;
case BEGIN:
case ARGUMENTS_ADAPTOR_FRAME:
case CONSTRUCT_STUB_FRAME:
+ case ARGUMENTS_OBJECT:
return 2;
case JS_FRAME:
return 3;
@@ -1442,16 +1788,24 @@ const char* Translation::StringFor(Opcode opcode) {
return "ARGUMENTS_ADAPTOR_FRAME";
case CONSTRUCT_STUB_FRAME:
return "CONSTRUCT_STUB_FRAME";
+ case GETTER_STUB_FRAME:
+ return "GETTER_STUB_FRAME";
+ case SETTER_STUB_FRAME:
+ return "SETTER_STUB_FRAME";
case REGISTER:
return "REGISTER";
case INT32_REGISTER:
return "INT32_REGISTER";
+ case UINT32_REGISTER:
+ return "UINT32_REGISTER";
case DOUBLE_REGISTER:
return "DOUBLE_REGISTER";
case STACK_SLOT:
return "STACK_SLOT";
case INT32_STACK_SLOT:
return "INT32_STACK_SLOT";
+ case UINT32_STACK_SLOT:
+ return "UINT32_STACK_SLOT";
case DOUBLE_STACK_SLOT:
return "DOUBLE_STACK_SLOT";
case LITERAL:
@@ -1498,6 +1852,8 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
case Translation::CONSTRUCT_STUB_FRAME:
+ case Translation::GETTER_STUB_FRAME:
+ case Translation::SETTER_STUB_FRAME:
// Peeled off before getting here.
break;
@@ -1507,6 +1863,7 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
case Translation::REGISTER:
case Translation::INT32_REGISTER:
+ case Translation::UINT32_REGISTER:
case Translation::DOUBLE_REGISTER:
case Translation::DUPLICATE:
// We are at safepoint which corresponds to call. All registers are
@@ -1526,6 +1883,12 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
return SlotRef(slot_addr, SlotRef::INT32);
}
+ case Translation::UINT32_STACK_SLOT: {
+ int slot_index = iterator->Next();
+ Address slot_addr = SlotAddress(frame, slot_index);
+ return SlotRef(slot_addr, SlotRef::UINT32);
+ }
+
case Translation::DOUBLE_STACK_SLOT: {
int slot_index = iterator->Next();
Address slot_addr = SlotAddress(frame, slot_index);
@@ -1565,7 +1928,7 @@ Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
int inlined_jsframe_index,
int formal_parameter_count) {
AssertNoAllocation no_gc;
- int deopt_index = AstNode::kNoNumber;
+ int deopt_index = Safepoint::kNoDeoptimizationIndex;
DeoptimizationInputData* data =
static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
TranslationIterator it(data->TranslationByteArray(),
diff --git a/src/3rdparty/v8/src/deoptimizer.h b/src/3rdparty/v8/src/deoptimizer.h
index 6bc4a51..4aa38ce 100644
--- a/src/3rdparty/v8/src/deoptimizer.h
+++ b/src/3rdparty/v8/src/deoptimizer.h
@@ -57,18 +57,32 @@ class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
};
+class ArgumentsObjectMaterializationDescriptor BASE_EMBEDDED {
+ public:
+ ArgumentsObjectMaterializationDescriptor(Address slot_address, int argc)
+ : slot_address_(slot_address), arguments_length_(argc) { }
+
+ Address slot_address() const { return slot_address_; }
+ int arguments_length() const { return arguments_length_; }
+
+ private:
+ Address slot_address_;
+ int arguments_length_;
+};
+
+
class OptimizedFunctionVisitor BASE_EMBEDDED {
public:
virtual ~OptimizedFunctionVisitor() {}
// Function which is called before iteration of any optimized functions
- // from given global context.
+ // from given native context.
virtual void EnterContext(Context* context) = 0;
virtual void VisitFunction(JSFunction* function) = 0;
// Function which is called after iteration of all optimized functions
- // from given global context.
+ // from given native context.
virtual void LeaveContext(Context* context) = 0;
};
@@ -86,8 +100,10 @@ class DeoptimizerData {
#endif
private:
- MemoryChunk* eager_deoptimization_entry_code_;
- MemoryChunk* lazy_deoptimization_entry_code_;
+ int eager_deoptimization_entry_code_entries_;
+ int lazy_deoptimization_entry_code_entries_;
+ VirtualMemory* eager_deoptimization_entry_code_;
+ VirtualMemory* lazy_deoptimization_entry_code_;
Deoptimizer* current_;
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -152,6 +168,10 @@ class Deoptimizer : public Malloced {
// execution returns.
static void DeoptimizeFunction(JSFunction* function);
+ // Iterate over all the functions which share the same code object
+ // and make them use unoptimized version.
+ static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code);
+
// Deoptimize all functions in the heap.
static void DeoptimizeAll();
@@ -196,7 +216,7 @@ class Deoptimizer : public Malloced {
~Deoptimizer();
- void MaterializeHeapNumbers();
+ void MaterializeHeapObjects(JavaScriptFrameIterator* it);
#ifdef ENABLE_DEBUGGER_SUPPORT
void MaterializeHeapNumbersForDebuggerInspectableFrame(
Address parameters_top,
@@ -208,10 +228,20 @@ class Deoptimizer : public Malloced {
static void ComputeOutputFrames(Deoptimizer* deoptimizer);
- static Address GetDeoptimizationEntry(int id, BailoutType type);
+
+ enum GetEntryMode {
+ CALCULATE_ENTRY_ADDRESS,
+ ENSURE_ENTRY_CODE
+ };
+
+
+ static Address GetDeoptimizationEntry(
+ int id,
+ BailoutType type,
+ GetEntryMode mode = ENSURE_ENTRY_CODE);
static int GetDeoptimizationId(Address addr, BailoutType type);
static int GetOutputInfo(DeoptimizationOutputData* data,
- unsigned node_id,
+ BailoutId node_id,
SharedFunctionInfo* shared);
// Code generation support.
@@ -221,6 +251,10 @@ class Deoptimizer : public Malloced {
}
static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
+ static int has_alignment_padding_offset() {
+ return OFFSET_OF(Deoptimizer, has_alignment_padding_);
+ }
+
static int GetDeoptimizedCodeCount(Isolate* isolate);
static const int kNotDeoptimizationEntry = -1;
@@ -261,8 +295,11 @@ class Deoptimizer : public Malloced {
int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
+ static size_t GetMaxDeoptTableSize();
+
private:
- static const int kNumberOfEntries = 16384;
+ static const int kMinNumberOfEntries = 64;
+ static const int kMaxNumberOfEntries = 16384;
Deoptimizer(Isolate* isolate,
JSFunction* function,
@@ -280,6 +317,9 @@ class Deoptimizer : public Malloced {
int frame_index);
void DoComputeConstructStubFrame(TranslationIterator* iterator,
int frame_index);
+ void DoComputeAccessorStubFrame(TranslationIterator* iterator,
+ int frame_index,
+ bool is_setter_stub_frame);
void DoTranslateCommand(TranslationIterator* iterator,
int frame_index,
unsigned output_offset);
@@ -298,9 +338,12 @@ class Deoptimizer : public Malloced {
Object* ComputeLiteral(int index) const;
+ void AddArgumentsObject(intptr_t slot_address, int argc);
+ void AddArgumentsObjectValue(intptr_t value);
void AddDoubleValue(intptr_t slot_address, double value);
- static MemoryChunk* CreateCode(BailoutType type);
+ static void EnsureCodeForDeoptimizationEntry(BailoutType type,
+ int max_entry_id);
static void GenerateDeoptimizationEntries(
MacroAssembler* masm, int count, BailoutType type);
@@ -322,6 +365,7 @@ class Deoptimizer : public Malloced {
BailoutType bailout_type_;
Address from_;
int fp_to_sp_delta_;
+ int has_alignment_padding_;
// Input frame description.
FrameDescription* input_;
@@ -332,6 +376,8 @@ class Deoptimizer : public Malloced {
// Array of output frame descriptions.
FrameDescription** output_;
+ List<Object*> deferred_arguments_objects_values_;
+ List<ArgumentsObjectMaterializationDescriptor> deferred_arguments_objects_;
List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;
static const int table_entry_size_;
@@ -515,10 +561,10 @@ class FrameDescription {
class TranslationBuffer BASE_EMBEDDED {
public:
- TranslationBuffer() : contents_(256) { }
+ explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
int CurrentIndex() const { return contents_.length(); }
- void Add(int32_t value);
+ void Add(int32_t value, Zone* zone);
Handle<ByteArray> CreateByteArray();
@@ -554,12 +600,16 @@ class Translation BASE_EMBEDDED {
BEGIN,
JS_FRAME,
CONSTRUCT_STUB_FRAME,
+ GETTER_STUB_FRAME,
+ SETTER_STUB_FRAME,
ARGUMENTS_ADAPTOR_FRAME,
REGISTER,
INT32_REGISTER,
+ UINT32_REGISTER,
DOUBLE_REGISTER,
STACK_SLOT,
INT32_STACK_SLOT,
+ UINT32_STACK_SLOT,
DOUBLE_STACK_SLOT,
LITERAL,
ARGUMENTS_OBJECT,
@@ -569,39 +619,51 @@ class Translation BASE_EMBEDDED {
DUPLICATE
};
- Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count)
+ Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
+ Zone* zone)
: buffer_(buffer),
- index_(buffer->CurrentIndex()) {
- buffer_->Add(BEGIN);
- buffer_->Add(frame_count);
- buffer_->Add(jsframe_count);
+ index_(buffer->CurrentIndex()),
+ zone_(zone) {
+ buffer_->Add(BEGIN, zone);
+ buffer_->Add(frame_count, zone);
+ buffer_->Add(jsframe_count, zone);
}
int index() const { return index_; }
// Commands.
- void BeginJSFrame(int node_id, int literal_id, unsigned height);
+ void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
void BeginConstructStubFrame(int literal_id, unsigned height);
+ void BeginGetterStubFrame(int literal_id);
+ void BeginSetterStubFrame(int literal_id);
void StoreRegister(Register reg);
void StoreInt32Register(Register reg);
+ void StoreUint32Register(Register reg);
void StoreDoubleRegister(DoubleRegister reg);
void StoreStackSlot(int index);
void StoreInt32StackSlot(int index);
+ void StoreUint32StackSlot(int index);
void StoreDoubleStackSlot(int index);
void StoreLiteral(int literal_id);
- void StoreArgumentsObject();
+ void StoreArgumentsObject(int args_index, int args_length);
void MarkDuplicate();
+ Zone* zone() const { return zone_; }
+
static int NumberOfOperandsFor(Opcode opcode);
#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
static const char* StringFor(Opcode opcode);
#endif
+ // A literal id which refers to the JSFunction itself.
+ static const int kSelfLiteralId = -239;
+
private:
TranslationBuffer* buffer_;
int index_;
+ Zone* zone_;
};
@@ -631,6 +693,7 @@ class SlotRef BASE_EMBEDDED {
UNKNOWN,
TAGGED,
INT32,
+ UINT32,
DOUBLE,
LITERAL
};
@@ -658,6 +721,16 @@ class SlotRef BASE_EMBEDDED {
}
}
+ case UINT32: {
+ uint32_t value = Memory::uint32_at(addr_);
+ if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
+ return Handle<Object>(Smi::FromInt(static_cast<int>(value)));
+ } else {
+ return Isolate::Current()->factory()->NewNumber(
+ static_cast<double>(value));
+ }
+ }
+
case DOUBLE: {
double value = Memory::double_at(addr_);
return Isolate::Current()->factory()->NewNumber(value);
diff --git a/src/3rdparty/v8/src/disassembler.cc b/src/3rdparty/v8/src/disassembler.cc
index e3b40ab..9f8b9a8 100644
--- a/src/3rdparty/v8/src/disassembler.cc
+++ b/src/3rdparty/v8/src/disassembler.cc
@@ -244,8 +244,8 @@ static int DecodeIt(FILE* f,
out.AddFormatted(" %s, %s", Code::Kind2String(kind),
Code::ICState2String(ic_state));
if (ic_state == MONOMORPHIC) {
- PropertyType type = code->type();
- out.AddFormatted(", %s", Code::PropertyType2String(type));
+ Code::StubType type = code->type();
+ out.AddFormatted(", %s", Code::StubType2String(type));
}
if (kind == Code::CALL_IC || kind == Code::KEYED_CALL_IC) {
out.AddFormatted(", argc = %d", code->arguments_count());
diff --git a/src/3rdparty/v8/src/elements-kind.cc b/src/3rdparty/v8/src/elements-kind.cc
new file mode 100644
index 0000000..19fac44
--- /dev/null
+++ b/src/3rdparty/v8/src/elements-kind.cc
@@ -0,0 +1,139 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "elements-kind.h"
+
+#include "api.h"
+#include "elements.h"
+#include "objects.h"
+
+namespace v8 {
+namespace internal {
+
+
+const char* ElementsKindToString(ElementsKind kind) {
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
+ return accessor->name();
+}
+
+
+void PrintElementsKind(FILE* out, ElementsKind kind) {
+ FPrintF(out, "%s", ElementsKindToString(kind));
+}
+
+
+ElementsKind GetInitialFastElementsKind() {
+ if (FLAG_packed_arrays) {
+ return FAST_SMI_ELEMENTS;
+ } else {
+ return FAST_HOLEY_SMI_ELEMENTS;
+ }
+}
+
+
+struct InitializeFastElementsKindSequence {
+ static void Construct(
+ ElementsKind** fast_elements_kind_sequence_ptr) {
+ ElementsKind* fast_elements_kind_sequence =
+ new ElementsKind[kFastElementsKindCount];
+ *fast_elements_kind_sequence_ptr = fast_elements_kind_sequence;
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == FIRST_FAST_ELEMENTS_KIND);
+ fast_elements_kind_sequence[0] = FAST_SMI_ELEMENTS;
+ fast_elements_kind_sequence[1] = FAST_HOLEY_SMI_ELEMENTS;
+ fast_elements_kind_sequence[2] = FAST_DOUBLE_ELEMENTS;
+ fast_elements_kind_sequence[3] = FAST_HOLEY_DOUBLE_ELEMENTS;
+ fast_elements_kind_sequence[4] = FAST_ELEMENTS;
+ fast_elements_kind_sequence[5] = FAST_HOLEY_ELEMENTS;
+ }
+};
+
+
+static LazyInstance<ElementsKind*,
+ InitializeFastElementsKindSequence>::type
+ fast_elements_kind_sequence = LAZY_INSTANCE_INITIALIZER;
+
+
+ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number) {
+ ASSERT(sequence_number >= 0 &&
+ sequence_number < kFastElementsKindCount);
+ return fast_elements_kind_sequence.Get()[sequence_number];
+}
+
+int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind) {
+ for (int i = 0; i < kFastElementsKindCount; ++i) {
+ if (fast_elements_kind_sequence.Get()[i] == elements_kind) {
+ return i;
+ }
+ }
+ UNREACHABLE();
+ return 0;
+}
+
+
+ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind,
+ bool allow_only_packed) {
+ ASSERT(IsFastElementsKind(elements_kind));
+ ASSERT(elements_kind != TERMINAL_FAST_ELEMENTS_KIND);
+ while (true) {
+ int index =
+ GetSequenceIndexFromFastElementsKind(elements_kind) + 1;
+ elements_kind = GetFastElementsKindFromSequenceIndex(index);
+ if (!IsFastHoleyElementsKind(elements_kind) || !allow_only_packed) {
+ return elements_kind;
+ }
+ }
+ UNREACHABLE();
+ return TERMINAL_FAST_ELEMENTS_KIND;
+}
+
+
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ switch (from_kind) {
+ case FAST_SMI_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS;
+ case FAST_HOLEY_SMI_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS &&
+ to_kind != FAST_HOLEY_SMI_ELEMENTS;
+ case FAST_DOUBLE_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS &&
+ to_kind != FAST_HOLEY_SMI_ELEMENTS &&
+ to_kind != FAST_DOUBLE_ELEMENTS;
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ return to_kind == FAST_ELEMENTS ||
+ to_kind == FAST_HOLEY_ELEMENTS;
+ case FAST_ELEMENTS:
+ return to_kind == FAST_HOLEY_ELEMENTS;
+ case FAST_HOLEY_ELEMENTS:
+ return false;
+ default:
+ return false;
+ }
+}
+
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/elements-kind.h b/src/3rdparty/v8/src/elements-kind.h
new file mode 100644
index 0000000..cb3bb9c
--- /dev/null
+++ b/src/3rdparty/v8/src/elements-kind.h
@@ -0,0 +1,229 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ELEMENTS_KIND_H_
+#define V8_ELEMENTS_KIND_H_
+
+#include "v8checks.h"
+
+namespace v8 {
+namespace internal {
+
+enum ElementsKind {
+ // The "fast" kind for elements that only contain SMI values. Must be first
+ // to make it possible to efficiently check maps for this kind.
+ FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
+
+ // The "fast" kind for tagged values. Must be second to make it possible to
+ // efficiently check maps for this and the FAST_SMI_ONLY_ELEMENTS kind
+ // together at once.
+ FAST_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+
+ // The "fast" kind for unwrapped, non-tagged double values.
+ FAST_DOUBLE_ELEMENTS,
+ FAST_HOLEY_DOUBLE_ELEMENTS,
+
+ // The "slow" kind.
+ DICTIONARY_ELEMENTS,
+ NON_STRICT_ARGUMENTS_ELEMENTS,
+ // The "fast" kind for external arrays
+ EXTERNAL_BYTE_ELEMENTS,
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
+ EXTERNAL_SHORT_ELEMENTS,
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS,
+ EXTERNAL_INT_ELEMENTS,
+ EXTERNAL_UNSIGNED_INT_ELEMENTS,
+ EXTERNAL_FLOAT_ELEMENTS,
+ EXTERNAL_DOUBLE_ELEMENTS,
+ EXTERNAL_PIXEL_ELEMENTS,
+
+ // Derived constants from ElementsKind
+ FIRST_ELEMENTS_KIND = FAST_SMI_ELEMENTS,
+ LAST_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
+ FIRST_FAST_ELEMENTS_KIND = FAST_SMI_ELEMENTS,
+ LAST_FAST_ELEMENTS_KIND = FAST_HOLEY_DOUBLE_ELEMENTS,
+ FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_BYTE_ELEMENTS,
+ LAST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
+ TERMINAL_FAST_ELEMENTS_KIND = FAST_HOLEY_ELEMENTS
+};
+
+const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
+const int kFastElementsKindCount = LAST_FAST_ELEMENTS_KIND -
+ FIRST_FAST_ELEMENTS_KIND + 1;
+
+const char* ElementsKindToString(ElementsKind kind);
+void PrintElementsKind(FILE* out, ElementsKind kind);
+
+ElementsKind GetInitialFastElementsKind();
+
+ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_index);
+
+int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind);
+
+
+inline bool IsDictionaryElementsKind(ElementsKind kind) {
+ return kind == DICTIONARY_ELEMENTS;
+}
+
+
+inline bool IsExternalArrayElementsKind(ElementsKind kind) {
+ return kind >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
+ kind <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND;
+}
+
+
+inline bool IsFastElementsKind(ElementsKind kind) {
+ ASSERT(FIRST_FAST_ELEMENTS_KIND == 0);
+ return kind <= FAST_HOLEY_DOUBLE_ELEMENTS;
+}
+
+
+inline bool IsFastDoubleElementsKind(ElementsKind kind) {
+ return kind == FAST_DOUBLE_ELEMENTS ||
+ kind == FAST_HOLEY_DOUBLE_ELEMENTS;
+}
+
+
+inline bool IsDoubleOrFloatElementsKind(ElementsKind kind) {
+ return IsFastDoubleElementsKind(kind) ||
+ kind == EXTERNAL_DOUBLE_ELEMENTS ||
+ kind == EXTERNAL_FLOAT_ELEMENTS;
+}
+
+
+inline bool IsFastSmiOrObjectElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_SMI_ELEMENTS ||
+ kind == FAST_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsFastSmiElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_SMI_ELEMENTS;
+}
+
+
+inline bool IsFastObjectElementsKind(ElementsKind kind) {
+ return kind == FAST_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsFastHoleyElementsKind(ElementsKind kind) {
+ return kind == FAST_HOLEY_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_DOUBLE_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsHoleyElementsKind(ElementsKind kind) {
+ return IsFastHoleyElementsKind(kind) ||
+ kind == DICTIONARY_ELEMENTS;
+}
+
+
+inline bool IsFastPackedElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_DOUBLE_ELEMENTS ||
+ kind == FAST_ELEMENTS;
+}
+
+
+inline ElementsKind GetPackedElementsKind(ElementsKind holey_kind) {
+ if (holey_kind == FAST_HOLEY_SMI_ELEMENTS) {
+ return FAST_SMI_ELEMENTS;
+ }
+ if (holey_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
+ return FAST_DOUBLE_ELEMENTS;
+ }
+ if (holey_kind == FAST_HOLEY_ELEMENTS) {
+ return FAST_ELEMENTS;
+ }
+ return holey_kind;
+}
+
+
+inline ElementsKind GetHoleyElementsKind(ElementsKind packed_kind) {
+ if (packed_kind == FAST_SMI_ELEMENTS) {
+ return FAST_HOLEY_SMI_ELEMENTS;
+ }
+ if (packed_kind == FAST_DOUBLE_ELEMENTS) {
+ return FAST_HOLEY_DOUBLE_ELEMENTS;
+ }
+ if (packed_kind == FAST_ELEMENTS) {
+ return FAST_HOLEY_ELEMENTS;
+ }
+ return packed_kind;
+}
+
+
+inline ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind) {
+ ASSERT(IsFastSmiElementsKind(from_kind));
+ return (from_kind == FAST_SMI_ELEMENTS)
+ ? FAST_ELEMENTS
+ : FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsSimpleMapChangeTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ return (GetHoleyElementsKind(from_kind) == to_kind) ||
+ (IsFastSmiElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind));
+}
+
+
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind);
+
+
+inline bool IsTransitionableFastElementsKind(ElementsKind from_kind) {
+ return IsFastElementsKind(from_kind) &&
+ from_kind != TERMINAL_FAST_ELEMENTS_KIND;
+}
+
+
+ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind,
+ bool allow_only_packed);
+
+
+inline bool CanTransitionToMoreGeneralFastElementsKind(
+ ElementsKind elements_kind,
+ bool allow_only_packed) {
+ return IsFastElementsKind(elements_kind) &&
+ (elements_kind != TERMINAL_FAST_ELEMENTS_KIND &&
+ (!allow_only_packed || elements_kind != FAST_ELEMENTS));
+}
+
+
+} } // namespace v8::internal
+
+#endif // V8_ELEMENTS_KIND_H_
diff --git a/src/3rdparty/v8/src/elements.cc b/src/3rdparty/v8/src/elements.cc
index 9b8548d..8cb48c6 100644
--- a/src/3rdparty/v8/src/elements.cc
+++ b/src/3rdparty/v8/src/elements.cc
@@ -39,8 +39,14 @@
// Inheritance hierarchy:
// - ElementsAccessorBase (abstract)
// - FastElementsAccessor (abstract)
-// - FastObjectElementsAccessor
+// - FastSmiOrObjectElementsAccessor
+// - FastPackedSmiElementsAccessor
+// - FastHoleySmiElementsAccessor
+// - FastPackedObjectElementsAccessor
+// - FastHoleyObjectElementsAccessor
// - FastDoubleElementsAccessor
+// - FastPackedDoubleElementsAccessor
+// - FastHoleyDoubleElementsAccessor
// - ExternalElementsAccessor (abstract)
// - ExternalByteElementsAccessor
// - ExternalUnsignedByteElementsAccessor
@@ -59,15 +65,24 @@ namespace v8 {
namespace internal {
+static const int kPackedSizeNotKnown = -1;
+
+
// First argument in list is the accessor class, the second argument is the
// accessor ElementsKind, and the third is the backing store class. Use the
// fast element handler for smi-only arrays. The implementation is currently
// identical. Note that the order must match that of the ElementsKind enum for
// the |accessor_array[]| below to work.
#define ELEMENTS_LIST(V) \
- V(FastObjectElementsAccessor, FAST_SMI_ONLY_ELEMENTS, FixedArray) \
- V(FastObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
- V(FastDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, FixedDoubleArray) \
+ V(FastPackedSmiElementsAccessor, FAST_SMI_ELEMENTS, FixedArray) \
+ V(FastHoleySmiElementsAccessor, FAST_HOLEY_SMI_ELEMENTS, \
+ FixedArray) \
+ V(FastPackedObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
+ V(FastHoleyObjectElementsAccessor, FAST_HOLEY_ELEMENTS, FixedArray) \
+ V(FastPackedDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, \
+ FixedDoubleArray) \
+ V(FastHoleyDoubleElementsAccessor, FAST_HOLEY_DOUBLE_ELEMENTS, \
+ FixedDoubleArray) \
V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, \
SeededNumberDictionary) \
V(NonStrictArgumentsElementsAccessor, NON_STRICT_ARGUMENTS_ELEMENTS, \
@@ -139,8 +154,6 @@ void CopyObjectToObjectElements(FixedArray* from,
uint32_t to_start,
int raw_copy_size) {
ASSERT(to->map() != HEAP->fixed_cow_array_map());
- ASSERT(from_kind == FAST_ELEMENTS || from_kind == FAST_SMI_ONLY_ELEMENTS);
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
@@ -148,7 +161,7 @@ void CopyObjectToObjectElements(FixedArray* from,
copy_size = Min(from->length() - from_start,
to->length() - to_start);
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -160,12 +173,15 @@ void CopyObjectToObjectElements(FixedArray* from,
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return;
+ ASSERT(IsFastSmiOrObjectElementsKind(from_kind));
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
Address to_address = to->address() + FixedArray::kHeaderSize;
Address from_address = from->address() + FixedArray::kHeaderSize;
CopyWords(reinterpret_cast<Object**>(to_address) + to_start,
reinterpret_cast<Object**>(from_address) + from_start,
copy_size);
- if (from_kind == FAST_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
Heap* heap = from->GetHeap();
if (!heap->InNewSpace(to)) {
heap->RecordWrites(to->address(),
@@ -190,7 +206,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = from->max_number_key() + 1 - from_start;
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // Fast object arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -200,7 +216,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
#endif
}
ASSERT(to != from);
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
if (copy_size == 0) return;
uint32_t to_length = to->length();
if (to_start + copy_size > to_length) {
@@ -216,7 +232,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
to->set_the_hole(i + to_start);
}
}
- if (to_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(to_kind)) {
if (!heap->InNewSpace(to)) {
heap->RecordWrites(to->address(),
to->OffsetOfElementAt(to_start),
@@ -234,7 +250,7 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
ElementsKind to_kind,
uint32_t to_start,
int raw_copy_size) {
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
@@ -242,7 +258,7 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
copy_size = Min(from->length() - from_start,
to->length() - to_start);
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -255,14 +271,14 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return from;
for (int i = 0; i < copy_size; ++i) {
- if (to_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(to_kind)) {
UNIMPLEMENTED();
return Failure::Exception();
} else {
MaybeObject* maybe_value = from->get(i + from_start);
Object* value;
- ASSERT(to_kind == FAST_ELEMENTS);
- // Because FAST_DOUBLE_ELEMENTS -> FAST_ELEMENT allocate HeapObjects
+ ASSERT(IsFastObjectElementsKind(to_kind));
+ // Because Double -> Object elements transitions allocate HeapObjects
// iteratively, the allocate must succeed within a single GC cycle,
// otherwise the retry after the GC will also fail. In order to ensure
// that no GC is triggered, allocate HeapNumbers from old space if they
@@ -313,6 +329,76 @@ static void CopyDoubleToDoubleElements(FixedDoubleArray* from,
}
+static void CopySmiToDoubleElements(FixedArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->length() - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ to->set_the_hole(i);
+ }
+ }
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ Object* the_hole = from->GetHeap()->the_hole_value();
+ for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
+ from_start < from_end; from_start++, to_start++) {
+ Object* hole_or_smi = from->get(from_start);
+ if (hole_or_smi == the_hole) {
+ to->set_the_hole(to_start);
+ } else {
+ to->set(to_start, Smi::cast(hole_or_smi)->value());
+ }
+ }
+}
+
+
+static void CopyPackedSmiToDoubleElements(FixedArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int packed_size,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ uint32_t to_end;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->length() - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ to_end = to->length();
+ } else {
+ to_end = to_start + static_cast<uint32_t>(copy_size);
+ }
+ } else {
+ to_end = to_start + static_cast<uint32_t>(copy_size);
+ }
+ ASSERT(static_cast<int>(to_end) <= to->length());
+ ASSERT(packed_size >= 0 && packed_size <= copy_size);
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
+ from_start < from_end; from_start++, to_start++) {
+ Object* smi = from->get(from_start);
+ ASSERT(!smi->IsTheHole());
+ to->set(to_start, Smi::cast(smi)->value());
+ }
+
+ while (to_start < to_end) {
+ to->set_the_hole(to_start++);
+ }
+}
+
+
static void CopyObjectToDoubleElements(FixedArray* from,
uint32_t from_start,
FixedDoubleArray* to,
@@ -332,12 +418,14 @@ static void CopyObjectToDoubleElements(FixedArray* from,
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return;
- for (int i = 0; i < copy_size; i++) {
- Object* hole_or_object = from->get(i + from_start);
- if (hole_or_object->IsTheHole()) {
- to->set_the_hole(i + to_start);
+ Object* the_hole = from->GetHeap()->the_hole_value();
+ for (uint32_t from_end = from_start + copy_size;
+ from_start < from_end; from_start++, to_start++) {
+ Object* hole_or_object = from->get(from_start);
+ if (hole_or_object == the_hole) {
+ to->set_the_hole(to_start);
} else {
- to->set(i + to_start, hole_or_object->Number());
+ to->set(to_start, hole_or_object->Number());
}
}
}
@@ -404,13 +492,44 @@ class ElementsAccessorBase : public ElementsAccessor {
virtual ElementsKind kind() const { return ElementsTraits::Kind; }
+ static void ValidateContents(JSObject* holder, int length) {
+ }
+
+ static void ValidateImpl(JSObject* holder) {
+ FixedArrayBase* fixed_array_base = holder->elements();
+ // When objects are first allocated, its elements are Failures.
+ if (fixed_array_base->IsFailure()) return;
+ if (!fixed_array_base->IsHeapObject()) return;
+ Map* map = fixed_array_base->map();
+ // Arrays that have been shifted in place can't be verified.
+ Heap* heap = holder->GetHeap();
+ if (map == heap->one_pointer_filler_map() ||
+ map == heap->two_pointer_filler_map() ||
+ map == heap->free_space_map()) {
+ return;
+ }
+ int length = 0;
+ if (holder->IsJSArray()) {
+ Object* length_obj = JSArray::cast(holder)->length();
+ if (length_obj->IsSmi()) {
+ length = Smi::cast(length_obj)->value();
+ }
+ } else {
+ length = fixed_array_base->length();
+ }
+ ElementsAccessorSubclass::ValidateContents(holder, length);
+ }
+
+ virtual void Validate(JSObject* holder) {
+ ElementsAccessorSubclass::ValidateImpl(holder);
+ }
+
static bool HasElementImpl(Object* receiver,
JSObject* holder,
uint32_t key,
BackingStore* backing_store) {
- MaybeObject* element =
- ElementsAccessorSubclass::GetImpl(receiver, holder, key, backing_store);
- return !element->IsTheHole();
+ return ElementsAccessorSubclass::GetAttributesImpl(
+ receiver, holder, key, backing_store) != ABSENT;
}
virtual bool HasElement(Object* receiver,
@@ -444,6 +563,29 @@ class ElementsAccessorBase : public ElementsAccessor {
: backing_store->GetHeap()->the_hole_value();
}
+ MUST_USE_RESULT virtual PropertyAttributes GetAttributes(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store) {
+ if (backing_store == NULL) {
+ backing_store = holder->elements();
+ }
+ return ElementsAccessorSubclass::GetAttributesImpl(
+ receiver, holder, key, BackingStore::cast(backing_store));
+ }
+
+ MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+ Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
+ if (key >= ElementsAccessorSubclass::GetCapacityImpl(backing_store)) {
+ return ABSENT;
+ }
+ return backing_store->is_the_hole(key) ? ABSENT : NONE;
+ }
+
MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* array,
Object* length) {
return ElementsAccessorSubclass::SetLengthImpl(
@@ -455,9 +597,10 @@ class ElementsAccessorBase : public ElementsAccessor {
Object* length,
BackingStore* backing_store);
- MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(JSArray* array,
- int capacity,
- int length) {
+ MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(
+ JSArray* array,
+ int capacity,
+ int length) {
return ElementsAccessorSubclass::SetFastElementsCapacityAndLength(
array,
capacity,
@@ -481,6 +624,7 @@ class ElementsAccessorBase : public ElementsAccessor {
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
UNREACHABLE();
return NULL;
@@ -493,14 +637,27 @@ class ElementsAccessorBase : public ElementsAccessor {
uint32_t to_start,
int copy_size,
FixedArrayBase* from) {
+ int packed_size = kPackedSizeNotKnown;
if (from == NULL) {
from = from_holder->elements();
}
+
+ if (from_holder) {
+ ElementsKind elements_kind = from_holder->GetElementsKind();
+ bool is_packed = IsFastPackedElementsKind(elements_kind) &&
+ from_holder->IsJSArray();
+ if (is_packed) {
+ packed_size = Smi::cast(JSArray::cast(from_holder)->length())->value();
+ if (copy_size >= 0 && packed_size > copy_size) {
+ packed_size = copy_size;
+ }
+ }
+ }
if (from->length() == 0) {
return from;
}
return ElementsAccessorSubclass::CopyElementsImpl(
- from, from_start, to, to_kind, to_start, copy_size);
+ from, from_start, to, to_kind, to_start, packed_size, copy_size);
}
MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray(
@@ -623,6 +780,7 @@ class FastElementsAccessor
KindTraits>(name) {}
protected:
friend class ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits>;
+ friend class NonStrictArgumentsElementsAccessor;
typedef typename KindTraits::BackingStore BackingStore;
@@ -633,10 +791,21 @@ class FastElementsAccessor
Object* length_object,
uint32_t length) {
uint32_t old_capacity = backing_store->length();
+ Object* old_length = array->length();
+ bool same_or_smaller_size = old_length->IsSmi() &&
+ static_cast<uint32_t>(Smi::cast(old_length)->value()) >= length;
+ ElementsKind kind = array->GetElementsKind();
+
+ if (!same_or_smaller_size && IsFastElementsKind(kind) &&
+ !IsFastHoleyElementsKind(kind)) {
+ kind = GetHoleyElementsKind(kind);
+ MaybeObject* maybe_obj = array->TransitionElementsKind(kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ }
// Check whether the backing store should be shrunk.
if (length <= old_capacity) {
- if (array->HasFastTypeElements()) {
+ if (array->HasFastSmiOrObjectElements()) {
MaybeObject* maybe_obj = array->EnsureWritableFastElements();
if (!maybe_obj->To(&backing_store)) return maybe_obj;
}
@@ -653,7 +822,7 @@ class FastElementsAccessor
}
} else {
// Otherwise, fill the unused tail with holes.
- int old_length = FastD2I(array->length()->Number());
+ int old_length = FastD2IChecked(array->length()->Number());
for (int i = length; i < old_length; i++) {
backing_store->set_the_hole(i);
}
@@ -668,59 +837,66 @@ class FastElementsAccessor
MaybeObject* result = FastElementsAccessorSubclass::
SetFastElementsCapacityAndLength(array, new_capacity, length);
if (result->IsFailure()) return result;
+ array->ValidateElements();
return length_object;
}
// Request conversion to slow elements.
return array->GetHeap()->undefined_value();
}
-};
-
-
-class FastObjectElementsAccessor
- : public FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize> {
- public:
- explicit FastObjectElementsAccessor(const char* name)
- : FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize>(name) {}
static MaybeObject* DeleteCommon(JSObject* obj,
- uint32_t key) {
- ASSERT(obj->HasFastElements() ||
- obj->HasFastSmiOnlyElements() ||
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ ASSERT(obj->HasFastSmiOrObjectElements() ||
+ obj->HasFastDoubleElements() ||
obj->HasFastArgumentsElements());
Heap* heap = obj->GetHeap();
- FixedArray* backing_store = FixedArray::cast(obj->elements());
- if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
- backing_store = FixedArray::cast(backing_store->get(1));
- } else {
- Object* writable;
- MaybeObject* maybe = obj->EnsureWritableFastElements();
- if (!maybe->ToObject(&writable)) return maybe;
- backing_store = FixedArray::cast(writable);
+ Object* elements = obj->elements();
+ if (elements == heap->empty_fixed_array()) {
+ return heap->true_value();
+ }
+ typename KindTraits::BackingStore* backing_store =
+ KindTraits::BackingStore::cast(elements);
+ bool is_non_strict_arguments_elements_map =
+ backing_store->map() == heap->non_strict_arguments_elements_map();
+ if (is_non_strict_arguments_elements_map) {
+ backing_store =
+ KindTraits::BackingStore::cast(
+ FixedArray::cast(backing_store)->get(1));
}
uint32_t length = static_cast<uint32_t>(
obj->IsJSArray()
? Smi::cast(JSArray::cast(obj)->length())->value()
: backing_store->length());
if (key < length) {
+ if (!is_non_strict_arguments_elements_map) {
+ ElementsKind kind = KindTraits::Kind;
+ if (IsFastPackedElementsKind(kind)) {
+ MaybeObject* transitioned =
+ obj->TransitionElementsKind(GetHoleyElementsKind(kind));
+ if (transitioned->IsFailure()) return transitioned;
+ }
+ if (IsFastSmiOrObjectElementsKind(KindTraits::Kind)) {
+ Object* writable;
+ MaybeObject* maybe = obj->EnsureWritableFastElements();
+ if (!maybe->ToObject(&writable)) return maybe;
+ backing_store = KindTraits::BackingStore::cast(writable);
+ }
+ }
backing_store->set_the_hole(key);
// If an old space backing store is larger than a certain size and
// has too few used values, normalize it.
// To avoid doing the check on every delete we require at least
// one adjacent hole to the value being deleted.
- Object* hole = heap->the_hole_value();
const int kMinLengthForSparsenessCheck = 64;
if (backing_store->length() >= kMinLengthForSparsenessCheck &&
!heap->InNewSpace(backing_store) &&
- ((key > 0 && backing_store->get(key - 1) == hole) ||
- (key + 1 < length && backing_store->get(key + 1) == hole))) {
+ ((key > 0 && backing_store->is_the_hole(key - 1)) ||
+ (key + 1 < length && backing_store->is_the_hole(key + 1)))) {
int num_used = 0;
for (int i = 0; i < backing_store->length(); ++i) {
- if (backing_store->get(i) != hole) ++num_used;
+ if (!backing_store->is_the_hole(i)) ++num_used;
// Bail out early if more than 1/4 is used.
if (4 * num_used > backing_store->length()) break;
}
@@ -733,27 +909,90 @@ class FastObjectElementsAccessor
return heap->true_value();
}
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ return DeleteCommon(obj, key, mode);
+ }
+
+ static bool HasElementImpl(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ typename KindTraits::BackingStore* backing_store) {
+ if (key >= static_cast<uint32_t>(backing_store->length())) {
+ return false;
+ }
+ return !backing_store->is_the_hole(key);
+ }
+
+ static void ValidateContents(JSObject* holder, int length) {
+#if DEBUG
+ FixedArrayBase* elements = holder->elements();
+ Heap* heap = elements->GetHeap();
+ Map* map = elements->map();
+ ASSERT((IsFastSmiOrObjectElementsKind(KindTraits::Kind) &&
+ (map == heap->fixed_array_map() ||
+ map == heap->fixed_cow_array_map())) ||
+ (IsFastDoubleElementsKind(KindTraits::Kind) ==
+ ((map == heap->fixed_array_map() && length == 0) ||
+ map == heap->fixed_double_array_map())));
+ for (int i = 0; i < length; i++) {
+ typename KindTraits::BackingStore* backing_store =
+ KindTraits::BackingStore::cast(elements);
+ ASSERT((!IsFastSmiElementsKind(KindTraits::Kind) ||
+ static_cast<Object*>(backing_store->get(i))->IsSmi()) ||
+ (IsFastHoleyElementsKind(KindTraits::Kind) ==
+ backing_store->is_the_hole(i)));
+ }
+#endif
+ }
+};
+
+
+template<typename FastElementsAccessorSubclass,
+ typename KindTraits>
+class FastSmiOrObjectElementsAccessor
+ : public FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
+ kPointerSize> {
+ public:
+ explicit FastSmiOrObjectElementsAccessor(const char* name)
+ : FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
+ kPointerSize>(name) {}
+
static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
uint32_t from_start,
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
- switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- CopyObjectToObjectElements(
- FixedArray::cast(from), ElementsTraits::Kind, from_start,
- FixedArray::cast(to), to_kind, to_start, copy_size);
- return from;
- }
- case FAST_DOUBLE_ELEMENTS:
+ if (IsFastSmiOrObjectElementsKind(to_kind)) {
+ CopyObjectToObjectElements(
+ FixedArray::cast(from), KindTraits::Kind, from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ } else if (IsFastDoubleElementsKind(to_kind)) {
+ if (IsFastSmiElementsKind(KindTraits::Kind)) {
+ if (IsFastPackedElementsKind(KindTraits::Kind) &&
+ packed_size != kPackedSizeNotKnown) {
+ CopyPackedSmiToDoubleElements(
+ FixedArray::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start,
+ packed_size, copy_size);
+ } else {
+ CopySmiToDoubleElements(
+ FixedArray::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start, copy_size);
+ }
+ } else {
CopyObjectToDoubleElements(
FixedArray::cast(from), from_start,
FixedDoubleArray::cast(to), to_start, copy_size);
- return from;
- default:
- UNREACHABLE();
+ }
+ } else {
+ UNREACHABLE();
}
return to->GetHeap()->undefined_value();
}
@@ -762,64 +1001,102 @@ class FastObjectElementsAccessor
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
- JSObject::SetFastElementsCapacityMode set_capacity_mode =
- obj->HasFastSmiOnlyElements()
- ? JSObject::kAllowSmiOnlyElements
- : JSObject::kDontAllowSmiOnlyElements;
+ JSObject::SetFastElementsCapacitySmiMode set_capacity_mode =
+ obj->HasFastSmiElements()
+ ? JSObject::kAllowSmiElements
+ : JSObject::kDontAllowSmiElements;
return obj->SetFastElementsCapacityAndLength(capacity,
length,
set_capacity_mode);
}
+};
- protected:
- friend class FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize>;
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
- return DeleteCommon(obj, key);
- }
+class FastPackedSmiElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastPackedSmiElementsAccessor,
+ ElementsKindTraits<FAST_SMI_ELEMENTS> > {
+ public:
+ explicit FastPackedSmiElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastPackedSmiElementsAccessor,
+ ElementsKindTraits<FAST_SMI_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleySmiElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastHoleySmiElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_SMI_ELEMENTS> > {
+ public:
+ explicit FastHoleySmiElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastHoleySmiElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_SMI_ELEMENTS> >(name) {}
+};
+
+
+class FastPackedObjectElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastPackedObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS> > {
+ public:
+ explicit FastPackedObjectElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastPackedObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleyObjectElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastHoleyObjectElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_ELEMENTS> > {
+ public:
+ explicit FastHoleyObjectElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastHoleyObjectElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_ELEMENTS> >(name) {}
};
+template<typename FastElementsAccessorSubclass,
+ typename KindTraits>
class FastDoubleElementsAccessor
- : public FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ : public FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
kDoubleSize> {
public:
explicit FastDoubleElementsAccessor(const char* name)
- : FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ : FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
kDoubleSize>(name) {}
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
- return obj->SetFastDoubleElementsCapacityAndLength(capacity, length);
+ return obj->SetFastDoubleElementsCapacityAndLength(capacity,
+ length);
}
protected:
- friend class ElementsAccessorBase<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
- friend class FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
- kDoubleSize>;
-
static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
uint32_t from_start,
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
return CopyDoubleToObjectElements(
FixedDoubleArray::cast(from), from_start, FixedArray::cast(to),
to_kind, to_start, copy_size);
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
CopyDoubleToDoubleElements(FixedDoubleArray::cast(from), from_start,
FixedDoubleArray::cast(to),
to_start, copy_size);
@@ -829,26 +1106,35 @@ class FastDoubleElementsAccessor
}
return to->GetHeap()->undefined_value();
}
+};
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
- int length = obj->IsJSArray()
- ? Smi::cast(JSArray::cast(obj)->length())->value()
- : FixedDoubleArray::cast(obj->elements())->length();
- if (key < static_cast<uint32_t>(length)) {
- FixedDoubleArray::cast(obj->elements())->set_the_hole(key);
- }
- return obj->GetHeap()->true_value();
- }
- static bool HasElementImpl(Object* receiver,
- JSObject* holder,
- uint32_t key,
- FixedDoubleArray* backing_store) {
- return key < static_cast<uint32_t>(backing_store->length()) &&
- !backing_store->is_the_hole(key);
- }
+class FastPackedDoubleElementsAccessor
+ : public FastDoubleElementsAccessor<
+ FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> > {
+ public:
+ friend class ElementsAccessorBase<FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
+ explicit FastPackedDoubleElementsAccessor(const char* name)
+ : FastDoubleElementsAccessor<
+ FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleyDoubleElementsAccessor
+ : public FastDoubleElementsAccessor<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> > {
+ public:
+ friend class ElementsAccessorBase<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> >;
+ explicit FastHoleyDoubleElementsAccessor(const char* name)
+ : FastDoubleElementsAccessor<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> >(name) {}
};
@@ -879,6 +1165,16 @@ class ExternalElementsAccessor
: backing_store->GetHeap()->undefined_value();
}
+ MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+ Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
+ return
+ key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store)
+ ? NONE : ABSENT;
+ }
+
MUST_USE_RESULT static MaybeObject* SetLengthImpl(
JSObject* obj,
Object* length,
@@ -1011,7 +1307,30 @@ class DictionaryElementsAccessor
JSArray* array,
Object* length_object,
uint32_t length) {
- if (length == 0) {
+ Heap* heap = array->GetHeap();
+ int capacity = dict->Capacity();
+ uint32_t new_length = length;
+ uint32_t old_length = static_cast<uint32_t>(array->length()->Number());
+ if (new_length < old_length) {
+ // Find last non-deletable element in range of elements to be
+ // deleted and adjust range accordingly.
+ for (int i = 0; i < capacity; i++) {
+ Object* key = dict->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t number = static_cast<uint32_t>(key->Number());
+ if (new_length <= number && number < old_length) {
+ PropertyDetails details = dict->DetailsAt(i);
+ if (details.IsDontDelete()) new_length = number + 1;
+ }
+ }
+ }
+ if (new_length != length) {
+ MaybeObject* maybe_object = heap->NumberFromUint32(new_length);
+ if (!maybe_object->To(&length_object)) return maybe_object;
+ }
+ }
+
+ if (new_length == 0) {
// If the length of a slow array is reset to zero, we clear
// the array and flush backing storage. This has the added
// benefit that the array returns to fast mode.
@@ -1019,45 +1338,22 @@ class DictionaryElementsAccessor
MaybeObject* maybe_obj = array->ResetElements();
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
} else {
- uint32_t new_length = length;
- uint32_t old_length = static_cast<uint32_t>(array->length()->Number());
- if (new_length < old_length) {
- // Find last non-deletable element in range of elements to be
- // deleted and adjust range accordingly.
- Heap* heap = array->GetHeap();
- int capacity = dict->Capacity();
- for (int i = 0; i < capacity; i++) {
- Object* key = dict->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t number = static_cast<uint32_t>(key->Number());
- if (new_length <= number && number < old_length) {
- PropertyDetails details = dict->DetailsAt(i);
- if (details.IsDontDelete()) new_length = number + 1;
- }
+ // Remove elements that should be deleted.
+ int removed_entries = 0;
+ Object* the_hole_value = heap->the_hole_value();
+ for (int i = 0; i < capacity; i++) {
+ Object* key = dict->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t number = static_cast<uint32_t>(key->Number());
+ if (new_length <= number && number < old_length) {
+ dict->SetEntry(i, the_hole_value, the_hole_value);
+ removed_entries++;
}
}
- if (new_length != length) {
- MaybeObject* maybe_object = heap->NumberFromUint32(new_length);
- if (!maybe_object->To(&length_object)) return maybe_object;
- }
-
- // Remove elements that should be deleted.
- int removed_entries = 0;
- Object* the_hole_value = heap->the_hole_value();
- for (int i = 0; i < capacity; i++) {
- Object* key = dict->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t number = static_cast<uint32_t>(key->Number());
- if (new_length <= number && number < old_length) {
- dict->SetEntry(i, the_hole_value, the_hole_value);
- removed_entries++;
- }
- }
- }
-
- // Update the number of elements.
- dict->ElementsRemoved(removed_entries);
}
+
+ // Update the number of elements.
+ dict->ElementsRemoved(removed_entries);
}
return length_object;
}
@@ -1112,15 +1408,19 @@ class DictionaryElementsAccessor
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
CopyDictionaryToObjectElements(
SeededNumberDictionary::cast(from), from_start,
FixedArray::cast(to), to_kind, to_start, copy_size);
return from;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
CopyDictionaryToDoubleElements(
SeededNumberDictionary::cast(from), from_start,
FixedDoubleArray::cast(to), to_start, copy_size);
@@ -1163,6 +1463,18 @@ class DictionaryElementsAccessor
return obj->GetHeap()->the_hole_value();
}
+ MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+ Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
+ int entry = backing_store->FindEntry(key);
+ if (entry != SeededNumberDictionary::kNotFound) {
+ return backing_store->DetailsAt(entry).attributes();
+ }
+ return ABSENT;
+ }
+
static bool HasElementImpl(Object* receiver,
JSObject* holder,
uint32_t key,
@@ -1222,6 +1534,22 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
}
}
+ MUST_USE_RESULT static PropertyAttributes GetAttributesImpl(
+ Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ FixedArray* parameter_map) {
+ Object* probe = GetParameterMapArg(obj, parameter_map, key);
+ if (!probe->IsTheHole()) {
+ return NONE;
+ } else {
+ // If not aliased, check the arguments.
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ return ElementsAccessor::ForArray(arguments)->GetAttributes(
+ receiver, obj, key, arguments);
+ }
+ }
+
MUST_USE_RESULT static MaybeObject* SetLengthImpl(
JSObject* obj,
Object* length,
@@ -1247,7 +1575,10 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
if (arguments->IsDictionary()) {
return DictionaryElementsAccessor::DeleteCommon(obj, key, mode);
} else {
- return FastObjectElementsAccessor::DeleteCommon(obj, key);
+ // It's difficult to access the version of DeleteCommon that is declared
+ // in the templatized super class, call the concrete implementation in
+ // the class for the most generalized ElementsKind subclass.
+ return FastHoleyObjectElementsAccessor::DeleteCommon(obj, key, mode);
}
}
return obj->GetHeap()->true_value();
@@ -1258,6 +1589,7 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
FixedArray* parameter_map = FixedArray::cast(from);
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
@@ -1311,7 +1643,7 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
if (array->IsDictionary()) {
return elements_accessors_[DICTIONARY_ELEMENTS];
} else {
- return elements_accessors_[FAST_ELEMENTS];
+ return elements_accessors_[FAST_HOLEY_ELEMENTS];
}
case EXTERNAL_BYTE_ARRAY_TYPE:
return elements_accessors_[EXTERNAL_BYTE_ELEMENTS];
diff --git a/src/3rdparty/v8/src/elements.h b/src/3rdparty/v8/src/elements.h
index 55d6fa5..8a83f0f 100644
--- a/src/3rdparty/v8/src/elements.h
+++ b/src/3rdparty/v8/src/elements.h
@@ -28,6 +28,7 @@
#ifndef V8_ELEMENTS_H_
#define V8_ELEMENTS_H_
+#include "elements-kind.h"
#include "objects.h"
#include "heap.h"
#include "isolate.h"
@@ -45,6 +46,10 @@ class ElementsAccessor {
virtual ElementsKind kind() const = 0;
const char* name() const { return name_; }
+ // Checks the elements of an object for consistency, asserting when a problem
+ // is found.
+ virtual void Validate(JSObject* obj) = 0;
+
// Returns true if a holder contains an element with the specified key
// without iterating up the prototype chain. The caller can optionally pass
// in the backing store to use for the check, which must be compatible with
@@ -66,6 +71,17 @@ class ElementsAccessor {
uint32_t key,
FixedArrayBase* backing_store = NULL) = 0;
+ // Returns an element's attributes, or ABSENT if there is no such
+ // element. This method doesn't iterate up the prototype chain. The caller
+ // can optionally pass in the backing store to use for the check, which must
+ // be compatible with the ElementsKind of the ElementsAccessor. If
+ // backing_store is NULL, the holder->elements() is used as the backing store.
+ MUST_USE_RESULT virtual PropertyAttributes GetAttributes(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
+
// Modifies the length data property as specified for JSArrays and resizes the
// underlying backing store accordingly. The method honors the semantics of
// changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that
diff --git a/src/3rdparty/v8/src/execution.cc b/src/3rdparty/v8/src/execution.cc
index 7e69abe..913bf64 100644
--- a/src/3rdparty/v8/src/execution.cc
+++ b/src/3rdparty/v8/src/execution.cc
@@ -101,12 +101,12 @@ static Handle<Object> Invoke(bool is_construct,
// Make sure that the global object of the context we're about to
// make the current one is indeed a global object.
- ASSERT(function->context()->global()->IsGlobalObject());
+ ASSERT(function->context()->global_object()->IsGlobalObject());
Handle<JSObject> oldqml;
if (!qml.is_null()) {
- oldqml = Handle<JSObject>(function->context()->qml_global());
- function->context()->set_qml_global(JSObject::cast(*qml));
+ oldqml = Handle<JSObject>(function->context()->qml_global_object());
+ function->context()->set_qml_global_object(JSObject::cast(*qml));
}
{
@@ -126,9 +126,9 @@ static Handle<Object> Invoke(bool is_construct,
}
if (!qml.is_null())
- function->context()->set_qml_global(*oldqml);
+ function->context()->set_qml_global_object(*oldqml);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
value->Verify();
#endif
@@ -142,6 +142,12 @@ static Handle<Object> Invoke(bool is_construct,
V8::FatalProcessOutOfMemory("JS", true);
}
}
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ // Reset stepping state when script exits with uncaught exception.
+ if (isolate->debugger()->IsDebuggerActive()) {
+ isolate->debug()->ClearStepping();
+ }
+#endif // ENABLE_DEBUGGER_SUPPORT
return Handle<Object>();
} else {
isolate->clear_pending_message();
@@ -158,7 +164,8 @@ Handle<Object> Execution::Call(Handle<Object> callable,
bool* pending_exception,
bool convert_receiver)
{
- return Call(callable, receiver, argc, argv, pending_exception, convert_receiver, Handle<Object>());
+ return Call(callable, receiver, argc, argv, pending_exception,
+ convert_receiver, Handle<Object>());
}
Handle<Object> Execution::Call(Handle<Object> callable,
@@ -180,10 +187,10 @@ Handle<Object> Execution::Call(Handle<Object> callable,
if (convert_receiver && !receiver->IsJSReceiver() &&
!func->shared()->native() && func->shared()->is_classic_mode()) {
if (receiver->IsUndefined() || receiver->IsNull()) {
- Object* global = func->context()->global()->global_receiver();
+ Object* global = func->context()->global_object()->global_receiver();
// Under some circumstances, 'global' can be the JSBuiltinsObject
- // In that case, don't rewrite.
- // (FWIW, the same holds for GetIsolate()->global()->global_receiver().)
+ // In that case, don't rewrite. (FWIW, the same holds for
+ // GetIsolate()->global_object()->global_receiver().)
if (!global->IsJSBuiltinsObject()) receiver = Handle<Object>(global);
} else {
receiver = ToObject(receiver, pending_exception);
@@ -199,7 +206,7 @@ Handle<Object> Execution::New(Handle<JSFunction> func,
int argc,
Handle<Object> argv[],
bool* pending_exception) {
- return Invoke(true, func, Isolate::Current()->global(), argc, argv,
+ return Invoke(true, func, Isolate::Current()->global_object(), argc, argv,
pending_exception, Handle<Object>());
}
@@ -261,7 +268,7 @@ Handle<Object> Execution::GetFunctionDelegate(Handle<Object> object) {
if (fun->IsHeapObject() &&
HeapObject::cast(fun)->map()->has_instance_call_handler()) {
return Handle<JSFunction>(
- isolate->global_context()->call_as_function_delegate());
+ isolate->native_context()->call_as_function_delegate());
}
return factory->undefined_value();
@@ -285,7 +292,7 @@ Handle<Object> Execution::TryGetFunctionDelegate(Handle<Object> object,
if (fun->IsHeapObject() &&
HeapObject::cast(fun)->map()->has_instance_call_handler()) {
return Handle<JSFunction>(
- isolate->global_context()->call_as_function_delegate());
+ isolate->native_context()->call_as_function_delegate());
}
// If the Object doesn't have an instance-call handler we should
@@ -318,7 +325,7 @@ Handle<Object> Execution::GetConstructorDelegate(Handle<Object> object) {
if (fun->IsHeapObject() &&
HeapObject::cast(fun)->map()->has_instance_call_handler()) {
return Handle<JSFunction>(
- isolate->global_context()->call_as_constructor_delegate());
+ isolate->native_context()->call_as_constructor_delegate());
}
return isolate->factory()->undefined_value();
@@ -346,7 +353,7 @@ Handle<Object> Execution::TryGetConstructorDelegate(
if (fun->IsHeapObject() &&
HeapObject::cast(fun)->map()->has_instance_call_handler()) {
return Handle<JSFunction>(
- isolate->global_context()->call_as_constructor_delegate());
+ isolate->native_context()->call_as_constructor_delegate());
}
// If the Object doesn't have an instance-call handler we should
@@ -461,6 +468,25 @@ void StackGuard::RequestRuntimeProfilerTick() {
}
+void StackGuard::RequestCodeReadyEvent() {
+ ASSERT(FLAG_parallel_recompilation);
+ if (ExecutionAccess::TryLock(isolate_)) {
+ thread_local_.interrupt_flags_ |= CODE_READY;
+ if (thread_local_.postpone_interrupts_nesting_ == 0) {
+ thread_local_.jslimit_ = thread_local_.climit_ = kInterruptLimit;
+ isolate_->heap()->SetStackLimits();
+ }
+ ExecutionAccess::Unlock(isolate_);
+ }
+}
+
+
+bool StackGuard::IsCodeReadyEvent() {
+ ExecutionAccess access(isolate_);
+ return (thread_local_.interrupt_flags_ & CODE_READY) != 0;
+}
+
+
bool StackGuard::IsGCRequest() {
ExecutionAccess access(isolate_);
return (thread_local_.interrupt_flags_ & GC_REQUEST) != 0;
@@ -676,7 +702,7 @@ Handle<JSRegExp> Execution::NewJSRegExp(Handle<String> pattern,
Handle<String> flags,
bool* exc) {
Handle<JSFunction> function = Handle<JSFunction>(
- pattern->GetIsolate()->global_context()->regexp_function());
+ pattern->GetIsolate()->native_context()->regexp_function());
Handle<Object> re_obj = RegExpImpl::CreateRegExpLiteral(
function, pattern, flags, exc);
if (*exc) return Handle<JSRegExp>();
@@ -722,7 +748,7 @@ Handle<JSFunction> Execution::InstantiateFunction(
// Fast case: see if the function has already been instantiated
int serial_number = Smi::cast(data->serial_number())->value();
Object* elm =
- isolate->global_context()->function_cache()->
+ isolate->native_context()->function_cache()->
GetElementNoExceptionThrown(serial_number);
if (elm->IsJSFunction()) return Handle<JSFunction>(JSFunction::cast(elm));
// The function has not yet been instantiated in this context; do it.
@@ -847,6 +873,11 @@ Object* Execution::DebugBreakHelper() {
return isolate->heap()->undefined_value();
}
+ // Ignore debug break if debugger is not active.
+ if (!isolate->debugger()->IsDebuggerActive()) {
+ return isolate->heap()->undefined_value();
+ }
+
StackLimitCheck check(isolate);
if (check.HasOverflowed()) {
return isolate->heap()->undefined_value();
@@ -861,7 +892,7 @@ Object* Execution::DebugBreakHelper() {
if (JSFunction::cast(fun)->IsBuiltin()) {
return isolate->heap()->undefined_value();
}
- GlobalObject* global = JSFunction::cast(fun)->context()->global();
+ GlobalObject* global = JSFunction::cast(fun)->context()->global_object();
// Don't stop in debugger functions.
if (isolate->debug()->IsDebugGlobal(global)) {
return isolate->heap()->undefined_value();
@@ -921,6 +952,17 @@ MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) {
stack_guard->Continue(GC_REQUEST);
}
+ if (stack_guard->IsCodeReadyEvent()) {
+ ASSERT(FLAG_parallel_recompilation);
+ if (FLAG_trace_parallel_recompilation) {
+ PrintF(" ** CODE_READY event received.\n");
+ }
+ stack_guard->Continue(CODE_READY);
+ }
+ if (!stack_guard->IsTerminateExecution()) {
+ isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
+ }
+
isolate->counters()->stack_interrupts()->Increment();
// If FLAG_count_based_interrupts, every interrupt is a profiler interrupt.
if (FLAG_count_based_interrupts ||
diff --git a/src/3rdparty/v8/src/execution.h b/src/3rdparty/v8/src/execution.h
index c33a675..90219f5 100644
--- a/src/3rdparty/v8/src/execution.h
+++ b/src/3rdparty/v8/src/execution.h
@@ -42,7 +42,8 @@ enum InterruptFlag {
PREEMPT = 1 << 3,
TERMINATE = 1 << 4,
RUNTIME_PROFILER_TICK = 1 << 5,
- GC_REQUEST = 1 << 6
+ GC_REQUEST = 1 << 6,
+ CODE_READY = 1 << 7
};
@@ -203,6 +204,8 @@ class StackGuard {
void TerminateExecution();
bool IsRuntimeProfilerTick();
void RequestRuntimeProfilerTick();
+ bool IsCodeReadyEvent();
+ void RequestCodeReadyEvent();
#ifdef ENABLE_DEBUGGER_SUPPORT
bool IsDebugBreak();
void DebugBreak();
diff --git a/src/3rdparty/v8/src/extensions/gc-extension.cc b/src/3rdparty/v8/src/extensions/gc-extension.cc
index f921552..813b921 100644
--- a/src/3rdparty/v8/src/extensions/gc-extension.cc
+++ b/src/3rdparty/v8/src/extensions/gc-extension.cc
@@ -40,7 +40,11 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
- HEAP->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
+ if (args[0]->BooleanValue()) {
+ HEAP->CollectGarbage(NEW_SPACE, "gc extension");
+ } else {
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
+ }
return v8::Undefined();
}
diff --git a/src/3rdparty/v8/src/extensions/statistics-extension.cc b/src/3rdparty/v8/src/extensions/statistics-extension.cc
new file mode 100644
index 0000000..7ae090c
--- /dev/null
+++ b/src/3rdparty/v8/src/extensions/statistics-extension.cc
@@ -0,0 +1,153 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "statistics-extension.h"
+
+namespace v8 {
+namespace internal {
+
+const char* const StatisticsExtension::kSource =
+ "native function getV8Statistics();";
+
+
+v8::Handle<v8::FunctionTemplate> StatisticsExtension::GetNativeFunction(
+ v8::Handle<v8::String> str) {
+ ASSERT(strcmp(*v8::String::AsciiValue(str), "getV8Statistics") == 0);
+ return v8::FunctionTemplate::New(StatisticsExtension::GetCounters);
+}
+
+
+static void AddCounter(v8::Local<v8::Object> object,
+ StatsCounter* counter,
+ const char* name) {
+ if (counter->Enabled()) {
+ object->Set(v8::String::New(name),
+ v8::Number::New(*counter->GetInternalPointer()));
+ }
+}
+
+static void AddNumber(v8::Local<v8::Object> object,
+ intptr_t value,
+ const char* name) {
+ object->Set(v8::String::New(name),
+ v8::Number::New(static_cast<double>(value)));
+}
+
+
+v8::Handle<v8::Value> StatisticsExtension::GetCounters(
+ const v8::Arguments& args) {
+ Isolate* isolate = Isolate::Current();
+ Heap* heap = isolate->heap();
+
+ if (args.Length() > 0) { // GC if first argument evaluates to true.
+ if (args[0]->IsBoolean() && args[0]->ToBoolean()->Value()) {
+ heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
+ }
+ }
+
+ Counters* counters = isolate->counters();
+ v8::Local<v8::Object> result = v8::Object::New();
+
+#define ADD_COUNTER(name, caption) \
+ AddCounter(result, counters->name(), #name);
+
+ STATS_COUNTER_LIST_1(ADD_COUNTER)
+ STATS_COUNTER_LIST_2(ADD_COUNTER)
+#undef ADD_COUNTER
+#define ADD_COUNTER(name) \
+ AddCounter(result, counters->count_of_##name(), "count_of_" #name); \
+ AddCounter(result, counters->size_of_##name(), "size_of_" #name);
+
+ INSTANCE_TYPE_LIST(ADD_COUNTER)
+#undef ADD_COUNTER
+#define ADD_COUNTER(name) \
+ AddCounter(result, counters->count_of_CODE_TYPE_##name(), \
+ "count_of_CODE_TYPE_" #name); \
+ AddCounter(result, counters->size_of_CODE_TYPE_##name(), \
+ "size_of_CODE_TYPE_" #name);
+
+ CODE_KIND_LIST(ADD_COUNTER)
+#undef ADD_COUNTER
+#define ADD_COUNTER(name) \
+ AddCounter(result, counters->count_of_FIXED_ARRAY_##name(), \
+ "count_of_FIXED_ARRAY_" #name); \
+ AddCounter(result, counters->size_of_FIXED_ARRAY_##name(), \
+ "size_of_FIXED_ARRAY_" #name);
+
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADD_COUNTER)
+#undef ADD_COUNTER
+
+ AddNumber(result, isolate->memory_allocator()->Size(),
+ "total_committed_bytes");
+ AddNumber(result, heap->new_space()->Size(),
+ "new_space_live_bytes");
+ AddNumber(result, heap->new_space()->Available(),
+ "new_space_available_bytes");
+ AddNumber(result, heap->new_space()->CommittedMemory(),
+ "new_space_commited_bytes");
+ AddNumber(result, heap->old_pointer_space()->Size(),
+ "old_pointer_space_live_bytes");
+ AddNumber(result, heap->old_pointer_space()->Available(),
+ "old_pointer_space_available_bytes");
+ AddNumber(result, heap->old_pointer_space()->CommittedMemory(),
+ "old_pointer_space_commited_bytes");
+ AddNumber(result, heap->old_data_space()->Size(),
+ "old_data_space_live_bytes");
+ AddNumber(result, heap->old_data_space()->Available(),
+ "old_data_space_available_bytes");
+ AddNumber(result, heap->old_data_space()->CommittedMemory(),
+ "old_data_space_commited_bytes");
+ AddNumber(result, heap->code_space()->Size(),
+ "code_space_live_bytes");
+ AddNumber(result, heap->code_space()->Available(),
+ "code_space_available_bytes");
+ AddNumber(result, heap->code_space()->CommittedMemory(),
+ "code_space_commited_bytes");
+ AddNumber(result, heap->cell_space()->Size(),
+ "cell_space_live_bytes");
+ AddNumber(result, heap->cell_space()->Available(),
+ "cell_space_available_bytes");
+ AddNumber(result, heap->cell_space()->CommittedMemory(),
+ "cell_space_commited_bytes");
+ AddNumber(result, heap->lo_space()->Size(),
+ "lo_space_live_bytes");
+ AddNumber(result, heap->lo_space()->Available(),
+ "lo_space_available_bytes");
+ AddNumber(result, heap->lo_space()->CommittedMemory(),
+ "lo_space_commited_bytes");
+ AddNumber(result, heap->amount_of_external_allocated_memory(),
+ "amount_of_external_allocated_memory");
+ return result;
+}
+
+
+void StatisticsExtension::Register() {
+ static StatisticsExtension statistics_extension;
+ static v8::DeclareExtension declaration(&statistics_extension);
+}
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/statistics-extension.h b/src/3rdparty/v8/src/extensions/statistics-extension.h
new file mode 100644
index 0000000..433c4cf
--- /dev/null
+++ b/src/3rdparty/v8/src/extensions/statistics-extension.h
@@ -0,0 +1,49 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_EXTENSIONS_STATISTICS_EXTENSION_H_
+#define V8_EXTENSIONS_STATISTICS_EXTENSION_H_
+
+#include "v8.h"
+
+namespace v8 {
+namespace internal {
+
+class StatisticsExtension : public v8::Extension {
+ public:
+ StatisticsExtension() : v8::Extension("v8/statistics", kSource) {}
+ virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
+ v8::Handle<v8::String> name);
+ static v8::Handle<v8::Value> GetCounters(const v8::Arguments& args);
+ static void Register();
+ private:
+ static const char* const kSource;
+};
+
+} } // namespace v8::internal
+
+#endif // V8_EXTENSIONS_STATISTICS_EXTENSION_H_
diff --git a/src/3rdparty/v8/src/factory.cc b/src/3rdparty/v8/src/factory.cc
index 64b26a4..703251f 100644
--- a/src/3rdparty/v8/src/factory.cc
+++ b/src/3rdparty/v8/src/factory.cc
@@ -34,6 +34,7 @@
#include "macro-assembler.h"
#include "objects.h"
#include "objects-visiting.h"
+#include "platform.h"
#include "scopeinfo.h"
namespace v8 {
@@ -111,10 +112,11 @@ Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
}
-Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
+Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors,
+ int slack) {
ASSERT(0 <= number_of_descriptors);
CALL_HEAP_FUNCTION(isolate(),
- DescriptorArray::Allocate(number_of_descriptors),
+ DescriptorArray::Allocate(number_of_descriptors, slack),
DescriptorArray);
}
@@ -283,19 +285,27 @@ Handle<String> Factory::NewExternalStringFromTwoByte(
}
-Handle<Context> Factory::NewGlobalContext() {
+Handle<Context> Factory::NewNativeContext() {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateGlobalContext(),
+ isolate()->heap()->AllocateNativeContext(),
Context);
}
-Handle<Context> Factory::NewModuleContext(Handle<Context> previous,
+Handle<Context> Factory::NewGlobalContext(Handle<JSFunction> function,
Handle<ScopeInfo> scope_info) {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateModuleContext(*previous, *scope_info),
+ isolate()->heap()->AllocateGlobalContext(*function, *scope_info),
+ Context);
+}
+
+
+Handle<Context> Factory::NewModuleContext(Handle<ScopeInfo> scope_info) {
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateModuleContext(*scope_info),
Context);
}
@@ -464,14 +474,15 @@ Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
}
-Handle<Map> Factory::CopyMapDropDescriptors(Handle<Map> src) {
- CALL_HEAP_FUNCTION(isolate(), src->CopyDropDescriptors(), Map);
+Handle<Map> Factory::CopyWithPreallocatedFieldDescriptors(Handle<Map> src) {
+ CALL_HEAP_FUNCTION(
+ isolate(), src->CopyWithPreallocatedFieldDescriptors(), Map);
}
Handle<Map> Factory::CopyMap(Handle<Map> src,
int extra_inobject_properties) {
- Handle<Map> copy = CopyMapDropDescriptors(src);
+ Handle<Map> copy = CopyWithPreallocatedFieldDescriptors(src);
// Check that we do not overflow the instance size when adding the
// extra inobject properties.
int instance_size_delta = extra_inobject_properties * kPointerSize;
@@ -494,8 +505,8 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
}
-Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
- CALL_HEAP_FUNCTION(isolate(), src->CopyDropTransitions(), Map);
+Handle<Map> Factory::CopyMap(Handle<Map> src) {
+ CALL_HEAP_FUNCTION(isolate(), src->Copy(), Map);
}
@@ -550,18 +561,27 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
}
result->set_context(*context);
- if (!function_info->bound()) {
+
+ int index = function_info->SearchOptimizedCodeMap(context->native_context());
+ if (!function_info->bound() && index < 0) {
int number_of_literals = function_info->num_literals();
Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure);
if (number_of_literals > 0) {
- // Store the object, regexp and array functions in the literals
- // array prefix. These functions will be used when creating
- // object, regexp and array literals in this function.
- literals->set(JSFunction::kLiteralGlobalContextIndex,
- context->global_context());
+ // Store the native context in the literals array prefix. This
+ // context will be used when creating object, regexp and array
+ // literals in this function.
+ literals->set(JSFunction::kLiteralNativeContextIndex,
+ context->native_context());
}
result->set_literals(*literals);
}
+
+ if (index > 0) {
+ // Caching of optimized code enabled and optimized code found.
+ function_info->InstallFromOptimizedCodeMap(*result, index);
+ return result;
+ }
+
if (V8::UseCrankshaft() &&
FLAG_always_opt &&
result->is_compiled() &&
@@ -675,6 +695,43 @@ Handle<Object> Factory::NewError(const char* type,
}
+Handle<String> Factory::EmergencyNewError(const char* type,
+ Handle<JSArray> args) {
+ const int kBufferSize = 1000;
+ char buffer[kBufferSize];
+ size_t space = kBufferSize;
+ char* p = &buffer[0];
+
+ Vector<char> v(buffer, kBufferSize);
+ OS::StrNCpy(v, type, space);
+ space -= Min(space, strlen(type));
+ p = &buffer[kBufferSize] - space;
+
+ for (unsigned i = 0; i < ARRAY_SIZE(args); i++) {
+ if (space > 0) {
+ *p++ = ' ';
+ space--;
+ if (space > 0) {
+ MaybeObject* maybe_arg = args->GetElement(i);
+ Handle<String> arg_str(reinterpret_cast<String*>(maybe_arg));
+ const char* arg = *arg_str->ToCString();
+ Vector<char> v2(p, static_cast<int>(space));
+ OS::StrNCpy(v2, arg, space);
+ space -= Min(space, strlen(arg));
+ p = &buffer[kBufferSize] - space;
+ }
+ }
+ }
+ if (space > 0) {
+ *p = '\0';
+ } else {
+ buffer[kBufferSize - 1] = '\0';
+ }
+ Handle<String> error_string = NewStringFromUtf8(CStrVector(buffer), TENURED);
+ return error_string;
+}
+
+
Handle<Object> Factory::NewError(const char* maker,
const char* type,
Handle<JSArray> args) {
@@ -683,8 +740,9 @@ Handle<Object> Factory::NewError(const char* maker,
isolate()->js_builtins_object()->GetPropertyNoExceptionThrown(*make_str));
// If the builtins haven't been properly configured yet this error
// constructor may not have been defined. Bail out.
- if (!fun_obj->IsJSFunction())
- return undefined_value();
+ if (!fun_obj->IsJSFunction()) {
+ return EmergencyNewError(type, args);
+ }
Handle<JSFunction> fun = Handle<JSFunction>::cast(fun_obj);
Handle<Object> type_obj = LookupAsciiSymbol(type);
Handle<Object> argv[] = { type_obj, args };
@@ -775,7 +833,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
instance_size != JSObject::kHeaderSize) {
Handle<Map> initial_map = NewMap(type,
instance_size,
- FAST_SMI_ONLY_ELEMENTS);
+ GetInitialFastElementsKind());
function->set_initial_map(*initial_map);
initial_map->set_constructor(*function);
}
@@ -837,97 +895,12 @@ Handle<Code> Factory::CopyCode(Handle<Code> code, Vector<byte> reloc_info) {
}
-MUST_USE_RESULT static inline MaybeObject* DoCopyInsert(
- DescriptorArray* array,
- String* key,
- Object* value,
- PropertyAttributes attributes) {
- CallbacksDescriptor desc(key, value, attributes);
- MaybeObject* obj = array->CopyInsert(&desc, REMOVE_TRANSITIONS);
- return obj;
-}
-
-
-// Allocate the new array.
-Handle<DescriptorArray> Factory::CopyAppendForeignDescriptor(
- Handle<DescriptorArray> array,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes) {
- CALL_HEAP_FUNCTION(isolate(),
- DoCopyInsert(*array, *key, *value, attributes),
- DescriptorArray);
-}
-
-
Handle<String> Factory::SymbolFromString(Handle<String> value) {
CALL_HEAP_FUNCTION(isolate(),
isolate()->heap()->LookupSymbol(*value), String);
}
-Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors(
- Handle<DescriptorArray> array,
- Handle<Object> descriptors) {
- v8::NeanderArray callbacks(descriptors);
- int nof_callbacks = callbacks.length();
- Handle<DescriptorArray> result =
- NewDescriptorArray(array->number_of_descriptors() + nof_callbacks);
-
- // Number of descriptors added to the result so far.
- int descriptor_count = 0;
-
- // Ensure that marking will not progress and change color of objects.
- DescriptorArray::WhitenessWitness witness(*result);
-
- // Copy the descriptors from the array.
- for (int i = 0; i < array->number_of_descriptors(); i++) {
- if (!array->IsNullDescriptor(i)) {
- DescriptorArray::CopyFrom(result, descriptor_count++, array, i, witness);
- }
- }
-
- // Number of duplicates detected.
- int duplicates = 0;
-
- // Fill in new callback descriptors. Process the callbacks from
- // back to front so that the last callback with a given name takes
- // precedence over previously added callbacks with that name.
- for (int i = nof_callbacks - 1; i >= 0; i--) {
- Handle<AccessorInfo> entry =
- Handle<AccessorInfo>(AccessorInfo::cast(callbacks.get(i)));
- // Ensure the key is a symbol before writing into the instance descriptor.
- Handle<String> key =
- SymbolFromString(Handle<String>(String::cast(entry->name())));
- // Check if a descriptor with this name already exists before writing.
- if (result->LinearSearch(*key, descriptor_count) ==
- DescriptorArray::kNotFound) {
- CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
- result->Set(descriptor_count, &desc, witness);
- descriptor_count++;
- } else {
- duplicates++;
- }
- }
-
- // If duplicates were detected, allocate a result of the right size
- // and transfer the elements.
- if (duplicates > 0) {
- int number_of_descriptors = result->number_of_descriptors() - duplicates;
- Handle<DescriptorArray> new_result =
- NewDescriptorArray(number_of_descriptors);
- for (int i = 0; i < number_of_descriptors; i++) {
- DescriptorArray::CopyFrom(new_result, i, result, i, witness);
- }
- result = new_result;
- }
-
- // Sort the result before returning.
- result->Sort(witness);
- return result;
-}
-
-
Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
PretenureFlag pretenure) {
CALL_HEAP_FUNCTION(
@@ -936,10 +909,11 @@ Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
}
-Handle<JSModule> Factory::NewJSModule() {
+Handle<JSModule> Factory::NewJSModule(Handle<Context> context,
+ Handle<ScopeInfo> scope_info) {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateJSModule(), JSModule);
+ isolate()->heap()->AllocateJSModule(*context, *scope_info), JSModule);
}
@@ -1013,10 +987,11 @@ void Factory::EnsureCanContainHeapObjectElements(Handle<JSArray> array) {
void Factory::EnsureCanContainElements(Handle<JSArray> array,
Handle<FixedArrayBase> elements,
+ uint32_t length,
EnsureElementsMode mode) {
CALL_HEAP_FUNCTION_VOID(
isolate(),
- array->EnsureCanContainElements(*elements, mode));
+ array->EnsureCanContainElements(*elements, length, mode));
}
@@ -1045,7 +1020,7 @@ void Factory::BecomeJSFunction(Handle<JSReceiver> object) {
}
-void Factory::SetIdentityHash(Handle<JSObject> object, Object* hash) {
+void Factory::SetIdentityHash(Handle<JSObject> object, Smi* hash) {
CALL_HEAP_FUNCTION_VOID(
isolate(),
object->SetIdentityHash(hash, ALLOW_CREATION));
@@ -1145,7 +1120,7 @@ Handle<JSFunction> Factory::NewFunctionHelper(Handle<String> name,
Handle<JSFunction> Factory::NewFunction(Handle<String> name,
Handle<Object> prototype) {
Handle<JSFunction> fun = NewFunctionHelper(name, prototype);
- fun->set_context(isolate()->context()->global_context());
+ fun->set_context(isolate()->context()->native_context());
return fun;
}
@@ -1171,7 +1146,7 @@ Handle<JSFunction> Factory::NewFunctionWithoutPrototype(
LanguageMode language_mode) {
Handle<JSFunction> fun =
NewFunctionWithoutPrototypeHelper(name, language_mode);
- fun->set_context(isolate()->context()->global_context());
+ fun->set_context(isolate()->context()->native_context());
return fun;
}
@@ -1182,8 +1157,8 @@ Handle<Object> Factory::ToObject(Handle<Object> object) {
Handle<Object> Factory::ToObject(Handle<Object> object,
- Handle<Context> global_context) {
- CALL_HEAP_FUNCTION(isolate(), object->ToObject(*global_context), Object);
+ Handle<Context> native_context) {
+ CALL_HEAP_FUNCTION(isolate(), object->ToObject(*native_context), Object);
}
@@ -1333,20 +1308,31 @@ Handle<JSFunction> Factory::CreateApiFunction(
result->shared()->DontAdaptArguments();
// Recursively copy parent templates' accessors, 'data' may be modified.
- Handle<DescriptorArray> array =
- Handle<DescriptorArray>(map->instance_descriptors());
+ int max_number_of_additional_properties = 0;
+ FunctionTemplateInfo* info = *obj;
+ while (true) {
+ Object* props = info->property_accessors();
+ if (!props->IsUndefined()) {
+ Handle<Object> props_handle(props);
+ NeanderArray props_array(props_handle);
+ max_number_of_additional_properties += props_array.length();
+ }
+ Object* parent = info->parent_template();
+ if (parent->IsUndefined()) break;
+ info = FunctionTemplateInfo::cast(parent);
+ }
+
+ Map::EnsureDescriptorSlack(map, max_number_of_additional_properties);
+
while (true) {
Handle<Object> props = Handle<Object>(obj->property_accessors());
if (!props->IsUndefined()) {
- array = CopyAppendCallbackDescriptors(array, props);
+ Map::AppendCallbackDescriptors(map, props);
}
Handle<Object> parent = Handle<Object>(obj->parent_template());
if (parent->IsUndefined()) break;
obj = Handle<FunctionTemplateInfo>::cast(parent);
}
- if (!array->IsEmpty()) {
- map->set_instance_descriptors(*array);
- }
ASSERT(result->shared()->IsApiFunction());
return result;
@@ -1383,7 +1369,7 @@ Handle<MapCache> Factory::AddToMapCache(Handle<Context> context,
Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<Context> context,
Handle<FixedArray> keys) {
if (context->map_cache()->IsUndefined()) {
- // Allocate the new map cache for the global context.
+ // Allocate the new map cache for the native context.
Handle<MapCache> new_cache = NewMapCache(24);
context->set_map_cache(*new_cache);
}
diff --git a/src/3rdparty/v8/src/factory.h b/src/3rdparty/v8/src/factory.h
index 06aad1b..51065aa 100644
--- a/src/3rdparty/v8/src/factory.h
+++ b/src/3rdparty/v8/src/factory.h
@@ -66,7 +66,8 @@ class Factory {
Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);
- Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
+ Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors,
+ int slack = 0);
Handle<DeoptimizationInputData> NewDeoptimizationInputData(
int deopt_entry_count,
PretenureFlag pretenure);
@@ -160,12 +161,15 @@ class Factory {
const ExternalTwoByteString::Resource* resource);
// Create a global (but otherwise uninitialized) context.
- Handle<Context> NewGlobalContext();
+ Handle<Context> NewNativeContext();
- // Create a module context.
- Handle<Context> NewModuleContext(Handle<Context> previous,
+ // Create a global context.
+ Handle<Context> NewGlobalContext(Handle<JSFunction> function,
Handle<ScopeInfo> scope_info);
+ // Create a module context.
+ Handle<Context> NewModuleContext(Handle<ScopeInfo> scope_info);
+
// Create a function context.
Handle<Context> NewFunctionContext(int length, Handle<JSFunction> function);
@@ -216,19 +220,19 @@ class Factory {
Handle<JSGlobalPropertyCell> NewJSGlobalPropertyCell(
Handle<Object> value);
- Handle<Map> NewMap(InstanceType type,
- int instance_size,
- ElementsKind elements_kind = FAST_ELEMENTS);
+ Handle<Map> NewMap(
+ InstanceType type,
+ int instance_size,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
- Handle<Map> CopyMapDropDescriptors(Handle<Map> map);
+ Handle<Map> CopyWithPreallocatedFieldDescriptors(Handle<Map> map);
// Copy the map adding more inobject properties if possible without
// overflowing the instance size.
Handle<Map> CopyMap(Handle<Map> map, int extra_inobject_props);
-
- Handle<Map> CopyMapDropTransitions(Handle<Map> map);
+ Handle<Map> CopyMap(Handle<Map> map);
Handle<Map> GetElementsTransitionMap(Handle<JSObject> object,
ElementsKind elements_kind);
@@ -266,16 +270,18 @@ class Factory {
Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
// JS modules are pretenured.
- Handle<JSModule> NewJSModule();
+ Handle<JSModule> NewJSModule(Handle<Context> context,
+ Handle<ScopeInfo> scope_info);
// JS arrays are pretenured when allocated by the parser.
- Handle<JSArray> NewJSArray(int capacity,
- ElementsKind elements_kind = FAST_ELEMENTS,
- PretenureFlag pretenure = NOT_TENURED);
+ Handle<JSArray> NewJSArray(
+ int capacity,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
+ PretenureFlag pretenure = NOT_TENURED);
Handle<JSArray> NewJSArrayWithElements(
Handle<FixedArrayBase> elements,
- ElementsKind elements_kind = FAST_ELEMENTS,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
PretenureFlag pretenure = NOT_TENURED);
void SetElementsCapacityAndLength(Handle<JSArray> array,
@@ -287,6 +293,7 @@ class Factory {
void EnsureCanContainHeapObjectElements(Handle<JSArray> array);
void EnsureCanContainElements(Handle<JSArray> array,
Handle<FixedArrayBase> elements,
+ uint32_t length,
EnsureElementsMode mode);
Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype);
@@ -295,7 +302,7 @@ class Factory {
void BecomeJSObject(Handle<JSReceiver> object);
void BecomeJSFunction(Handle<JSReceiver> object);
- void SetIdentityHash(Handle<JSObject> object, Object* hash);
+ void SetIdentityHash(Handle<JSObject> object, Smi* hash);
Handle<JSFunction> NewFunction(Handle<String> name,
Handle<Object> prototype);
@@ -329,12 +336,13 @@ class Factory {
Handle<Object> ToObject(Handle<Object> object);
Handle<Object> ToObject(Handle<Object> object,
- Handle<Context> global_context);
+ Handle<Context> native_context);
// Interface for creating error objects.
Handle<Object> NewError(const char* maker, const char* type,
Handle<JSArray> args);
+ Handle<String> EmergencyNewError(const char* type, Handle<JSArray> args);
Handle<Object> NewError(const char* maker, const char* type,
Vector< Handle<Object> > args);
Handle<Object> NewError(const char* type,
@@ -382,12 +390,6 @@ class Factory {
Handle<JSFunction> NewFunctionWithoutPrototype(Handle<String> name,
Handle<Code> code);
- Handle<DescriptorArray> CopyAppendForeignDescriptor(
- Handle<DescriptorArray> array,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes);
-
Handle<String> NumberToString(Handle<Object> number);
Handle<String> Uint32ToString(uint32_t value);
@@ -460,7 +462,7 @@ class Factory {
Handle<DebugInfo> NewDebugInfo(Handle<SharedFunctionInfo> shared);
#endif
- // Return a map using the map cache in the global context.
+ // Return a map using the map cache in the native context.
// The key the an ordered set of property names.
Handle<Map> ObjectLiteralMapFromCache(Handle<Context> context,
Handle<FixedArray> keys);
@@ -499,14 +501,10 @@ class Factory {
Handle<String> name,
LanguageMode language_mode);
- Handle<DescriptorArray> CopyAppendCallbackDescriptors(
- Handle<DescriptorArray> array,
- Handle<Object> descriptors);
-
// Create a new map cache.
Handle<MapCache> NewMapCache(int at_least_space_for);
- // Update the map cache in the global context with (keys, map)
+ // Update the map cache in the native context with (keys, map)
Handle<MapCache> AddToMapCache(Handle<Context> context,
Handle<FixedArray> keys,
Handle<Map> map);
diff --git a/src/3rdparty/v8/src/flag-definitions.h b/src/3rdparty/v8/src/flag-definitions.h
index 31c2a3a..96d03fa 100644
--- a/src/3rdparty/v8/src/flag-definitions.h
+++ b/src/3rdparty/v8/src/flag-definitions.h
@@ -132,7 +132,9 @@ public:
// Flags for language modes and experimental language features.
DEFINE_bool(use_strict, false, "enforce strict mode")
-DEFINE_bool(es52_globals, false,
+DEFINE_bool(es5_readonly, true,
+ "activate correct semantics for inheriting readonliness")
+DEFINE_bool(es52_globals, true,
"activate new semantics for global var declarations")
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
@@ -142,14 +144,19 @@ DEFINE_bool(harmony_modules, false,
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_collections, false,
"enable harmony collections (sets, maps, and weak maps)")
+DEFINE_bool(harmony_observation, false,
+ "enable harmony object observation (implies harmony collections")
DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
DEFINE_implication(harmony, harmony_scoping)
DEFINE_implication(harmony, harmony_modules)
DEFINE_implication(harmony, harmony_proxies)
DEFINE_implication(harmony, harmony_collections)
+DEFINE_implication(harmony, harmony_observation)
DEFINE_implication(harmony_modules, harmony_scoping)
+DEFINE_implication(harmony_observation, harmony_collections)
// Flags for experimental implementation features.
+DEFINE_bool(packed_arrays, true, "optimizes arrays that have no holes")
DEFINE_bool(smi_only_arrays, true, "tracks arrays with only smi values")
DEFINE_bool(clever_optimizations,
true,
@@ -197,18 +204,35 @@ DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
DEFINE_bool(use_osr, true, "use on-stack replacement")
DEFINE_bool(array_bounds_checks_elimination, true,
"perform array bounds checks elimination")
-DEFINE_bool(array_index_dehoisting, false,
+DEFINE_bool(array_index_dehoisting, true,
"perform array index dehoisting")
+DEFINE_bool(dead_code_elimination, true, "use dead code elimination")
+DEFINE_bool(trace_dead_code_elimination, false, "trace dead code elimination")
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
+DEFINE_bool(lookup_sample_by_shared, true,
+ "when picking a function to optimize, watch for shared function "
+ "info, not JSFunction itself")
+DEFINE_bool(cache_optimized_code, true,
+ "cache optimized code for closures")
DEFINE_bool(inline_construct, true, "inline constructor calls")
DEFINE_bool(inline_arguments, true, "inline functions with arguments object")
+DEFINE_bool(inline_accessors, true, "inline JavaScript accessors")
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
DEFINE_bool(optimize_for_in, true,
"optimize functions containing for-in loops")
+DEFINE_bool(opt_safe_uint32_operations, true,
+ "allow uint32 values on optimize frames if they are used only in"
+ "safe operations")
+
+DEFINE_bool(parallel_recompilation, false,
+ "optimizing hot functions asynchronously on a separate thread")
+DEFINE_bool(trace_parallel_recompilation, false, "track parallel recompilation")
+DEFINE_int(parallel_recompilation_queue_length, 2,
+ "the length of the parallel compilation queue")
// Experimental profiler changes.
DEFINE_bool(experimental_profiler, true, "enable all profiler experiments")
@@ -225,7 +249,8 @@ DEFINE_bool(interrupt_at_exit, false,
"insert an interrupt check at function exit")
DEFINE_bool(weighted_back_edges, false,
"weight back edges by jump distance for interrupt triggering")
-DEFINE_int(interrupt_budget, 5900,
+ // 0x1700 fits in the immediate field of an ARM instruction.
+DEFINE_int(interrupt_budget, 0x1700,
"execution budget before interrupt is triggered")
DEFINE_int(type_info_threshold, 15,
"percentage of ICs that must have type info to allow optimization")
@@ -260,9 +285,18 @@ DEFINE_bool(enable_sahf, true,
"enable use of SAHF instruction if available (X64 only)")
DEFINE_bool(enable_vfp3, true,
"enable use of VFP3 instructions if available - this implies "
- "enabling ARMv7 instructions (ARM only)")
+ "enabling ARMv7 and VFP2 instructions (ARM only)")
+DEFINE_bool(enable_vfp2, true,
+ "enable use of VFP2 instructions if available")
DEFINE_bool(enable_armv7, true,
"enable use of ARMv7 instructions if available (ARM only)")
+DEFINE_bool(enable_sudiv, true,
+ "enable use of SDIV and UDIV instructions if available (ARM only)")
+DEFINE_bool(enable_movw_movt, false,
+ "enable loading 32-bit constant by means of movw/movt "
+ "instruction pairs (ARM only)")
+DEFINE_bool(enable_unaligned_accesses, true,
+ "enable unaligned accesses for ARMv7 (ARM only)")
DEFINE_bool(enable_fpu, true,
"enable use of MIPS FPU instructions if available (MIPS only)")
@@ -304,8 +338,8 @@ DEFINE_int(min_preparse_length, 1024,
"minimum length for automatic enable preparsing")
DEFINE_bool(always_full_compiler, false,
"try to use the dedicated run-once backend for all code")
-DEFINE_bool(trace_bailout, false,
- "print reasons for falling back to using the classic V8 backend")
+DEFINE_int(max_opt_count, 10,
+ "maximum number of optimization attempts before giving up.")
// compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache")
@@ -346,27 +380,39 @@ DEFINE_bool(trace_gc, false,
DEFINE_bool(trace_gc_nvp, false,
"print one detailed trace line in name=value format "
"after each garbage collection")
+DEFINE_bool(trace_gc_ignore_scavenger, false,
+ "do not print trace line after scavenger collection")
DEFINE_bool(print_cumulative_gc_stat, false,
"print cumulative GC statistics in name=value format on exit")
DEFINE_bool(trace_gc_verbose, false,
"print more details following each garbage collection")
DEFINE_bool(trace_fragmentation, false,
"report fragmentation for old pointer and data pages")
+DEFINE_bool(trace_external_memory, false,
+ "print amount of external allocated memory after each time "
+ "it is adjusted.")
DEFINE_bool(collect_maps, true,
"garbage collect maps from which no objects can be reached")
DEFINE_bool(flush_code, true,
- "flush code that we expect not to use again before full gc")
+ "flush code that we expect not to use again (during full gc)")
+DEFINE_bool(flush_code_incrementally, false,
+ "flush code that we expect not to use again (incrementally)")
+DEFINE_bool(age_code, false,
+ "track un-executed functions to age code and flush only "
+ "old code")
DEFINE_bool(incremental_marking, true, "use incremental marking")
DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps")
DEFINE_bool(trace_incremental_marking, false,
"trace progress of the incremental marking")
+DEFINE_bool(track_gc_object_stats, false,
+ "track object counts and memory usage")
+#ifdef VERIFY_HEAP
+DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
+#endif
// v8.cc
DEFINE_bool(use_idle_notification, true,
"Use idle notification to reduce memory footprint.")
-
-DEFINE_bool(send_idle_notification, false,
- "Send idle notifcation between stress runs.")
// ic.cc
DEFINE_bool(use_ic, true, "use inline caching")
@@ -388,6 +434,8 @@ DEFINE_bool(never_compact, false,
"Never perform compaction on full GC - testing only")
DEFINE_bool(compact_code_space, true,
"Compact code space on full non-incremental collections")
+DEFINE_bool(incremental_code_compaction, true,
+ "Compact code space on full incremental collections")
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
@@ -400,6 +448,7 @@ DEFINE_bool(use_verbose_printer, true, "allows verbose printing")
// parser.cc
DEFINE_bool(allow_natives_syntax, false, "allow natives syntax")
+DEFINE_bool(trace_parse, false, "trace parsing and preparsing")
// simulator-arm.cc and simulator-mips.cc
DEFINE_bool(trace_sim, false, "Trace simulator execution")
@@ -444,6 +493,10 @@ DEFINE_string(testing_serialization_file, "/tmp/serdes",
"file in which to serialize heap")
#endif
+// mksnapshot.cc
+DEFINE_string(extra_code, NULL, "A filename with extra code to be included in"
+ " the snapshot (mksnapshot only)")
+
//
// Dev shell flags
//
@@ -529,7 +582,8 @@ DEFINE_bool(gc_greedy, false, "perform GC prior to some allocations")
DEFINE_bool(gc_verbose, false, "print stuff during garbage collection")
DEFINE_bool(heap_stats, false, "report heap statistics before and after GC")
DEFINE_bool(code_stats, false, "report code statistics after GC")
-DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
+DEFINE_bool(verify_native_context_separation, false,
+ "verify that code holds on to at most one native context after GC")
DEFINE_bool(print_handles, false, "report handles after GC")
DEFINE_bool(print_global_handles, false, "report global handles after GC")
@@ -602,6 +656,8 @@ DEFINE_bool(sliding_state_window, false,
"Update sliding state window counters.")
DEFINE_string(logfile, "v8.log", "Specify the name of the log file.")
DEFINE_bool(ll_prof, false, "Enable low-level linux profiler.")
+DEFINE_string(gc_fake_mmap, "/tmp/__v8_gc__",
+ "Specify the name of the file for fake gc mmap used in ll_prof")
//
// Disassembler only flags
diff --git a/src/3rdparty/v8/src/flags.cc b/src/3rdparty/v8/src/flags.cc
index 5720cbd..bca0eff 100644
--- a/src/3rdparty/v8/src/flags.cc
+++ b/src/3rdparty/v8/src/flags.cc
@@ -31,7 +31,7 @@
#include "v8.h"
#include "platform.h"
-#include "smart-array-pointer.h"
+#include "smart-pointers.h"
#include "string-stream.h"
@@ -343,6 +343,7 @@ static Flag* FindFlag(const char* name) {
int FlagList::SetFlagsFromCommandLine(int* argc,
char** argv,
bool remove_flags) {
+ int return_code = 0;
// parse arguments
for (int i = 1; i < *argc;) {
int j = i; // j > 0
@@ -368,7 +369,8 @@ int FlagList::SetFlagsFromCommandLine(int* argc,
} else {
fprintf(stderr, "Error: unrecognized flag %s\n"
"Try --help for options\n", arg);
- return j;
+ return_code = j;
+ break;
}
}
@@ -382,7 +384,8 @@ int FlagList::SetFlagsFromCommandLine(int* argc,
fprintf(stderr, "Error: missing value for flag %s of type %s\n"
"Try --help for options\n",
arg, Type2String(flag->type()));
- return j;
+ return_code = j;
+ break;
}
}
@@ -424,7 +427,8 @@ int FlagList::SetFlagsFromCommandLine(int* argc,
fprintf(stderr, "Error: illegal value for flag %s of type %s\n"
"Try --help for options\n",
arg, Type2String(flag->type()));
- return j;
+ return_code = j;
+ break;
}
// remove the flag & value from the command
@@ -451,7 +455,7 @@ int FlagList::SetFlagsFromCommandLine(int* argc,
exit(0);
}
// parsed all flags successfully
- return 0;
+ return return_code;
}
diff --git a/src/3rdparty/v8/src/frames.cc b/src/3rdparty/v8/src/frames.cc
index e265341..6342852 100644
--- a/src/3rdparty/v8/src/frames.cc
+++ b/src/3rdparty/v8/src/frames.cc
@@ -469,8 +469,22 @@ StackFrame::Type StackFrame::GetCallerState(State* state) const {
}
+Address StackFrame::UnpaddedFP() const {
+#if defined(V8_TARGET_ARCH_IA32)
+ if (!is_optimized()) return fp();
+ int32_t alignment_state = Memory::int32_at(
+ fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
+
+ return (alignment_state == kAlignmentPaddingPushed) ?
+ (fp() + kPointerSize) : fp();
+#else
+ return fp();
+#endif
+}
+
+
Code* EntryFrame::unchecked_code() const {
- return HEAP->raw_unchecked_js_entry_code();
+ return HEAP->js_entry_code();
}
@@ -493,7 +507,7 @@ StackFrame::Type EntryFrame::GetCallerState(State* state) const {
Code* EntryConstructFrame::unchecked_code() const {
- return HEAP->raw_unchecked_js_construct_entry_code();
+ return HEAP->js_construct_entry_code();
}
@@ -747,7 +761,7 @@ void JavaScriptFrame::PrintTop(FILE* file,
while (!it.done()) {
if (it.frame()->is_java_script()) {
JavaScriptFrame* frame = it.frame();
- if (frame->IsConstructor()) PrintF(file, "new ");
+ if (frame->IsConstructor()) FPrintF(file, "new ");
// function name
Object* maybe_fun = frame->function();
if (maybe_fun->IsJSFunction()) {
@@ -773,12 +787,12 @@ void JavaScriptFrame::PrintTop(FILE* file,
SmartArrayPointer<char> c_script_name =
script_name->ToCString(DISALLOW_NULLS,
ROBUST_STRING_TRAVERSAL);
- PrintF(file, " at %s:%d", *c_script_name, line);
+ FPrintF(file, " at %s:%d", *c_script_name, line);
} else {
- PrintF(file, "at <unknown>:%d", line);
+ FPrintF(file, " at <unknown>:%d", line);
}
} else {
- PrintF(file, " at <unknown>:<unknown>");
+ FPrintF(file, " at <unknown>:<unknown>");
}
}
} else {
@@ -789,14 +803,14 @@ void JavaScriptFrame::PrintTop(FILE* file,
// function arguments
// (we are intentionally only printing the actually
// supplied parameters, not all parameters required)
- PrintF(file, "(this=");
+ FPrintF(file, "(this=");
frame->receiver()->ShortPrint(file);
const int length = frame->ComputeParametersCount();
for (int i = 0; i < length; i++) {
- PrintF(file, ", ");
+ FPrintF(file, ", ");
frame->GetParameter(i)->ShortPrint(file);
}
- PrintF(file, ")");
+ FPrintF(file, ")");
}
break;
}
@@ -818,12 +832,23 @@ void FrameSummary::Print() {
}
+JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
+ int literal_id) {
+ if (literal_id == Translation::kSelfLiteralId) {
+ return JSFunction::cast(function());
+ }
+
+ return JSFunction::cast(literal_array->get(literal_id));
+}
+
+
void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
ASSERT(frames->length() == 0);
ASSERT(is_optimized());
int deopt_index = Safepoint::kNoDeoptimizationIndex;
DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
+ FixedArray* literal_array = data->LiteralArray();
// BUG(3243555): Since we don't have a lazy-deopt registered at
// throw-statements, we can't use the translation at the call-site of
@@ -850,11 +875,9 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
opcode = static_cast<Translation::Opcode>(it.Next());
if (opcode == Translation::JS_FRAME) {
i--;
- int ast_id = it.Next();
- int function_id = it.Next();
+ BailoutId ast_id = BailoutId(it.Next());
+ JSFunction* function = LiteralAt(literal_array, it.Next());
it.Next(); // Skip height.
- JSFunction* function =
- JSFunction::cast(data->LiteralArray()->get(function_id));
// The translation commands are ordered and the receiver is always
// at the first position. Since we are always at a call when we need
@@ -961,6 +984,7 @@ void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
int deopt_index = Safepoint::kNoDeoptimizationIndex;
DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
+ FixedArray* literal_array = data->LiteralArray();
TranslationIterator it(data->TranslationByteArray(),
data->TranslationIndex(deopt_index)->value());
@@ -976,10 +1000,8 @@ void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
if (opcode == Translation::JS_FRAME) {
jsframe_count--;
it.Next(); // Skip ast id.
- int function_id = it.Next();
+ JSFunction* function = LiteralAt(literal_array, it.Next());
it.Next(); // Skip height.
- JSFunction* function =
- JSFunction::cast(data->LiteralArray()->get(function_id));
functions->Add(function);
} else {
// Skip over operands to advance to the next opcode.
@@ -1394,11 +1416,11 @@ class field##_Wrapper : public ZoneObject { \
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER
-static StackFrame* AllocateFrameCopy(StackFrame* frame) {
+static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
case StackFrame::type: { \
field##_Wrapper* wrapper = \
- new field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
+ new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
return &wrapper->frame_; \
}
@@ -1410,11 +1432,11 @@ static StackFrame* AllocateFrameCopy(StackFrame* frame) {
return NULL;
}
-Vector<StackFrame*> CreateStackMap() {
- ZoneList<StackFrame*> list(10);
+Vector<StackFrame*> CreateStackMap(Zone* zone) {
+ ZoneList<StackFrame*> list(10, zone);
for (StackFrameIterator it; !it.done(); it.Advance()) {
- StackFrame* frame = AllocateFrameCopy(it.frame());
- list.Add(frame);
+ StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
+ list.Add(frame, zone);
}
return list.ToVector();
}
diff --git a/src/3rdparty/v8/src/frames.h b/src/3rdparty/v8/src/frames.h
index 78cdd0c..30f7e1f 100644
--- a/src/3rdparty/v8/src/frames.h
+++ b/src/3rdparty/v8/src/frames.h
@@ -206,6 +206,11 @@ class StackFrame BASE_EMBEDDED {
Address fp() const { return state_.fp; }
Address caller_sp() const { return GetCallerStackPointer(); }
+ // If this frame is optimized and was dynamically aligned return its old
+ // unaligned frame pointer. When the frame is deoptimized its FP will shift
+ // up one word and become unaligned.
+ Address UnpaddedFP() const;
+
Address pc() const { return *pc_address(); }
void set_pc(Address pc) { *pc_address() = pc; }
@@ -572,6 +577,8 @@ class OptimizedFrame : public JavaScriptFrame {
inline explicit OptimizedFrame(StackFrameIterator* iterator);
private:
+ JSFunction* LiteralAt(FixedArray* literal_array, int literal_id);
+
friend class StackFrameIterator;
};
@@ -888,7 +895,7 @@ class StackFrameLocator BASE_EMBEDDED {
// Reads all frames on the current stack and copies them into the current
// zone memory.
-Vector<StackFrame*> CreateStackMap();
+Vector<StackFrame*> CreateStackMap(Zone* zone);
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/full-codegen.cc b/src/3rdparty/v8/src/full-codegen.cc
index 9b1df4e..9592e0a 100644
--- a/src/3rdparty/v8/src/full-codegen.cc
+++ b/src/3rdparty/v8/src/full-codegen.cc
@@ -36,6 +36,7 @@
#include "prettyprinter.h"
#include "scopes.h"
#include "scopeinfo.h"
+#include "snapshot.h"
#include "stub-cache.h"
namespace v8 {
@@ -315,7 +316,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
code->set_optimizable(info->IsOptimizable() &&
!info->function()->flags()->Contains(kDontOptimize) &&
- info->function()->scope()->AllowsLazyRecompilation());
+ info->function()->scope()->AllowsLazyCompilation());
cgen.PopulateDeoptimizationData(code);
cgen.PopulateTypeFeedbackInfo(code);
cgen.PopulateTypeFeedbackCells(code);
@@ -352,7 +353,7 @@ unsigned FullCodeGenerator::EmitStackCheckTable() {
unsigned length = stack_checks_.length();
__ dd(length);
for (unsigned i = 0; i < length; ++i) {
- __ dd(stack_checks_[i].id);
+ __ dd(stack_checks_[i].id.ToInt());
__ dd(stack_checks_[i].pc_and_state);
}
return offset;
@@ -367,7 +368,7 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
Handle<DeoptimizationOutputData> data = isolate()->factory()->
NewDeoptimizationOutputData(length, TENURED);
for (int i = 0; i < length; i++) {
- data->SetAstId(i, Smi::FromInt(bailout_entries_[i].id));
+ data->SetAstId(i, bailout_entries_[i].id);
data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
}
code->set_deoptimization_data(*data);
@@ -382,6 +383,20 @@ void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
}
+void FullCodeGenerator::Initialize() {
+ // The generation of debug code must match between the snapshot code and the
+ // code that is generated later. This is assumed by the debugger when it is
+ // calculating PC offsets after generating a debug version of code. Therefore
+ // we disable the production of debug code in the full compiler if we are
+ // either generating a snapshot or we booted from a snapshot.
+ generate_debug_code_ = FLAG_debug_code &&
+ !Serializer::enabled() &&
+ !Snapshot::HaveASnapshotToStartFrom();
+ masm_->set_emit_debug_code(generate_debug_code_);
+ masm_->set_predictable_code_size(true);
+}
+
+
void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
if (type_feedback_cells_.is_empty()) return;
int length = type_feedback_cells_.length();
@@ -389,7 +404,7 @@ void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
isolate()->factory()->NewFixedArray(array_size, TENURED));
for (int i = 0; i < length; i++) {
- cache->SetAstId(i, Smi::FromInt(type_feedback_cells_[i].ast_id));
+ cache->SetAstId(i, type_feedback_cells_[i].ast_id);
cache->SetCell(i, *type_feedback_cells_[i].cell);
}
TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
@@ -420,7 +435,7 @@ void FullCodeGenerator::RecordJSReturnSite(Call* call) {
}
-void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
+void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
// There's no need to prepare this code for bailouts from already optimized
// code or code that can't be optimized.
if (!info_->HasDeoptimizationSupport()) return;
@@ -440,23 +455,23 @@ void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
}
}
#endif // DEBUG
- bailout_entries_.Add(entry);
+ bailout_entries_.Add(entry, zone());
}
void FullCodeGenerator::RecordTypeFeedbackCell(
- unsigned id, Handle<JSGlobalPropertyCell> cell) {
+ TypeFeedbackId id, Handle<JSGlobalPropertyCell> cell) {
TypeFeedbackCellEntry entry = { id, cell };
- type_feedback_cells_.Add(entry);
+ type_feedback_cells_.Add(entry, zone());
}
-void FullCodeGenerator::RecordStackCheck(unsigned ast_id) {
+void FullCodeGenerator::RecordStackCheck(BailoutId ast_id) {
// The pc offset does not need to be encoded and packed together with a
// state.
ASSERT(masm_->pc_offset() > 0);
BailoutEntry entry = { ast_id, static_cast<unsigned>(masm_->pc_offset()) };
- stack_checks_.Add(entry);
+ stack_checks_.Add(entry, zone());
}
@@ -570,7 +585,7 @@ void FullCodeGenerator::DoTest(const TestContext* context) {
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
ZoneList<Handle<Object> >* saved_globals = globals_;
- ZoneList<Handle<Object> > inner_globals(10);
+ ZoneList<Handle<Object> > inner_globals(10, zone());
globals_ = &inner_globals;
AstVisitor::VisitDeclarations(declarations);
@@ -589,27 +604,20 @@ void FullCodeGenerator::VisitDeclarations(
void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
- Handle<JSModule> instance = module->interface()->Instance();
- ASSERT(!instance.is_null());
-
// Allocate a module context statically.
Block* block = module->body();
Scope* saved_scope = scope();
scope_ = block->scope();
- Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
+ Interface* interface = module->interface();
+ Handle<JSModule> instance = interface->Instance();
- // Generate code for module creation and linking.
Comment cmnt(masm_, "[ ModuleLiteral");
SetStatementPosition(block);
- if (scope_info->HasContext()) {
- // Set up module context.
- __ Push(scope_info);
- __ Push(instance);
- __ CallRuntime(Runtime::kPushModuleContext, 2);
- StoreToFrameField(
- StandardFrameConstants::kContextOffset, context_register());
- }
+ // Set up module context.
+ __ Push(instance);
+ __ CallRuntime(Runtime::kPushModuleContext, 1);
+ StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
{
Comment cmnt(masm_, "[ Declarations");
@@ -617,42 +625,21 @@ void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
}
scope_ = saved_scope;
- if (scope_info->HasContext()) {
- // Pop module context.
- LoadContextField(context_register(), Context::PREVIOUS_INDEX);
- // Update local stack frame context field.
- StoreToFrameField(
- StandardFrameConstants::kContextOffset, context_register());
- }
-
- // Populate module instance object.
- const PropertyAttributes attr =
- static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE | DONT_ENUM);
- for (Interface::Iterator it = module->interface()->iterator();
- !it.done(); it.Advance()) {
- if (it.interface()->IsModule()) {
- Handle<Object> value = it.interface()->Instance();
- ASSERT(!value.is_null());
- JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
- } else {
- // TODO(rossberg): set proper getters instead of undefined...
- // instance->DefineAccessor(*it.name(), ACCESSOR_GETTER, *getter, attr);
- Handle<Object> value(isolate()->heap()->undefined_value());
- JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
- }
- }
- USE(instance->PreventExtensions());
+ // Pop module context.
+ LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+ // Update local stack frame context field.
+ StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
- // Noting to do.
+ // Nothing to do.
// The instance object is resolved statically through the module's interface.
}
void FullCodeGenerator::VisitModulePath(ModulePath* module) {
- // Noting to do.
+ // Nothing to do.
// The instance object is resolved statically through the module's interface.
}
@@ -822,7 +809,7 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
Expression* left = expr->left();
Expression* right = expr->right();
- int right_id = expr->RightId();
+ BailoutId right_id = expr->RightId();
Label done;
if (context()->IsTest()) {
@@ -916,25 +903,36 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
Scope* saved_scope = scope();
// Push a block context when entering a block with block scoped variables.
if (stmt->scope() != NULL) {
- { Comment cmnt(masm_, "[ Extend block context");
- scope_ = stmt->scope();
- Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
- int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
- __ Push(scope_info);
- PushFunctionArgumentForContextAllocation();
- if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
- FastNewBlockContextStub stub(heap_slots);
- __ CallStub(&stub);
- } else {
- __ CallRuntime(Runtime::kPushBlockContext, 2);
+ scope_ = stmt->scope();
+ if (scope_->is_module_scope()) {
+ // If this block is a module body, then we have already allocated and
+ // initialized the declarations earlier. Just push the context.
+ ASSERT(!scope_->interface()->Instance().is_null());
+ __ Push(scope_->interface()->Instance());
+ __ CallRuntime(Runtime::kPushModuleContext, 1);
+ StoreToFrameField(
+ StandardFrameConstants::kContextOffset, context_register());
+ } else {
+ { Comment cmnt(masm_, "[ Extend block context");
+ Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
+ int heap_slots =
+ scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
+ __ Push(scope_info);
+ PushFunctionArgumentForContextAllocation();
+ if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
+ FastNewBlockContextStub stub(heap_slots);
+ __ CallStub(&stub);
+ } else {
+ __ CallRuntime(Runtime::kPushBlockContext, 2);
+ }
+
+ // Replace the context stored in the frame.
+ StoreToFrameField(StandardFrameConstants::kContextOffset,
+ context_register());
+ }
+ { Comment cmnt(masm_, "[ Declarations");
+ VisitDeclarations(scope_->declarations());
}
-
- // Replace the context stored in the frame.
- StoreToFrameField(StandardFrameConstants::kContextOffset,
- context_register());
- }
- { Comment cmnt(masm_, "[ Declarations");
- VisitDeclarations(scope_->declarations());
}
}
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
diff --git a/src/3rdparty/v8/src/full-codegen.h b/src/3rdparty/v8/src/full-codegen.h
index a89b446..972839e 100644
--- a/src/3rdparty/v8/src/full-codegen.h
+++ b/src/3rdparty/v8/src/full-codegen.h
@@ -86,11 +86,18 @@ class FullCodeGenerator: public AstVisitor {
globals_(NULL),
context_(NULL),
bailout_entries_(info->HasDeoptimizationSupport()
- ? info->function()->ast_node_count() : 0),
- stack_checks_(2), // There's always at least one.
+ ? info->function()->ast_node_count() : 0,
+ info->zone()),
+ stack_checks_(2, info->zone()), // There's always at least one.
type_feedback_cells_(info->HasDeoptimizationSupport()
- ? info->function()->ast_node_count() : 0),
- ic_total_count_(0) { }
+ ? info->function()->ast_node_count() : 0,
+ info->zone()),
+ ic_total_count_(0),
+ zone_(info->zone()) {
+ Initialize();
+ }
+
+ void Initialize();
static bool MakeCode(CompilationInfo* info);
@@ -108,6 +115,23 @@ class FullCodeGenerator: public AstVisitor {
return NULL;
}
+ Zone* zone() const { return zone_; }
+
+ static const int kMaxBackEdgeWeight = 127;
+
+#if V8_TARGET_ARCH_IA32
+ static const int kBackEdgeDistanceUnit = 100;
+#elif V8_TARGET_ARCH_X64
+ static const int kBackEdgeDistanceUnit = 162;
+#elif V8_TARGET_ARCH_ARM
+ static const int kBackEdgeDistanceUnit = 142;
+#elif V8_TARGET_ARCH_MIPS
+ static const int kBackEdgeDistanceUnit = 142;
+#else
+#error Unsupported target architecture.
+#endif
+
+
private:
class Breakable;
class Iteration;
@@ -236,7 +260,7 @@ class FullCodeGenerator: public AstVisitor {
// The finally block of a try/finally statement.
class Finally : public NestedStatement {
public:
- static const int kElementCount = 2;
+ static const int kElementCount = 5;
explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
virtual ~Finally() {}
@@ -393,11 +417,12 @@ class FullCodeGenerator: public AstVisitor {
// Bailout support.
void PrepareForBailout(Expression* node, State state);
- void PrepareForBailoutForId(unsigned id, State state);
+ void PrepareForBailoutForId(BailoutId id, State state);
// Cache cell support. This associates AST ids with global property cells
// that will be cleared during GC and collected by the type-feedback oracle.
- void RecordTypeFeedbackCell(unsigned id, Handle<JSGlobalPropertyCell> cell);
+ void RecordTypeFeedbackCell(TypeFeedbackId id,
+ Handle<JSGlobalPropertyCell> cell);
// Record a call's return site offset, used to rebuild the frame if the
// called function was inlined at the site.
@@ -424,7 +449,7 @@ class FullCodeGenerator: public AstVisitor {
// of code inside the loop.
void EmitStackCheck(IterationStatement* stmt, Label* back_edge_target);
// Record the OSR AST id corresponding to a stack check in the code.
- void RecordStackCheck(unsigned osr_ast_id);
+ void RecordStackCheck(BailoutId osr_ast_id);
// Emit a table of stack check ids and pcs into the code stream. Return
// the offset of the start of the table.
unsigned EmitStackCheckTable();
@@ -515,7 +540,7 @@ class FullCodeGenerator: public AstVisitor {
void CallIC(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId);
+ TypeFeedbackId id = TypeFeedbackId::None());
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
@@ -587,12 +612,12 @@ class FullCodeGenerator: public AstVisitor {
Handle<FixedArray> handler_table() { return handler_table_; }
struct BailoutEntry {
- unsigned id;
+ BailoutId id;
unsigned pc_and_state;
};
struct TypeFeedbackCellEntry {
- unsigned ast_id;
+ TypeFeedbackId ast_id;
Handle<JSGlobalPropertyCell> cell;
};
@@ -787,6 +812,8 @@ class FullCodeGenerator: public AstVisitor {
int ic_total_count_;
Handle<FixedArray> handler_table_;
Handle<JSGlobalPropertyCell> profiling_counter_;
+ bool generate_debug_code_;
+ Zone* zone_;
friend class NestedStatement;
@@ -797,16 +824,16 @@ class FullCodeGenerator: public AstVisitor {
// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable: public TemplateHashMap<Literal,
ObjectLiteral::Accessors,
- ZoneListAllocationPolicy> {
+ ZoneAllocationPolicy> {
public:
explicit AccessorTable(Zone* zone) :
- TemplateHashMap<Literal,
- ObjectLiteral::Accessors,
- ZoneListAllocationPolicy>(Literal::Match),
+ TemplateHashMap<Literal, ObjectLiteral::Accessors,
+ ZoneAllocationPolicy>(Literal::Match,
+ ZoneAllocationPolicy(zone)),
zone_(zone) { }
Iterator lookup(Literal* literal) {
- Iterator it = find(literal, true);
+ Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
return it;
}
diff --git a/src/3rdparty/v8/src/func-name-inferrer.cc b/src/3rdparty/v8/src/func-name-inferrer.cc
index 239358d..2dd0bbc 100644
--- a/src/3rdparty/v8/src/func-name-inferrer.cc
+++ b/src/3rdparty/v8/src/func-name-inferrer.cc
@@ -34,11 +34,12 @@
namespace v8 {
namespace internal {
-FuncNameInferrer::FuncNameInferrer(Isolate* isolate)
+FuncNameInferrer::FuncNameInferrer(Isolate* isolate, Zone* zone)
: isolate_(isolate),
- entries_stack_(10),
- names_stack_(5),
- funcs_to_infer_(4) {
+ entries_stack_(10, zone),
+ names_stack_(5, zone),
+ funcs_to_infer_(4, zone),
+ zone_(zone) {
}
@@ -48,21 +49,21 @@ void FuncNameInferrer::PushEnclosingName(Handle<String> name) {
// and starts with a capital letter.
if (name->length() > 0 && Runtime::IsUpperCaseChar(
isolate()->runtime_state(), name->Get(0))) {
- names_stack_.Add(Name(name, kEnclosingConstructorName));
+ names_stack_.Add(Name(name, kEnclosingConstructorName), zone());
}
}
void FuncNameInferrer::PushLiteralName(Handle<String> name) {
if (IsOpen() && !isolate()->heap()->prototype_symbol()->Equals(*name)) {
- names_stack_.Add(Name(name, kLiteralName));
+ names_stack_.Add(Name(name, kLiteralName), zone());
}
}
void FuncNameInferrer::PushVariableName(Handle<String> name) {
if (IsOpen() && !isolate()->heap()->result_symbol()->Equals(*name)) {
- names_stack_.Add(Name(name, kVariableName));
+ names_stack_.Add(Name(name, kVariableName), zone());
}
}
diff --git a/src/3rdparty/v8/src/func-name-inferrer.h b/src/3rdparty/v8/src/func-name-inferrer.h
index 1a57268..f57e778 100644
--- a/src/3rdparty/v8/src/func-name-inferrer.h
+++ b/src/3rdparty/v8/src/func-name-inferrer.h
@@ -45,7 +45,7 @@ class Isolate;
// a name.
class FuncNameInferrer : public ZoneObject {
public:
- explicit FuncNameInferrer(Isolate* isolate);
+ FuncNameInferrer(Isolate* isolate, Zone* zone);
// Returns whether we have entered name collection state.
bool IsOpen() const { return !entries_stack_.is_empty(); }
@@ -55,7 +55,7 @@ class FuncNameInferrer : public ZoneObject {
// Enters name collection state.
void Enter() {
- entries_stack_.Add(names_stack_.length());
+ entries_stack_.Add(names_stack_.length(), zone());
}
// Pushes an encountered name onto names stack when in collection state.
@@ -66,7 +66,7 @@ class FuncNameInferrer : public ZoneObject {
// Adds a function to infer name for.
void AddFunction(FunctionLiteral* func_to_infer) {
if (IsOpen()) {
- funcs_to_infer_.Add(func_to_infer);
+ funcs_to_infer_.Add(func_to_infer, zone());
}
}
@@ -88,6 +88,8 @@ class FuncNameInferrer : public ZoneObject {
void Leave() {
ASSERT(IsOpen());
names_stack_.Rewind(entries_stack_.RemoveLast());
+ if (entries_stack_.is_empty())
+ funcs_to_infer_.Clear();
}
private:
@@ -103,6 +105,7 @@ class FuncNameInferrer : public ZoneObject {
};
Isolate* isolate() { return isolate_; }
+ Zone* zone() const { return zone_; }
// Constructs a full name in dotted notation from gathered names.
Handle<String> MakeNameFromStack();
@@ -117,6 +120,7 @@ class FuncNameInferrer : public ZoneObject {
ZoneList<int> entries_stack_;
ZoneList<Name> names_stack_;
ZoneList<FunctionLiteral*> funcs_to_infer_;
+ Zone* zone_;
DISALLOW_COPY_AND_ASSIGN(FuncNameInferrer);
};
diff --git a/src/3rdparty/v8/src/gdb-jit.cc b/src/3rdparty/v8/src/gdb-jit.cc
index d3cd447..dde6bbd 100644
--- a/src/3rdparty/v8/src/gdb-jit.cc
+++ b/src/3rdparty/v8/src/gdb-jit.cc
@@ -31,11 +31,13 @@
#include "bootstrapper.h"
#include "compiler.h"
+#include "frames.h"
+#include "frames-inl.h"
#include "global-handles.h"
#include "messages.h"
-#include "platform.h"
#include "natives.h"
-#include "scopeinfo.h"
+#include "platform.h"
+#include "scopes.h"
namespace v8 {
namespace internal {
@@ -194,7 +196,7 @@ class DebugSectionBase : public ZoneObject {
virtual void WriteBody(Writer::Slot<THeader> header, Writer* writer) {
uintptr_t start = writer->position();
- if (WriteBody(writer)) {
+ if (WriteBodyInternal(writer)) {
uintptr_t end = writer->position();
header->offset = start;
#if defined(__MACH_O)
@@ -204,7 +206,7 @@ class DebugSectionBase : public ZoneObject {
}
}
- virtual bool WriteBody(Writer* writer) {
+ virtual bool WriteBodyInternal(Writer* writer) {
return false;
}
@@ -340,14 +342,14 @@ class ELFSection : public DebugSectionBase<ELFSectionHeader> {
virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
uintptr_t start = w->position();
- if (WriteBody(w)) {
+ if (WriteBodyInternal(w)) {
uintptr_t end = w->position();
header->offset = start;
header->size = end - start;
}
}
- virtual bool WriteBody(Writer* w) {
+ virtual bool WriteBodyInternal(Writer* w) {
return false;
}
@@ -627,9 +629,9 @@ class MachO BASE_EMBEDDED {
#if defined(__ELF)
class ELF BASE_EMBEDDED {
public:
- ELF() : sections_(6) {
- sections_.Add(new ELFSection("", ELFSection::TYPE_NULL, 0));
- sections_.Add(new StringTable(".shstrtab"));
+ ELF(Zone* zone) : sections_(6, zone) {
+ sections_.Add(new(zone) ELFSection("", ELFSection::TYPE_NULL, 0), zone);
+ sections_.Add(new(zone) StringTable(".shstrtab"), zone);
}
void Write(Writer* w) {
@@ -642,8 +644,8 @@ class ELF BASE_EMBEDDED {
return sections_[index];
}
- uint32_t AddSection(ELFSection* section) {
- sections_.Add(section);
+ uint32_t AddSection(ELFSection* section, Zone* zone) {
+ sections_.Add(section, zone);
section->set_index(sections_.length() - 1);
return sections_.length() - 1;
}
@@ -675,7 +677,7 @@ class ELF BASE_EMBEDDED {
{ 0x7f, 'E', 'L', 'F', 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#elif defined(V8_TARGET_ARCH_X64)
const uint8_t ident[16] =
- { 0x7f, 'E', 'L', 'F', 2, 1, 1, 0, 0, 0 , 0, 0, 0, 0, 0, 0};
+ { 0x7f, 'E', 'L', 'F', 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#else
#error Unsupported target architecture.
#endif
@@ -852,10 +854,10 @@ class ELFSymbol BASE_EMBEDDED {
class ELFSymbolTable : public ELFSection {
public:
- explicit ELFSymbolTable(const char* name)
+ ELFSymbolTable(const char* name, Zone* zone)
: ELFSection(name, TYPE_SYMTAB, sizeof(uintptr_t)),
- locals_(1),
- globals_(1) {
+ locals_(1, zone),
+ globals_(1, zone) {
}
virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
@@ -883,11 +885,11 @@ class ELFSymbolTable : public ELFSection {
strtab->DetachWriter();
}
- void Add(const ELFSymbol& symbol) {
+ void Add(const ELFSymbol& symbol, Zone* zone) {
if (symbol.binding() == ELFSymbol::BIND_LOCAL) {
- locals_.Add(symbol);
+ locals_.Add(symbol, zone);
} else {
- globals_.Add(symbol);
+ globals_.Add(symbol, zone);
}
}
@@ -1019,26 +1021,29 @@ class CodeDescription BASE_EMBEDDED {
static void CreateSymbolsTable(CodeDescription* desc,
ELF* elf,
int text_section_index) {
- ELFSymbolTable* symtab = new ELFSymbolTable(".symtab");
- StringTable* strtab = new StringTable(".strtab");
+ Zone* zone = desc->info()->zone();
+ ELFSymbolTable* symtab = new(zone) ELFSymbolTable(".symtab", zone);
+ StringTable* strtab = new(zone) StringTable(".strtab");
// Symbol table should be followed by the linked string table.
- elf->AddSection(symtab);
- elf->AddSection(strtab);
+ elf->AddSection(symtab, zone);
+ elf->AddSection(strtab, zone);
symtab->Add(ELFSymbol("V8 Code",
0,
0,
ELFSymbol::BIND_LOCAL,
ELFSymbol::TYPE_FILE,
- ELFSection::INDEX_ABSOLUTE));
+ ELFSection::INDEX_ABSOLUTE),
+ zone);
symtab->Add(ELFSymbol(desc->name(),
0,
desc->CodeSize(),
ELFSymbol::BIND_GLOBAL,
ELFSymbol::TYPE_FUNC,
- text_section_index));
+ text_section_index),
+ zone);
}
#endif // defined(__ELF)
@@ -1074,7 +1079,7 @@ class DebugInfoSection : public DebugSection {
DW_ATE_SIGNED = 0x5
};
- bool WriteBody(Writer* w) {
+ bool WriteBodyInternal(Writer* w) {
uintptr_t cu_start = w->position();
Writer::Slot<uint32_t> size = w->CreateSlotHere<uint32_t>();
uintptr_t start = w->position();
@@ -1094,8 +1099,7 @@ class DebugInfoSection : public DebugSection {
w->WriteString("v8value");
if (desc_->IsInfoAvailable()) {
- CompilationInfo* info = desc_->info();
- ScopeInfo<FreeStoreAllocationPolicy> scope_info(info->scope());
+ Scope* scope = desc_->info()->scope();
w->WriteULEB128(2);
w->WriteString(desc_->name());
w->Write<intptr_t>(desc_->CodeStart());
@@ -1106,23 +1110,27 @@ class DebugInfoSection : public DebugSection {
w->Write<uint8_t>(DW_OP_reg5); // The frame pointer's here on ia32
#elif defined(V8_TARGET_ARCH_X64)
w->Write<uint8_t>(DW_OP_reg6); // and here on x64.
+#elif defined(V8_TARGET_ARCH_ARM)
+ UNIMPLEMENTED();
+#elif defined(V8_TARGET_ARCH_MIPS)
+ UNIMPLEMENTED();
#else
#error Unsupported target architecture.
#endif
fb_block_size.set(static_cast<uint32_t>(w->position() - fb_block_start));
- int params = scope_info.number_of_parameters();
- int slots = scope_info.number_of_stack_slots();
- int context_slots = scope_info.number_of_context_slots();
+ int params = scope->num_parameters();
+ int slots = scope->num_stack_slots();
+ int context_slots = scope->ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
- int locals = scope_info.LocalCount();
+ int locals = scope->StackLocalCount();
int current_abbreviation = 4;
for (int param = 0; param < params; ++param) {
w->WriteULEB128(current_abbreviation++);
w->WriteString(
- *scope_info.ParameterName(param)->ToCString(DISALLOW_NULLS));
+ *scope->parameter(param)->name()->ToCString(DISALLOW_NULLS));
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
@@ -1148,7 +1156,7 @@ class DebugInfoSection : public DebugSection {
ASSERT(Context::CLOSURE_INDEX == 0);
ASSERT(Context::PREVIOUS_INDEX == 1);
ASSERT(Context::EXTENSION_INDEX == 2);
- ASSERT(Context::GLOBAL_INDEX == 3);
+ ASSERT(Context::GLOBAL_OBJECT_INDEX == 3);
w->WriteULEB128(current_abbreviation++);
w->WriteString(".closure");
w->WriteULEB128(current_abbreviation++);
@@ -1167,10 +1175,13 @@ class DebugInfoSection : public DebugSection {
w->WriteString(builder.Finalize());
}
+ ZoneList<Variable*> stack_locals(locals, scope->zone());
+ ZoneList<Variable*> context_locals(context_slots, scope->zone());
+ scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
for (int local = 0; local < locals; ++local) {
w->WriteULEB128(current_abbreviation++);
w->WriteString(
- *scope_info.LocalName(local)->ToCString(DISALLOW_NULLS));
+ *stack_locals[local]->name()->ToCString(DISALLOW_NULLS));
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
@@ -1287,7 +1298,7 @@ class DebugAbbrevSection : public DebugSection {
w->WriteULEB128(0);
}
- bool WriteBody(Writer* w) {
+ bool WriteBodyInternal(Writer* w) {
int current_abbreviation = 1;
bool extra_info = desc_->IsInfoAvailable();
ASSERT(desc_->IsLineInfoAvailable());
@@ -1306,14 +1317,13 @@ class DebugAbbrevSection : public DebugSection {
w->WriteULEB128(0);
if (extra_info) {
- CompilationInfo* info = desc_->info();
- ScopeInfo<FreeStoreAllocationPolicy> scope_info(info->scope());
- int params = scope_info.number_of_parameters();
- int slots = scope_info.number_of_stack_slots();
- int context_slots = scope_info.number_of_context_slots();
+ Scope* scope = desc_->info()->scope();
+ int params = scope->num_parameters();
+ int slots = scope->num_stack_slots();
+ int context_slots = scope->ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
- int locals = scope_info.LocalCount();
+ int locals = scope->StackLocalCount();
int total_children =
params + slots + context_slots + internal_slots + locals + 2;
@@ -1418,7 +1428,7 @@ class DebugLineSection : public DebugSection {
DW_LNE_DEFINE_FILE = 3
};
- bool WriteBody(Writer* w) {
+ bool WriteBodyInternal(Writer* w) {
// Write prologue.
Writer::Slot<uint32_t> total_length = w->CreateSlotHere<uint32_t>();
uintptr_t start = w->position();
@@ -1558,7 +1568,7 @@ class DebugLineSection : public DebugSection {
class UnwindInfoSection : public DebugSection {
public:
explicit UnwindInfoSection(CodeDescription* desc);
- virtual bool WriteBody(Writer* w);
+ virtual bool WriteBodyInternal(Writer* w);
int WriteCIE(Writer* w);
void WriteFDE(Writer* w, int);
@@ -1770,7 +1780,7 @@ void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer* w) {
}
-bool UnwindInfoSection::WriteBody(Writer* w) {
+bool UnwindInfoSection::WriteBodyInternal(Writer* w) {
uint32_t cie_position = WriteCIE(w);
WriteFDE(w, cie_position);
return true;
@@ -1780,13 +1790,14 @@ bool UnwindInfoSection::WriteBody(Writer* w) {
#endif // V8_TARGET_ARCH_X64
static void CreateDWARFSections(CodeDescription* desc, DebugObject* obj) {
+ Zone* zone = desc->info()->zone();
if (desc->IsLineInfoAvailable()) {
- obj->AddSection(new DebugInfoSection(desc));
- obj->AddSection(new DebugAbbrevSection(desc));
- obj->AddSection(new DebugLineSection(desc));
+ obj->AddSection(new(zone) DebugInfoSection(desc), zone);
+ obj->AddSection(new(zone) DebugAbbrevSection(desc), zone);
+ obj->AddSection(new(zone) DebugLineSection(desc), zone);
}
#ifdef V8_TARGET_ARCH_X64
- obj->AddSection(new UnwindInfoSection(desc));
+ obj->AddSection(new(zone) UnwindInfoSection(desc), zone);
#endif
}
@@ -1905,7 +1916,8 @@ static void UnregisterCodeEntry(JITCodeEntry* entry) {
static JITCodeEntry* CreateELFObject(CodeDescription* desc) {
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ Zone* zone = desc->info()->zone();
+ ZoneScope zone_scope(zone, DELETE_ON_EXIT);
#ifdef __MACH_O
MachO mach_o;
Writer w(&mach_o);
@@ -1918,17 +1930,19 @@ static JITCodeEntry* CreateELFObject(CodeDescription* desc) {
mach_o.Write(&w, desc->CodeStart(), desc->CodeSize());
#else
- ELF elf;
+ ELF elf(zone);
Writer w(&elf);
int text_section_index = elf.AddSection(
- new FullHeaderELFSection(".text",
- ELFSection::TYPE_NOBITS,
- kCodeAlignment,
- desc->CodeStart(),
- 0,
- desc->CodeSize(),
- ELFSection::FLAG_ALLOC | ELFSection::FLAG_EXEC));
+ new(zone) FullHeaderELFSection(
+ ".text",
+ ELFSection::TYPE_NOBITS,
+ kCodeAlignment,
+ desc->CodeStart(),
+ 0,
+ desc->CodeSize(),
+ ELFSection::FLAG_ALLOC | ELFSection::FLAG_EXEC),
+ zone);
CreateSymbolsTable(desc, &elf, text_section_index);
diff --git a/src/3rdparty/v8/src/global-handles.cc b/src/3rdparty/v8/src/global-handles.cc
index 9c0ad45..0006f8e 100644
--- a/src/3rdparty/v8/src/global-handles.cc
+++ b/src/3rdparty/v8/src/global-handles.cc
@@ -69,6 +69,7 @@ class GlobalHandles::Node {
class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
index_ = 0;
independent_ = false;
+ partially_dependent_ = false;
in_new_space_list_ = false;
parameter_or_next_free_.next_free = NULL;
callback_ = NULL;
@@ -89,6 +90,7 @@ class GlobalHandles::Node {
object_ = object;
class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
independent_ = false;
+ partially_dependent_ = false;
state_ = NORMAL;
parameter_or_next_free_.parameter = NULL;
callback_ = NULL;
@@ -154,6 +156,15 @@ class GlobalHandles::Node {
}
bool is_independent() const { return independent_; }
+ void MarkPartiallyDependent(GlobalHandles* global_handles) {
+ ASSERT(state_ != FREE);
+ if (global_handles->isolate()->heap()->InNewSpace(object_)) {
+ partially_dependent_ = true;
+ }
+ }
+ bool is_partially_dependent() const { return partially_dependent_; }
+ void clear_partially_dependent() { partially_dependent_ = false; }
+
// In-new-space-list flag accessors.
void set_in_new_space_list(bool v) { in_new_space_list_ = v; }
bool is_in_new_space_list() const { return in_new_space_list_; }
@@ -260,6 +271,7 @@ class GlobalHandles::Node {
State state_ : 4;
bool independent_ : 1;
+ bool partially_dependent_ : 1;
bool in_new_space_list_ : 1;
// Handle specific callback.
@@ -448,6 +460,16 @@ void GlobalHandles::MarkIndependent(Object** location) {
}
+void GlobalHandles::MarkPartiallyDependent(Object** location) {
+ Node::FromLocation(location)->MarkPartiallyDependent(this);
+}
+
+
+bool GlobalHandles::IsIndependent(Object** location) {
+ return Node::FromLocation(location)->is_independent();
+}
+
+
bool GlobalHandles::IsNearDeath(Object** location) {
return Node::FromLocation(location)->IsNearDeath();
}
@@ -462,6 +484,9 @@ void GlobalHandles::SetWrapperClassId(Object** location, uint16_t class_id) {
Node::FromLocation(location)->set_wrapper_class_id(class_id);
}
+uint16_t GlobalHandles::GetWrapperClassId(Object** location) {
+ return Node::FromLocation(location)->wrapper_class_id();
+}
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
@@ -493,8 +518,9 @@ void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
if (node->IsStrongRetainer() ||
- (node->IsWeakRetainer() && !node->is_independent())) {
- v->VisitPointer(node->location());
+ (node->IsWeakRetainer() && !node->is_independent() &&
+ !node->is_partially_dependent())) {
+ v->VisitPointer(node->location());
}
}
}
@@ -505,8 +531,8 @@ void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
ASSERT(node->is_in_new_space_list());
- if (node->is_independent() && node->IsWeak() &&
- f(isolate_->heap(), node->location())) {
+ if ((node->is_independent() || node->is_partially_dependent()) &&
+ node->IsWeak() && f(isolate_->heap(), node->location())) {
node->MarkPending();
}
}
@@ -517,7 +543,8 @@ void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
ASSERT(node->is_in_new_space_list());
- if (node->is_independent() && node->IsWeakRetainer()) {
+ if ((node->is_independent() || node->is_partially_dependent()) &&
+ node->IsWeakRetainer()) {
v->VisitPointer(node->location());
}
}
@@ -539,7 +566,10 @@ bool GlobalHandles::PostGarbageCollectionProcessing(
// Skip dependent handles. Their weak callbacks might expect to be
// called between two global garbage collection callbacks which
// are not called for minor collections.
- if (!node->is_independent()) continue;
+ if (!node->is_independent() && !node->is_partially_dependent()) {
+ continue;
+ }
+ node->clear_partially_dependent();
if (node->PostGarbageCollectionProcessing(isolate_, this)) {
if (initial_post_gc_processing_count != post_gc_processing_count_) {
// Weak callback triggered another GC and another round of
@@ -555,6 +585,7 @@ bool GlobalHandles::PostGarbageCollectionProcessing(
}
} else {
for (NodeIterator it(this); !it.done(); it.Advance()) {
+ it.node()->clear_partially_dependent();
if (it.node()->PostGarbageCollectionProcessing(isolate_, this)) {
if (initial_post_gc_processing_count != post_gc_processing_count_) {
// See the comment above.
@@ -602,7 +633,7 @@ void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
- if (it.node()->has_wrapper_class_id() && it.node()->IsRetainer()) {
+ if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
v->VisitEmbedderReference(it.node()->location(),
it.node()->wrapper_class_id());
}
diff --git a/src/3rdparty/v8/src/global-handles.h b/src/3rdparty/v8/src/global-handles.h
index ddf5fe2..482baef 100644
--- a/src/3rdparty/v8/src/global-handles.h
+++ b/src/3rdparty/v8/src/global-handles.h
@@ -131,6 +131,7 @@ class GlobalHandles {
WeakReferenceCallback callback);
static void SetWrapperClassId(Object** location, uint16_t class_id);
+ static uint16_t GetWrapperClassId(Object** location);
// Returns the current number of weak handles.
int NumberOfWeakHandles() { return number_of_weak_handles_; }
@@ -154,6 +155,11 @@ class GlobalHandles {
// Clear the weakness of a global handle.
void MarkIndependent(Object** location);
+ // Mark the reference to this object externaly unreachable.
+ void MarkPartiallyDependent(Object** location);
+
+ static bool IsIndependent(Object** location);
+
// Tells whether global handle is near death.
static bool IsNearDeath(Object** location);
@@ -192,16 +198,17 @@ class GlobalHandles {
// Iterates over strong and dependent handles. See the node above.
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v);
- // Finds weak independent handles satisfying the callback predicate
- // and marks them as pending. See the note above.
+ // Finds weak independent or partially independent handles satisfying
+ // the callback predicate and marks them as pending. See the note above.
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f);
- // Iterates over weak independent handles. See the note above.
+ // Iterates over weak independent or partially independent handles.
+ // See the note above.
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v);
// Add an object group.
// Should be only used in GC callback function before a collection.
- // All groups are destroyed after a mark-compact collection.
+ // All groups are destroyed after a garbage collection.
void AddObjectGroup(Object*** handles,
size_t length,
v8::RetainedObjectInfo* info);
diff --git a/src/3rdparty/v8/src/globals.h b/src/3rdparty/v8/src/globals.h
index 54d628e..74c12f8 100644
--- a/src/3rdparty/v8/src/globals.h
+++ b/src/3rdparty/v8/src/globals.h
@@ -74,7 +74,7 @@ namespace internal {
#define V8_HOST_ARCH_IA32 1
#define V8_HOST_ARCH_32_BIT 1
#define V8_HOST_CAN_READ_UNALIGNED 1
-#elif defined(__ARMEL__)
+#elif defined(__ARMEL__) || defined(_M_ARM)
#define V8_HOST_ARCH_ARM 1
#define V8_HOST_ARCH_32_BIT 1
// Some CPU-OS combinations allow unaligned access on ARM. We assume
@@ -128,7 +128,7 @@ namespace internal {
// Setting USE_SIMULATOR explicitly from the build script will force
// the use of a simulated environment.
#if !defined(USE_SIMULATOR)
-#if (defined(V8_TARGET_ARCH_ARM) && !defined(V8_HOST_ARCH_ARM))
+#if (defined(V8_TARGET_ARCH_ARM) && !defined(V8_HOST_ARCH_ARM) && !defined(_WIN32_WCE))
#define USE_SIMULATOR 1
#endif
#if (defined(V8_TARGET_ARCH_MIPS) && !defined(V8_HOST_ARCH_MIPS))
@@ -136,21 +136,6 @@ namespace internal {
#endif
#endif
-// Define unaligned read for the target architectures supporting it.
-#if defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_IA32)
-#define V8_TARGET_CAN_READ_UNALIGNED 1
-#elif V8_TARGET_ARCH_ARM
-// Some CPU-OS combinations allow unaligned access on ARM. We assume
-// that unaligned accesses are not allowed unless the build system
-// defines the CAN_USE_UNALIGNED_ACCESSES macro to be non-zero.
-#if CAN_USE_UNALIGNED_ACCESSES
-#define V8_TARGET_CAN_READ_UNALIGNED 1
-#endif
-#elif V8_TARGET_ARCH_MIPS
-#else
-#error Target architecture is not supported by v8
-#endif
-
// Support for alternative bool type. This is only enabled if the code is
// compiled with USE_MYBOOL defined. This catches some nasty type bugs.
// For instance, 'bool b = "false";' results in b == true! This is a hidden
@@ -203,6 +188,7 @@ typedef byte* Address;
#define V8PRIxPTR V8_PTR_PREFIX "x"
#define V8PRIdPTR V8_PTR_PREFIX "d"
+#define V8PRIuPTR V8_PTR_PREFIX "u"
// Fix for Mac OS X defining uintptr_t as "unsigned long":
#if defined(__APPLE__) && defined(__MACH__)
@@ -360,6 +346,20 @@ F FUNCTION_CAST(Address addr) {
#define MUST_USE_RESULT
#endif
+
+// Define DISABLE_ASAN macros.
+#if defined(__has_feature)
+#if __has_feature(address_sanitizer)
+#define DISABLE_ASAN __attribute__((no_address_safety_analysis))
+#endif
+#endif
+
+
+#ifndef DISABLE_ASAN
+#define DISABLE_ASAN
+#endif
+
+
// -----------------------------------------------------------------------------
// Forward declarations for frequently used classes
// (sorted alphabetically)
diff --git a/src/3rdparty/v8/src/handles-inl.h b/src/3rdparty/v8/src/handles-inl.h
index a5c81ce..1307986 100644
--- a/src/3rdparty/v8/src/handles-inl.h
+++ b/src/3rdparty/v8/src/handles-inl.h
@@ -149,25 +149,31 @@ T** HandleScope::CreateHandle(T* value, Isolate* isolate) {
#ifdef DEBUG
inline NoHandleAllocation::NoHandleAllocation() {
+ Isolate* isolate = Isolate::Current();
v8::ImplementationUtilities::HandleScopeData* current =
- Isolate::Current()->handle_scope_data();
+ isolate->handle_scope_data();
- // Shrink the current handle scope to make it impossible to do
- // handle allocations without an explicit handle scope.
- current->limit = current->next;
+ active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
+ if (active_) {
+ // Shrink the current handle scope to make it impossible to do
+ // handle allocations without an explicit handle scope.
+ current->limit = current->next;
- level_ = current->level;
- current->level = 0;
+ level_ = current->level;
+ current->level = 0;
+ }
}
inline NoHandleAllocation::~NoHandleAllocation() {
- // Restore state in current handle scope to re-enable handle
- // allocations.
- v8::ImplementationUtilities::HandleScopeData* data =
- Isolate::Current()->handle_scope_data();
- ASSERT_EQ(0, data->level);
- data->level = level_;
+ if (active_) {
+ // Restore state in current handle scope to re-enable handle
+ // allocations.
+ v8::ImplementationUtilities::HandleScopeData* data =
+ Isolate::Current()->handle_scope_data();
+ ASSERT_EQ(0, data->level);
+ data->level = level_;
+ }
}
#endif
diff --git a/src/3rdparty/v8/src/handles.cc b/src/3rdparty/v8/src/handles.cc
index def1604..a6192d8 100644
--- a/src/3rdparty/v8/src/handles.cc
+++ b/src/3rdparty/v8/src/handles.cc
@@ -165,7 +165,7 @@ void SetExpectedNofProperties(Handle<JSFunction> func, int nof) {
func->shared()->set_expected_nof_properties(nof);
if (func->has_initial_map()) {
Handle<Map> new_initial_map =
- func->GetIsolate()->factory()->CopyMapDropTransitions(
+ func->GetIsolate()->factory()->CopyMap(
Handle<Map>(func->initial_map()));
new_initial_map->set_unused_property_fields(nof);
func->set_initial_map(*new_initial_map);
@@ -561,6 +561,9 @@ v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver,
result = enum_fun(info);
}
}
+#if ENABLE_EXTRA_CHECKS
+ CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject());
+#endif
return result;
}
@@ -581,12 +584,34 @@ v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSReceiver> receiver,
// Leaving JavaScript.
VMState state(isolate, EXTERNAL);
result = enum_fun(info);
+#if ENABLE_EXTRA_CHECKS
+ CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject());
+#endif
}
}
return result;
}
+Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script) {
+ Isolate* isolate = script->GetIsolate();
+ Handle<String> name_or_source_url_key =
+ isolate->factory()->LookupAsciiSymbol("nameOrSourceURL");
+ Handle<JSValue> script_wrapper = GetScriptWrapper(script);
+ Handle<Object> property = GetProperty(script_wrapper,
+ name_or_source_url_key);
+ ASSERT(property->IsJSFunction());
+ Handle<JSFunction> method = Handle<JSFunction>::cast(property);
+ bool caught_exception;
+ Handle<Object> result = Execution::TryCall(method, script_wrapper, 0,
+ NULL, &caught_exception);
+ if (caught_exception) {
+ result = isolate->factory()->undefined_value();
+ }
+ return result;
+}
+
+
static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
int len = array->length();
for (int i = 0; i < len; i++) {
@@ -604,7 +629,7 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object,
Isolate* isolate = object->GetIsolate();
Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
Handle<JSObject> arguments_boilerplate = Handle<JSObject>(
- isolate->context()->global_context()->arguments_boilerplate(),
+ isolate->context()->native_context()->arguments_boilerplate(),
isolate);
Handle<JSFunction> arguments_function = Handle<JSFunction>(
JSFunction::cast(arguments_boilerplate->map()->constructor()),
@@ -699,77 +724,134 @@ Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw) {
}
+Handle<FixedArray> ReduceFixedArrayTo(Handle<FixedArray> array, int length) {
+ ASSERT(array->length() >= length);
+ if (array->length() == length) return array;
+
+ Handle<FixedArray> new_array =
+ array->GetIsolate()->factory()->NewFixedArray(length);
+ for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
+ return new_array;
+}
+
+
Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
bool cache_result) {
- int index = 0;
Isolate* isolate = object->GetIsolate();
if (object->HasFastProperties()) {
if (object->map()->instance_descriptors()->HasEnumCache()) {
- isolate->counters()->enum_cache_hits()->Increment();
+ int own_property_count = object->map()->EnumLength();
+ // If we have an enum cache, but the enum length of the given map is set
+ // to kInvalidEnumCache, this means that the map itself has never used the
+ // present enum cache. The first step to using the cache is to set the
+ // enum length of the map by counting the number of own descriptors that
+ // are not DONT_ENUM.
+ if (own_property_count == Map::kInvalidEnumCache) {
+ own_property_count = object->map()->NumberOfDescribedProperties(
+ OWN_DESCRIPTORS, DONT_ENUM);
+
+ if (cache_result) object->map()->SetEnumLength(own_property_count);
+ }
+
DescriptorArray* desc = object->map()->instance_descriptors();
- return Handle<FixedArray>(FixedArray::cast(desc->GetEnumCache()),
- isolate);
+ Handle<FixedArray> keys(desc->GetEnumCache(), isolate);
+
+ // In case the number of properties required in the enum are actually
+ // present, we can reuse the enum cache. Otherwise, this means that the
+ // enum cache was generated for a previous (smaller) version of the
+ // Descriptor Array. In that case we regenerate the enum cache.
+ if (own_property_count <= keys->length()) {
+ isolate->counters()->enum_cache_hits()->Increment();
+ return ReduceFixedArrayTo(keys, own_property_count);
+ }
}
- isolate->counters()->enum_cache_misses()->Increment();
+
Handle<Map> map(object->map());
- int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
- Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
- Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
+ if (map->instance_descriptors()->IsEmpty()) {
+ isolate->counters()->enum_cache_hits()->Increment();
+ if (cache_result) map->SetEnumLength(0);
+ return isolate->factory()->empty_fixed_array();
+ }
- Handle<FixedArray> indices;
- Handle<FixedArray> sort_array2;
+ isolate->counters()->enum_cache_misses()->Increment();
+ int num_enum = map->NumberOfDescribedProperties(ALL_DESCRIPTORS, DONT_ENUM);
- if (cache_result) {
- indices = isolate->factory()->NewFixedArray(num_enum);
- sort_array2 = isolate->factory()->NewFixedArray(num_enum);
- }
+ Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
+ Handle<FixedArray> indices = isolate->factory()->NewFixedArray(num_enum);
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
+ int real_size = map->NumberOfOwnDescriptors();
+ int enum_size = 0;
+ int index = 0;
+
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsProperty(i) && !descs->GetDetails(i).IsDontEnum()) {
+ PropertyDetails details = descs->GetDetails(i);
+ if (!details.IsDontEnum()) {
+ if (i < real_size) ++enum_size;
storage->set(index, descs->GetKey(i));
- PropertyDetails details = descs->GetDetails(i);
- sort_array->set(index, Smi::FromInt(details.index()));
if (!indices.is_null()) {
if (details.type() != FIELD) {
indices = Handle<FixedArray>();
- sort_array2 = Handle<FixedArray>();
} else {
int field_index = Descriptor::IndexFromValue(descs->GetValue(i));
if (field_index >= map->inobject_properties()) {
field_index = -(field_index - map->inobject_properties() + 1);
}
indices->set(index, Smi::FromInt(field_index));
- sort_array2->set(index, Smi::FromInt(details.index()));
}
}
index++;
}
}
- storage->SortPairs(*sort_array, sort_array->length());
- if (!indices.is_null()) {
- indices->SortPairs(*sort_array2, sort_array2->length());
- }
+ ASSERT(index == storage->length());
+
+ Handle<FixedArray> bridge_storage =
+ isolate->factory()->NewFixedArray(
+ DescriptorArray::kEnumCacheBridgeLength);
+ DescriptorArray* desc = object->map()->instance_descriptors();
+ desc->SetEnumCache(*bridge_storage,
+ *storage,
+ indices.is_null() ? Object::cast(Smi::FromInt(0))
+ : Object::cast(*indices));
if (cache_result) {
- Handle<FixedArray> bridge_storage =
- isolate->factory()->NewFixedArray(
- DescriptorArray::kEnumCacheBridgeLength);
- DescriptorArray* desc = object->map()->instance_descriptors();
- desc->SetEnumCache(*bridge_storage,
- *storage,
- indices.is_null() ? Object::cast(Smi::FromInt(0))
- : Object::cast(*indices));
+ object->map()->SetEnumLength(enum_size);
}
- ASSERT(storage->length() == index);
- return storage;
+
+ return ReduceFixedArrayTo(storage, enum_size);
} else {
- int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
- Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
- Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
- object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);
+ Handle<StringDictionary> dictionary(object->property_dictionary());
+
+ int length = dictionary->NumberOfElements();
+ if (length == 0) {
+ return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
+ }
+
+ // The enumeration array is generated by allocating an array big enough to
+  // hold all properties that have been seen, whether they are deleted or
+ // not. Subsequently all visible properties are added to the array. If some
+ // properties were not visible, the array is trimmed so it only contains
+ // visible properties. This improves over adding elements and sorting by
+ // index by having linear complexity rather than n*log(n).
+
+  // By comparing the monotonically increasing NextEnumerationIndex to the
+ // we can predict the number of holes in the final array. If there will be
+ // more than 50% holes, regenerate the enumeration indices to reduce the
+ // number of holes to a minimum. This avoids allocating a large array if
+ // many properties were added but subsequently deleted.
+ int next_enumeration = dictionary->NextEnumerationIndex();
+ if (!object->IsGlobalObject() && next_enumeration > (length * 3) / 2) {
+ StringDictionary::DoGenerateNewEnumerationIndices(dictionary);
+ next_enumeration = dictionary->NextEnumerationIndex();
+ }
+
+ Handle<FixedArray> storage =
+ isolate->factory()->NewFixedArray(next_enumeration);
+
+ storage = Handle<FixedArray>(dictionary->CopyEnumKeysTo(*storage));
+ ASSERT(storage->length() == object->NumberOfLocalProperties(DONT_ENUM));
return storage;
}
}
@@ -958,4 +1040,47 @@ int Utf8Length(Handle<String> str) {
return len;
}
+
+DeferredHandleScope::DeferredHandleScope(Isolate* isolate)
+ : impl_(isolate->handle_scope_implementer()) {
+ ASSERT(impl_->isolate() == Isolate::Current());
+ impl_->BeginDeferredScope();
+ v8::ImplementationUtilities::HandleScopeData* data =
+ impl_->isolate()->handle_scope_data();
+ Object** new_next = impl_->GetSpareOrNewBlock();
+ Object** new_limit = &new_next[kHandleBlockSize];
+ ASSERT(data->limit == &impl_->blocks()->last()[kHandleBlockSize]);
+ impl_->blocks()->Add(new_next);
+
+#ifdef DEBUG
+ prev_level_ = data->level;
+#endif
+ data->level++;
+ prev_limit_ = data->limit;
+ prev_next_ = data->next;
+ data->next = new_next;
+ data->limit = new_limit;
+}
+
+
+DeferredHandleScope::~DeferredHandleScope() {
+ impl_->isolate()->handle_scope_data()->level--;
+ ASSERT(handles_detached_);
+ ASSERT(impl_->isolate()->handle_scope_data()->level == prev_level_);
+}
+
+
+DeferredHandles* DeferredHandleScope::Detach() {
+ DeferredHandles* deferred = impl_->Detach(prev_limit_);
+ v8::ImplementationUtilities::HandleScopeData* data =
+ impl_->isolate()->handle_scope_data();
+ data->next = prev_next_;
+ data->limit = prev_limit_;
+#ifdef DEBUG
+ handles_detached_ = true;
+#endif
+ return deferred;
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/handles.h b/src/3rdparty/v8/src/handles.h
index 960696b..b80dbe5 100644
--- a/src/3rdparty/v8/src/handles.h
+++ b/src/3rdparty/v8/src/handles.h
@@ -95,6 +95,17 @@ class Handle {
};
+// Convenience wrapper.
+template<class T>
+inline Handle<T> handle(T* t) {
+ return Handle<T>(t);
+}
+
+
+class DeferredHandles;
+class HandleScopeImplementer;
+
+
// A stack-allocated class that governs a number of local handles.
// After a handle scope has been created, all local handles will be
// allocated within that handle scope until either the handle scope is
@@ -156,8 +167,37 @@ class HandleScope {
// Zaps the handles in the half-open interval [start, end).
static void ZapRange(internal::Object** start, internal::Object** end);
+ friend class v8::internal::DeferredHandles;
friend class v8::HandleScope;
+ friend class v8::internal::HandleScopeImplementer;
friend class v8::ImplementationUtilities;
+ friend class v8::internal::Isolate;
+};
+
+
+class DeferredHandles;
+
+
+class DeferredHandleScope {
+ public:
+ explicit DeferredHandleScope(Isolate* isolate);
+ // The DeferredHandles object returned stores the Handles created
+ // since the creation of this DeferredHandleScope. The Handles are
+ // alive as long as the DeferredHandles object is alive.
+ DeferredHandles* Detach();
+ ~DeferredHandleScope();
+
+ private:
+ Object** prev_limit_;
+ Object** prev_next_;
+ HandleScopeImplementer* impl_;
+
+#ifdef DEBUG
+ bool handles_detached_;
+ int prev_level_;
+#endif
+
+ friend class HandleScopeImplementer;
};
@@ -216,7 +256,7 @@ Handle<FixedArray> AddKeysFromJSArray(Handle<FixedArray>,
// if none exists.
Handle<JSValue> GetScriptWrapper(Handle<Script> script);
-// Script line number computations.
+// Script line number computations. Note that the line number is zero-based.
void InitScriptLineEnds(Handle<Script> script);
// For string calculates an array of line end positions. If the string
// does not end with a new line character, this character may optionally be
@@ -227,6 +267,7 @@ int GetScriptLineNumber(Handle<Script> script, int code_position);
// The safe version does not make heap allocations but may work much slower.
int GetScriptLineNumberSafe(Handle<Script> script, int code_position);
int GetScriptColumnNumber(Handle<Script> script, int code_position);
+Handle<Object> GetScriptNameOrSourceURL(Handle<Script> script);
// Computes the enumerable keys from interceptors. Used for debug mirrors and
// by GetKeysInFixedArrayFor below.
@@ -243,6 +284,7 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSReceiver> object,
KeyCollectionType type,
bool* threw);
Handle<JSArray> GetKeysFor(Handle<JSReceiver> object, bool* threw);
+Handle<FixedArray> ReduceFixedArrayTo(Handle<FixedArray> array, int length);
Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
bool cache_result);
@@ -294,6 +336,7 @@ class NoHandleAllocation BASE_EMBEDDED {
inline ~NoHandleAllocation();
private:
int level_;
+ bool active_;
#endif
};
diff --git a/src/3rdparty/v8/src/hashmap.h b/src/3rdparty/v8/src/hashmap.h
index 91843b8..11f6ace 100644
--- a/src/3rdparty/v8/src/hashmap.h
+++ b/src/3rdparty/v8/src/hashmap.h
@@ -40,9 +40,16 @@ class TemplateHashMapImpl {
public:
typedef bool (*MatchFun) (void* key1, void* key2);
+ // The default capacity. This is used by the call sites which want
+ // to pass in a non-default AllocationPolicy but want to use the
+ // default value of capacity specified by the implementation.
+ static const uint32_t kDefaultHashMapCapacity = 8;
+
// initial_capacity is the size of the initial hash map;
// it must be a power of 2 (and thus must not be 0).
- TemplateHashMapImpl(MatchFun match, uint32_t initial_capacity = 8);
+ TemplateHashMapImpl(MatchFun match,
+ uint32_t capacity = kDefaultHashMapCapacity,
+ AllocationPolicy allocator = AllocationPolicy());
~TemplateHashMapImpl();
@@ -52,7 +59,8 @@ class TemplateHashMapImpl {
struct Entry {
void* key;
void* value;
- uint32_t hash; // the full hash value for key
+ uint32_t hash; // The full hash value for key
+ int order; // If you never remove entries this is the insertion order.
};
// If an entry with matching key is found, Lookup()
@@ -60,7 +68,8 @@ class TemplateHashMapImpl {
// but insert is set, a new entry is inserted with
// corresponding key, key hash, and NULL value.
// Otherwise, NULL is returned.
- Entry* Lookup(void* key, uint32_t hash, bool insert);
+ Entry* Lookup(void* key, uint32_t hash, bool insert,
+ AllocationPolicy allocator = AllocationPolicy());
// Removes the entry with matching key.
// It returns the value of the deleted entry
@@ -97,29 +106,30 @@ class TemplateHashMapImpl {
Entry* map_end() const { return map_ + capacity_; }
Entry* Probe(void* key, uint32_t hash);
- void Initialize(uint32_t capacity);
- void Resize();
+ void Initialize(uint32_t capacity, AllocationPolicy allocator);
+ void Resize(AllocationPolicy allocator);
};
typedef TemplateHashMapImpl<FreeStoreAllocationPolicy> HashMap;
-template<class P>
-TemplateHashMapImpl<P>::TemplateHashMapImpl(MatchFun match,
- uint32_t initial_capacity) {
+template<class AllocationPolicy>
+TemplateHashMapImpl<AllocationPolicy>::TemplateHashMapImpl(
+ MatchFun match, uint32_t initial_capacity, AllocationPolicy allocator) {
match_ = match;
- Initialize(initial_capacity);
+ Initialize(initial_capacity, allocator);
}
-template<class P>
-TemplateHashMapImpl<P>::~TemplateHashMapImpl() {
- P::Delete(map_);
+template<class AllocationPolicy>
+TemplateHashMapImpl<AllocationPolicy>::~TemplateHashMapImpl() {
+ AllocationPolicy::Delete(map_);
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
- void* key, uint32_t hash, bool insert) {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+TemplateHashMapImpl<AllocationPolicy>::Lookup(
+ void* key, uint32_t hash, bool insert, AllocationPolicy allocator) {
// Find a matching entry.
Entry* p = Probe(key, hash);
if (p->key != NULL) {
@@ -131,11 +141,12 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
p->key = key;
p->value = NULL;
p->hash = hash;
+ p->order = occupancy_;
occupancy_++;
// Grow the map if we reached >= 80% occupancy.
if (occupancy_ + occupancy_/4 >= capacity_) {
- Resize();
+ Resize(allocator);
p = Probe(key, hash);
}
@@ -147,8 +158,8 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
}
-template<class P>
-void* TemplateHashMapImpl<P>::Remove(void* key, uint32_t hash) {
+template<class AllocationPolicy>
+void* TemplateHashMapImpl<AllocationPolicy>::Remove(void* key, uint32_t hash) {
// Lookup the entry for the key to remove.
Entry* p = Probe(key, hash);
if (p->key == NULL) {
@@ -209,8 +220,8 @@ void* TemplateHashMapImpl<P>::Remove(void* key, uint32_t hash) {
}
-template<class P>
-void TemplateHashMapImpl<P>::Clear() {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Clear() {
// Mark all entries as empty.
const Entry* end = map_end();
for (Entry* p = map_; p < end; p++) {
@@ -220,15 +231,16 @@ void TemplateHashMapImpl<P>::Clear() {
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Start() const {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Start() const {
return Next(map_ - 1);
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Next(Entry* p)
- const {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Next(Entry* p) const {
const Entry* end = map_end();
ASSERT(map_ - 1 <= p && p < end);
for (p++; p < end; p++) {
@@ -240,9 +252,9 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Next(Entry* p)
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Probe(void* key,
- uint32_t hash) {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Probe(void* key, uint32_t hash) {
ASSERT(key != NULL);
ASSERT(IsPowerOf2(capacity_));
@@ -262,10 +274,11 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Probe(void* key,
}
-template<class P>
-void TemplateHashMapImpl<P>::Initialize(uint32_t capacity) {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Initialize(
+ uint32_t capacity, AllocationPolicy allocator) {
ASSERT(IsPowerOf2(capacity));
- map_ = reinterpret_cast<Entry*>(P::New(capacity * sizeof(Entry)));
+ map_ = reinterpret_cast<Entry*>(allocator.New(capacity * sizeof(Entry)));
if (map_ == NULL) {
v8::internal::FatalProcessOutOfMemory("HashMap::Initialize");
return;
@@ -275,24 +288,26 @@ void TemplateHashMapImpl<P>::Initialize(uint32_t capacity) {
}
-template<class P>
-void TemplateHashMapImpl<P>::Resize() {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Resize(AllocationPolicy allocator) {
Entry* map = map_;
uint32_t n = occupancy_;
// Allocate larger map.
- Initialize(capacity_ * 2);
+ Initialize(capacity_ * 2, allocator);
// Rehash all current entries.
for (Entry* p = map; n > 0; p++) {
if (p->key != NULL) {
- Lookup(p->key, p->hash, true)->value = p->value;
+ Entry* entry = Lookup(p->key, p->hash, true, allocator);
+ entry->value = p->value;
+ entry->order = p->order;
n--;
}
}
// Delete old map.
- P::Delete(map);
+ AllocationPolicy::Delete(map);
}
@@ -329,13 +344,18 @@ class TemplateHashMap: private TemplateHashMapImpl<AllocationPolicy> {
};
TemplateHashMap(
- typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match)
- : TemplateHashMapImpl<AllocationPolicy>(match) { }
+ typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match,
+ AllocationPolicy allocator = AllocationPolicy())
+ : TemplateHashMapImpl<AllocationPolicy>(
+ match,
+ TemplateHashMapImpl<AllocationPolicy>::kDefaultHashMapCapacity,
+ allocator) { }
Iterator begin() const { return Iterator(this, this->Start()); }
Iterator end() const { return Iterator(this, NULL); }
- Iterator find(Key* key, bool insert = false) {
- return Iterator(this, this->Lookup(key, key->Hash(), insert));
+ Iterator find(Key* key, bool insert = false,
+ AllocationPolicy allocator = AllocationPolicy()) {
+ return Iterator(this, this->Lookup(key, key->Hash(), insert, allocator));
}
};
diff --git a/src/3rdparty/v8/src/heap-inl.h b/src/3rdparty/v8/src/heap-inl.h
index aa933b6..cb274cb 100644
--- a/src/3rdparty/v8/src/heap-inl.h
+++ b/src/3rdparty/v8/src/heap-inl.h
@@ -85,13 +85,16 @@ void PromotionQueue::ActivateGuardIfOnTheSamePage() {
MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
PretenureFlag pretenure) {
// Check for ASCII first since this is the common case.
- if (String::IsAscii(str.start(), str.length())) {
+ const char* start = str.start();
+ int length = str.length();
+ int non_ascii_start = String::NonAsciiStart(start, length);
+ if (non_ascii_start >= length) {
// If the string is ASCII, we do not need to convert the characters
// since UTF8 is backwards compatible with ASCII.
return AllocateStringFromAscii(str, pretenure);
}
// Non-ASCII and we need to decode.
- return AllocateStringFromUtf8Slow(str, pretenure);
+ return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure);
}
@@ -283,13 +286,6 @@ MaybeObject* Heap::AllocateRawMap() {
#endif
MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
if (result->IsFailure()) old_gen_exhausted_ = true;
-#ifdef DEBUG
- if (!result->IsFailure()) {
- // Maps have their own alignment.
- CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
- static_cast<intptr_t>(kHeapObjectTag));
- }
-#endif
return result;
}
@@ -484,10 +480,12 @@ intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
// Avoid overflow.
if (amount > amount_of_external_allocated_memory_) {
amount_of_external_allocated_memory_ = amount;
+ } else {
+ // Give up and reset the counters in case of an overflow.
+ amount_of_external_allocated_memory_ = 0;
+ amount_of_external_allocated_memory_at_last_global_gc_ = 0;
}
- intptr_t amount_since_last_global_gc =
- amount_of_external_allocated_memory_ -
- amount_of_external_allocated_memory_at_last_global_gc_;
+ intptr_t amount_since_last_global_gc = PromotedExternalMemorySize();
if (amount_since_last_global_gc > external_allocation_limit_) {
CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
}
@@ -495,8 +493,19 @@ intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
// Avoid underflow.
if (amount >= 0) {
amount_of_external_allocated_memory_ = amount;
+ } else {
+    // Give up and reset the counters in case of an underflow.
+ amount_of_external_allocated_memory_ = 0;
+ amount_of_external_allocated_memory_at_last_global_gc_ = 0;
}
}
+ if (FLAG_trace_external_memory) {
+ PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
+ PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
+ " amount=%6" V8_PTR_PREFIX "d KB, isolate=0x%08" V8PRIxPTR ".\n",
+ change_in_bytes / 1024, amount_of_external_allocated_memory_ / 1024,
+ reinterpret_cast<intptr_t>(isolate()));
+ }
ASSERT(amount_of_external_allocated_memory_ >= 0);
return amount_of_external_allocated_memory_;
}
@@ -621,12 +630,24 @@ void ExternalStringTable::Iterate(ObjectVisitor* v) {
void ExternalStringTable::Verify() {
#ifdef DEBUG
for (int i = 0; i < new_space_strings_.length(); ++i) {
- ASSERT(heap_->InNewSpace(new_space_strings_[i]));
- ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
+ Object* obj = Object::cast(new_space_strings_[i]);
+ // TODO(yangguo): check that the object is indeed an external string.
+ ASSERT(heap_->InNewSpace(obj));
+ ASSERT(obj != HEAP->the_hole_value());
+ if (obj->IsExternalAsciiString()) {
+ ExternalAsciiString* string = ExternalAsciiString::cast(obj);
+ ASSERT(String::IsAscii(string->GetChars(), string->length()));
+ }
}
for (int i = 0; i < old_space_strings_.length(); ++i) {
- ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
- ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
+ Object* obj = Object::cast(old_space_strings_[i]);
+ // TODO(yangguo): check that the object is indeed an external string.
+ ASSERT(!heap_->InNewSpace(obj));
+ ASSERT(obj != HEAP->the_hole_value());
+ if (obj->IsExternalAsciiString()) {
+ ExternalAsciiString* string = ExternalAsciiString::cast(obj);
+ ASSERT(String::IsAscii(string->GetChars(), string->length()));
+ }
}
#endif
}
@@ -641,9 +662,11 @@ void ExternalStringTable::AddOldObject(HeapObject* object) {
void ExternalStringTable::ShrinkNewObjects(int position) {
new_space_strings_.Rewind(position);
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
@@ -742,28 +765,15 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
}
-LinearAllocationScope::LinearAllocationScope() {
- HEAP->linear_allocation_scope_depth_++;
-}
-
-
-LinearAllocationScope::~LinearAllocationScope() {
- HEAP->linear_allocation_scope_depth_--;
- ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
-}
-
-
-#ifdef DEBUG
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*current);
- ASSERT(HEAP->Contains(object));
- ASSERT(object->map()->IsMap());
+ CHECK(HEAP->Contains(object));
+ CHECK(object->map()->IsMap());
}
}
}
-#endif
double GCTracer::SizeOfHeapObjects() {
@@ -771,37 +781,47 @@ double GCTracer::SizeOfHeapObjects() {
}
-#ifdef DEBUG
DisallowAllocationFailure::DisallowAllocationFailure() {
+#ifdef DEBUG
old_state_ = HEAP->disallow_allocation_failure_;
HEAP->disallow_allocation_failure_ = true;
+#endif
}
DisallowAllocationFailure::~DisallowAllocationFailure() {
+#ifdef DEBUG
HEAP->disallow_allocation_failure_ = old_state_;
-}
#endif
+}
#ifdef DEBUG
AssertNoAllocation::AssertNoAllocation() {
- old_state_ = HEAP->allow_allocation(false);
+ Isolate* isolate = ISOLATE;
+ active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
+ if (active_) {
+ old_state_ = isolate->heap()->allow_allocation(false);
+ }
}
AssertNoAllocation::~AssertNoAllocation() {
- HEAP->allow_allocation(old_state_);
+ if (active_) HEAP->allow_allocation(old_state_);
}
DisableAssertNoAllocation::DisableAssertNoAllocation() {
- old_state_ = HEAP->allow_allocation(true);
+ Isolate* isolate = ISOLATE;
+ active_ = !isolate->optimizing_compiler_thread()->IsOptimizerThread();
+ if (active_) {
+ old_state_ = isolate->heap()->allow_allocation(true);
+ }
}
DisableAssertNoAllocation::~DisableAssertNoAllocation() {
- HEAP->allow_allocation(old_state_);
+ if (active_) HEAP->allow_allocation(old_state_);
}
#else
diff --git a/src/3rdparty/v8/src/heap-profiler.cc b/src/3rdparty/v8/src/heap-profiler.cc
index 2e971a5..301b099 100644
--- a/src/3rdparty/v8/src/heap-profiler.cc
+++ b/src/3rdparty/v8/src/heap-profiler.cc
@@ -97,7 +97,7 @@ void HeapProfiler::StopHeapObjectsTracking() {
}
-void HeapProfiler::PushHeapObjectsStats(v8::OutputStream* stream) {
+SnapshotObjectId HeapProfiler::PushHeapObjectsStats(v8::OutputStream* stream) {
ASSERT(Isolate::Current()->heap_profiler() != NULL);
return Isolate::Current()->heap_profiler()->PushHeapObjectsStatsImpl(stream);
}
@@ -158,8 +158,8 @@ void HeapProfiler::StartHeapObjectsTrackingImpl() {
}
-void HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
- snapshots_->PushHeapObjectsStats(stream);
+SnapshotObjectId HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
+ return snapshots_->PushHeapObjectsStats(stream);
}
@@ -168,6 +168,14 @@ void HeapProfiler::StopHeapObjectsTrackingImpl() {
}
+size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
+ HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+ ASSERT(profiler != NULL);
+ size_t size = profiler->snapshots_->GetUsedMemorySize();
+ return size;
+}
+
+
int HeapProfiler::GetSnapshotsCount() {
HeapProfiler* profiler = Isolate::Current()->heap_profiler();
ASSERT(profiler != NULL);
diff --git a/src/3rdparty/v8/src/heap-profiler.h b/src/3rdparty/v8/src/heap-profiler.h
index 96b042d..346177b 100644
--- a/src/3rdparty/v8/src/heap-profiler.h
+++ b/src/3rdparty/v8/src/heap-profiler.h
@@ -49,6 +49,8 @@ class HeapProfiler {
static void SetUp();
static void TearDown();
+ static size_t GetMemorySizeUsedByProfiler();
+
static HeapSnapshot* TakeSnapshot(const char* name,
int type,
v8::ActivityControl* control);
@@ -58,7 +60,7 @@ class HeapProfiler {
static void StartHeapObjectsTracking();
static void StopHeapObjectsTracking();
- static void PushHeapObjectsStats(OutputStream* stream);
+ static SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
static int GetSnapshotsCount();
static HeapSnapshot* GetSnapshot(int index);
static HeapSnapshot* FindSnapshot(unsigned uid);
@@ -89,7 +91,7 @@ class HeapProfiler {
void StartHeapObjectsTrackingImpl();
void StopHeapObjectsTrackingImpl();
- void PushHeapObjectsStatsImpl(OutputStream* stream);
+ SnapshotObjectId PushHeapObjectsStatsImpl(OutputStream* stream);
HeapSnapshotsCollection* snapshots_;
unsigned next_snapshot_uid_;
diff --git a/src/3rdparty/v8/src/heap.cc b/src/3rdparty/v8/src/heap.cc
index f678517..ebf3ccd 100644
--- a/src/3rdparty/v8/src/heap.cc
+++ b/src/3rdparty/v8/src/heap.cc
@@ -48,6 +48,7 @@
#include "snapshot.h"
#include "store-buffer.h"
#include "v8threads.h"
+#include "v8utils.h"
#include "vm-state-inl.h"
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#include "regexp-macro-assembler.h"
@@ -66,21 +67,26 @@ Heap::Heap()
: isolate_(NULL),
// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
-#if defined(ANDROID)
-#define LUMP_OF_MEMORY (128 * KB)
- code_range_size_(0),
-#elif defined(V8_TARGET_ARCH_X64)
+#if defined(V8_TARGET_ARCH_X64)
#define LUMP_OF_MEMORY (2 * MB)
code_range_size_(512*MB),
#else
#define LUMP_OF_MEMORY MB
code_range_size_(0),
#endif
+#if defined(ANDROID)
+ reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+ max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+ initial_semispace_size_(Page::kPageSize),
+ max_old_generation_size_(192*MB),
+ max_executable_size_(max_old_generation_size_),
+#else
reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
initial_semispace_size_(Page::kPageSize),
max_old_generation_size_(700ul * LUMP_OF_MEMORY),
max_executable_size_(256l * LUMP_OF_MEMORY),
+#endif
// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
@@ -92,6 +98,7 @@ Heap::Heap()
linear_allocation_scope_depth_(0),
contexts_disposed_(0),
global_ic_age_(0),
+ flush_monomorphic_ics_(false),
scan_on_scavenge_pages_(0),
new_space_(this),
old_pointer_space_(NULL),
@@ -134,6 +141,7 @@ Heap::Heap()
previous_survival_rate_trend_(Heap::STABLE),
survival_rate_trend_(Heap::STABLE),
max_gc_pause_(0),
+ total_gc_time_ms_(0),
max_alive_after_gc_(0),
min_in_mutator_(kMaxInt),
alive_after_last_gc_(0),
@@ -150,7 +158,8 @@ Heap::Heap()
scavenges_since_last_idle_round_(kIdleScavengeThreshold),
promotion_queue_(this),
configured_(false),
- chunks_queued_for_free_(NULL) {
+ chunks_queued_for_free_(NULL),
+ relocation_mutex_(NULL) {
// Allow build-time customization of the max semispace size. Building
// V8 with snapshots and a non-default max semispace size is much
// easier if you can define it as part of the build environment.
@@ -168,12 +177,14 @@ Heap::Heap()
}
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
- global_contexts_list_ = NULL;
+ native_contexts_list_ = NULL;
mark_compact_collector_.heap_ = this;
external_string_table_.heap_ = this;
// Put a dummy entry in the remembered pages so we can find the list the
// minidump even if there are no real unmapped pages.
RememberUnmappedPage(NULL, false);
+
+ ClearObjectStats(true);
}
@@ -201,6 +212,20 @@ intptr_t Heap::CommittedMemory() {
lo_space_->Size();
}
+
+size_t Heap::CommittedPhysicalMemory() {
+ if (!HasBeenSetUp()) return 0;
+
+ return new_space_.CommittedPhysicalMemory() +
+ old_pointer_space_->CommittedPhysicalMemory() +
+ old_data_space_->CommittedPhysicalMemory() +
+ code_space_->CommittedPhysicalMemory() +
+ map_space_->CommittedPhysicalMemory() +
+ cell_space_->CommittedPhysicalMemory() +
+ lo_space_->CommittedPhysicalMemory();
+}
+
+
intptr_t Heap::CommittedMemoryExecutable() {
if (!HasBeenSetUp()) return 0;
@@ -315,48 +340,59 @@ void Heap::ReportStatisticsBeforeGC() {
void Heap::PrintShortHeapStatistics() {
if (!FLAG_trace_gc_verbose) return;
- PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d\n",
- isolate_->memory_allocator()->Size(),
- isolate_->memory_allocator()->Available());
- PrintF("New space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d\n",
- Heap::new_space_.Size(),
- new_space_.Available());
- PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d"
- ", waste: %8" V8_PTR_PREFIX "d\n",
- old_pointer_space_->Size(),
- old_pointer_space_->Available(),
- old_pointer_space_->Waste());
- PrintF("Old data space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d"
- ", waste: %8" V8_PTR_PREFIX "d\n",
- old_data_space_->Size(),
- old_data_space_->Available(),
- old_data_space_->Waste());
- PrintF("Code space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d"
- ", waste: %8" V8_PTR_PREFIX "d\n",
- code_space_->Size(),
- code_space_->Available(),
- code_space_->Waste());
- PrintF("Map space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d"
- ", waste: %8" V8_PTR_PREFIX "d\n",
- map_space_->Size(),
- map_space_->Available(),
- map_space_->Waste());
- PrintF("Cell space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d"
- ", waste: %8" V8_PTR_PREFIX "d\n",
- cell_space_->Size(),
- cell_space_->Available(),
- cell_space_->Waste());
- PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
- ", available: %8" V8_PTR_PREFIX "d\n",
- lo_space_->Size(),
- lo_space_->Available());
+ PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB\n",
+ isolate_->memory_allocator()->Size() / KB,
+ isolate_->memory_allocator()->Available() / KB);
+ PrintPID("New space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ new_space_.Size() / KB,
+ new_space_.Available() / KB,
+ new_space_.CommittedMemory() / KB);
+ PrintPID("Old pointers, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ old_pointer_space_->SizeOfObjects() / KB,
+ old_pointer_space_->Available() / KB,
+ old_pointer_space_->CommittedMemory() / KB);
+ PrintPID("Old data space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ old_data_space_->SizeOfObjects() / KB,
+ old_data_space_->Available() / KB,
+ old_data_space_->CommittedMemory() / KB);
+ PrintPID("Code space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ code_space_->SizeOfObjects() / KB,
+ code_space_->Available() / KB,
+ code_space_->CommittedMemory() / KB);
+ PrintPID("Map space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ map_space_->SizeOfObjects() / KB,
+ map_space_->Available() / KB,
+ map_space_->CommittedMemory() / KB);
+ PrintPID("Cell space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ cell_space_->SizeOfObjects() / KB,
+ cell_space_->Available() / KB,
+ cell_space_->CommittedMemory() / KB);
+ PrintPID("Large object space, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ lo_space_->SizeOfObjects() / KB,
+ lo_space_->Available() / KB,
+ lo_space_->CommittedMemory() / KB);
+ PrintPID("All spaces, used: %6" V8_PTR_PREFIX "d KB"
+ ", available: %6" V8_PTR_PREFIX "d KB"
+ ", committed: %6" V8_PTR_PREFIX "d KB\n",
+ this->SizeOfObjects() / KB,
+ this->Available() / KB,
+ this->CommittedMemory() / KB);
+ PrintPID("Total time spent in GC : %d ms\n", total_gc_time_ms_);
}
@@ -383,18 +419,23 @@ void Heap::GarbageCollectionPrologue() {
ClearJSFunctionResultCaches();
gc_count_++;
unflattened_strings_length_ = 0;
-#ifdef DEBUG
- ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
- allow_allocation(false);
+ if (FLAG_flush_code && FLAG_flush_code_incrementally) {
+ mark_compact_collector()->EnableCodeFlushing(true);
+ }
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
+
+#ifdef DEBUG
+ ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
+ allow_allocation(false);
if (FLAG_gc_verbose) Print();
-#endif // DEBUG
-#if defined(DEBUG)
ReportStatisticsBeforeGC();
#endif // DEBUG
@@ -402,6 +443,7 @@ void Heap::GarbageCollectionPrologue() {
store_buffer()->GCPrologue();
}
+
intptr_t Heap::SizeOfObjects() {
intptr_t total = 0;
AllSpaces spaces;
@@ -411,17 +453,34 @@ intptr_t Heap::SizeOfObjects() {
return total;
}
+
+void Heap::RepairFreeListsAfterBoot() {
+ PagedSpaces spaces;
+ for (PagedSpace* space = spaces.next();
+ space != NULL;
+ space = spaces.next()) {
+ space->RepairFreeListsAfterBoot();
+ }
+}
+
+
void Heap::GarbageCollectionEpilogue() {
store_buffer()->GCEpilogue();
LiveObjectList::GCEpilogue();
-#ifdef DEBUG
- allow_allocation(true);
- ZapFromSpace();
+ // In release mode, we only zap the from space under heap verification.
+ if (Heap::ShouldZapGarbage()) {
+ ZapFromSpace();
+ }
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
+#ifdef DEBUG
+ allow_allocation(true);
if (FLAG_print_global_handles) isolate_->global_handles()->Print();
if (FLAG_print_handles) PrintHandles();
if (FLAG_gc_verbose) Print();
@@ -435,6 +494,56 @@ void Heap::GarbageCollectionEpilogue() {
symbol_table()->Capacity());
isolate_->counters()->number_of_symbols()->Set(
symbol_table()->NumberOfElements());
+
+ if (CommittedMemory() > 0) {
+ isolate_->counters()->external_fragmentation_total()->AddSample(
+ static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
+
+ isolate_->counters()->heap_fraction_map_space()->AddSample(
+ static_cast<int>(
+ (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+ isolate_->counters()->heap_fraction_cell_space()->AddSample(
+ static_cast<int>(
+ (cell_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+
+ isolate_->counters()->heap_sample_total_committed()->AddSample(
+ static_cast<int>(CommittedMemory() / KB));
+ isolate_->counters()->heap_sample_total_used()->AddSample(
+ static_cast<int>(SizeOfObjects() / KB));
+ isolate_->counters()->heap_sample_map_space_committed()->AddSample(
+ static_cast<int>(map_space()->CommittedMemory() / KB));
+ isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
+ static_cast<int>(cell_space()->CommittedMemory() / KB));
+ }
+
+#define UPDATE_COUNTERS_FOR_SPACE(space) \
+ isolate_->counters()->space##_bytes_available()->Set( \
+ static_cast<int>(space()->Available())); \
+ isolate_->counters()->space##_bytes_committed()->Set( \
+ static_cast<int>(space()->CommittedMemory())); \
+ isolate_->counters()->space##_bytes_used()->Set( \
+ static_cast<int>(space()->SizeOfObjects()));
+#define UPDATE_FRAGMENTATION_FOR_SPACE(space) \
+ if (space()->CommittedMemory() > 0) { \
+ isolate_->counters()->external_fragmentation_##space()->AddSample( \
+ static_cast<int>(100 - \
+ (space()->SizeOfObjects() * 100.0) / space()->CommittedMemory())); \
+ }
+#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
+ UPDATE_COUNTERS_FOR_SPACE(space) \
+ UPDATE_FRAGMENTATION_FOR_SPACE(space)
+
+ UPDATE_COUNTERS_FOR_SPACE(new_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
+#undef UPDATE_COUNTERS_FOR_SPACE
+#undef UPDATE_FRAGMENTATION_FOR_SPACE
+#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
+
#if defined(DEBUG)
ReportStatisticsAfterGC();
#endif // DEBUG
@@ -542,10 +651,12 @@ bool Heap::CollectGarbage(AllocationSpace space,
PerformGarbageCollection(collector, &tracer);
rate->Stop();
+ ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
+
+ // This can do debug callbacks and restart incremental marking.
GarbageCollectionEpilogue();
}
- ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
if (incremental_marking()->IsStopped()) {
if (incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) {
incremental_marking()->Start();
@@ -566,7 +677,7 @@ void Heap::PerformScavenge() {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Helper class for verifying the symbol table.
class SymbolTableVerifier : public ObjectVisitor {
public:
@@ -575,20 +686,18 @@ class SymbolTableVerifier : public ObjectVisitor {
for (Object** p = start; p < end; p++) {
if ((*p)->IsHeapObject()) {
// Check that the symbol is actually a symbol.
- ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
+ CHECK((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
}
}
}
};
-#endif // DEBUG
static void VerifySymbolTable() {
-#ifdef DEBUG
SymbolTableVerifier verifier;
HEAP->symbol_table()->IterateElements(&verifier);
-#endif // DEBUG
}
+#endif // VERIFY_HEAP
static bool AbortIncrementalMarkingAndCollectGarbage(
@@ -603,67 +712,42 @@ static bool AbortIncrementalMarkingAndCollectGarbage(
void Heap::ReserveSpace(
- int new_space_size,
- int pointer_space_size,
- int data_space_size,
- int code_space_size,
- int map_space_size,
- int cell_space_size,
- int large_object_size) {
- NewSpace* new_space = Heap::new_space();
- PagedSpace* old_pointer_space = Heap::old_pointer_space();
- PagedSpace* old_data_space = Heap::old_data_space();
- PagedSpace* code_space = Heap::code_space();
- PagedSpace* map_space = Heap::map_space();
- PagedSpace* cell_space = Heap::cell_space();
- LargeObjectSpace* lo_space = Heap::lo_space();
+ int *sizes,
+ Address *locations_out) {
bool gc_performed = true;
int counter = 0;
static const int kThreshold = 20;
while (gc_performed && counter++ < kThreshold) {
gc_performed = false;
- if (!new_space->ReserveSpace(new_space_size)) {
- Heap::CollectGarbage(NEW_SPACE,
- "failed to reserve space in the new space");
- gc_performed = true;
- }
- if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
- AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE,
- "failed to reserve space in the old pointer space");
- gc_performed = true;
- }
- if (!(old_data_space->ReserveSpace(data_space_size))) {
- AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE,
- "failed to reserve space in the old data space");
- gc_performed = true;
- }
- if (!(code_space->ReserveSpace(code_space_size))) {
- AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
- "failed to reserve space in the code space");
- gc_performed = true;
- }
- if (!(map_space->ReserveSpace(map_space_size))) {
- AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
- "failed to reserve space in the map space");
- gc_performed = true;
- }
- if (!(cell_space->ReserveSpace(cell_space_size))) {
- AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
- "failed to reserve space in the cell space");
- gc_performed = true;
- }
- // We add a slack-factor of 2 in order to have space for a series of
- // large-object allocations that are only just larger than the page size.
- large_object_size *= 2;
- // The ReserveSpace method on the large object space checks how much
- // we can expand the old generation. This includes expansion caused by
- // allocation in the other spaces.
- large_object_size += cell_space_size + map_space_size + code_space_size +
- data_space_size + pointer_space_size;
- if (!(lo_space->ReserveSpace(large_object_size))) {
- AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
- "failed to reserve space in the large object space");
- gc_performed = true;
+ ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1);
+ for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) {
+ if (sizes[space] != 0) {
+ MaybeObject* allocation;
+ if (space == NEW_SPACE) {
+ allocation = new_space()->AllocateRaw(sizes[space]);
+ } else {
+ allocation = paged_space(space)->AllocateRaw(sizes[space]);
+ }
+ FreeListNode* node;
+ if (!allocation->To<FreeListNode>(&node)) {
+ if (space == NEW_SPACE) {
+ Heap::CollectGarbage(NEW_SPACE,
+ "failed to reserve space in the new space");
+ } else {
+ AbortIncrementalMarkingAndCollectGarbage(
+ this,
+ static_cast<AllocationSpace>(space),
+ "failed to reserve space in paged space");
+ }
+ gc_performed = true;
+ break;
+ } else {
+ // Mark with a free list node, in case we have a GC before
+ // deserializing.
+ node->set_size(this, sizes[space]);
+ locations_out[space] = node->address();
+ }
+ }
}
}
@@ -691,7 +775,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
void Heap::ClearJSFunctionResultCaches() {
if (isolate_->bootstrapper()->IsActive()) return;
- Object* context = global_contexts_list_;
+ Object* context = native_contexts_list_;
while (!context->IsUndefined()) {
// Get the caches for this context. GC can happen when the context
// is not fully initialized, so the caches can be undefined.
@@ -718,7 +802,7 @@ void Heap::ClearNormalizedMapCaches() {
return;
}
- Object* context = global_contexts_list_;
+ Object* context = native_contexts_list_;
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
@@ -770,9 +854,12 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
PROFILE(isolate_, CodeMovingGCEvent());
}
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifySymbolTable();
}
+#endif
+
if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
ASSERT(!allocation_allowed_);
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
@@ -847,8 +934,8 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
// have to limit maximal capacity of the young generation.
new_space_high_promotion_mode_active_ = true;
if (FLAG_trace_gc) {
- PrintF("Limited new space size due to high promotion rate: %d MB\n",
- new_space_.InitialCapacity() / MB);
+ PrintPID("Limited new space size due to high promotion rate: %d MB\n",
+ new_space_.InitialCapacity() / MB);
}
} else if (new_space_high_promotion_mode_active_ &&
IsStableOrDecreasingSurvivalTrend() &&
@@ -858,8 +945,8 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
// to grow again.
new_space_high_promotion_mode_active_ = false;
if (FLAG_trace_gc) {
- PrintF("Unlimited new space size due to low promotion rate: %d MB\n",
- new_space_.MaximumCapacity() / MB);
+ PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
+ new_space_.MaximumCapacity() / MB);
}
}
@@ -899,9 +986,12 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
global_gc_epilogue_callback_();
}
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifySymbolTable();
}
+#endif
return next_gc_likely_to_collect_more;
}
@@ -928,7 +1018,7 @@ void Heap::MarkCompact(GCTracer* tracer) {
contexts_disposed_ = 0;
- isolate_->set_context_exit_happened(false);
+ flush_monomorphic_ics_ = false;
}
@@ -938,7 +1028,8 @@ void Heap::MarkCompactPrologue() {
isolate_->keyed_lookup_cache()->Clear();
isolate_->context_slot_cache()->Clear();
isolate_->descriptor_lookup_cache()->Clear();
- StringSplitCache::Clear(string_split_cache());
+ RegExpResultsCache::Clear(string_split_cache());
+ RegExpResultsCache::Clear(regexp_multiple_cache());
isolate_->compilation_cache()->MarkCompactPrologue();
@@ -983,7 +1074,7 @@ class ScavengeVisitor: public ObjectVisitor {
};
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Visitor class to verify pointers in code or data space do not point into
// new space.
class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
@@ -991,7 +1082,7 @@ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
void VisitPointers(Object** start, Object**end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
- ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
+ CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
}
}
}
@@ -1016,7 +1107,7 @@ static void VerifyNonPointerSpacePointers() {
object->Iterate(&v);
}
}
-#endif
+#endif // VERIFY_HEAP
void Heap::CheckNewSpaceExpansionCriteria() {
@@ -1154,7 +1245,9 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
void Heap::Scavenge() {
-#ifdef DEBUG
+ RelocationLock relocation_lock(this);
+
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
#endif
@@ -1220,20 +1313,32 @@ void Heap::Scavenge() {
// Copy objects reachable from cells by scavenging cell values directly.
HeapObjectIterator cell_iterator(cell_space_);
- for (HeapObject* cell = cell_iterator.Next();
- cell != NULL; cell = cell_iterator.Next()) {
- if (cell->IsJSGlobalPropertyCell()) {
- Address value_address =
- reinterpret_cast<Address>(cell) +
- (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
+ for (HeapObject* heap_object = cell_iterator.Next();
+ heap_object != NULL;
+ heap_object = cell_iterator.Next()) {
+ if (heap_object->IsJSGlobalPropertyCell()) {
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object);
+ Address value_address = cell->ValueAddress();
scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
}
}
- // Scavenge object reachable from the global contexts list directly.
- scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
+ // Copy objects reachable from the code flushing candidates list.
+ MarkCompactCollector* collector = mark_compact_collector();
+ if (collector->is_code_flushing_enabled()) {
+ collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
+ }
+
+ // Scavenge object reachable from the native contexts list directly.
+ scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
+ while (IterateObjectGroups(&scavenge_visitor)) {
+ new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+ }
+ isolate()->global_handles()->RemoveObjectGroups();
+
isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
&IsUnscavengedHeapObject);
isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
@@ -1274,6 +1379,51 @@ void Heap::Scavenge() {
}
+// TODO(mstarzinger): Unify this method with
+// MarkCompactCollector::MarkObjectGroups().
+bool Heap::IterateObjectGroups(ObjectVisitor* scavenge_visitor) {
+ List<ObjectGroup*>* object_groups =
+ isolate()->global_handles()->object_groups();
+
+ int last = 0;
+ bool changed = false;
+ for (int i = 0; i < object_groups->length(); i++) {
+ ObjectGroup* entry = object_groups->at(i);
+ ASSERT(entry != NULL);
+
+ Object*** objects = entry->objects_;
+ bool group_marked = false;
+ for (size_t j = 0; j < entry->length_; j++) {
+ Object* object = *objects[j];
+ if (object->IsHeapObject()) {
+ if (!IsUnscavengedHeapObject(this, &object)) {
+ group_marked = true;
+ break;
+ }
+ }
+ }
+
+ if (!group_marked) {
+ (*object_groups)[last++] = entry;
+ continue;
+ }
+
+ for (size_t j = 0; j < entry->length_; ++j) {
+ Object* object = *objects[j];
+ if (object->IsHeapObject()) {
+ scavenge_visitor->VisitPointer(&object);
+ changed = true;
+ }
+ }
+
+ entry->Dispose();
+ object_groups->at(i) = NULL;
+ }
+ object_groups->Rewind(last);
+ return changed;
+}
+
+
HeapObject* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
Object** p) {
MapWord first_word = HeapObject::cast(*p)->map_word();
@@ -1291,9 +1441,11 @@ HeapObject* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
void Heap::UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
external_string_table_.Verify();
}
+#endif
if (external_string_table_.new_space_strings_.is_empty()) return;
@@ -1391,7 +1543,7 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Object* undefined = undefined_value();
Object* head = undefined;
Context* tail = NULL;
- Object* candidate = global_contexts_list_;
+ Object* candidate = native_contexts_list_;
// We don't record weak slots during marking or scavenges.
// Instead we do it once when we complete mark-compact cycle.
@@ -1464,20 +1616,47 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
}
// Update the head of the list of contexts.
- global_contexts_list_ = head;
+ native_contexts_list_ = head;
}
void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
AssertNoAllocation no_allocation;
- class VisitorAdapter : public ObjectVisitor {
+ // Both the external string table and the symbol table may contain
+ // external strings, but neither lists them exhaustively, nor is the
+ // intersection set empty. Therefore we iterate over the external string
+ // table first, ignoring symbols, and then over the symbol table.
+
+ class ExternalStringTableVisitorAdapter : public ObjectVisitor {
public:
- explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor)
- : visitor_(visitor) {}
+ explicit ExternalStringTableVisitorAdapter(
+ v8::ExternalResourceVisitor* visitor) : visitor_(visitor) {}
+ virtual void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) {
+ // Visit non-symbol external strings,
+ // since symbols are listed in the symbol table.
+ if (!(*p)->IsSymbol()) {
+ ASSERT((*p)->IsExternalString());
+ visitor_->VisitExternalString(Utils::ToLocal(
+ Handle<String>(String::cast(*p))));
+ }
+ }
+ }
+ private:
+ v8::ExternalResourceVisitor* visitor_;
+ } external_string_table_visitor(visitor);
+
+ external_string_table_.Iterate(&external_string_table_visitor);
+
+ class SymbolTableVisitorAdapter : public ObjectVisitor {
+ public:
+ explicit SymbolTableVisitorAdapter(
+ v8::ExternalResourceVisitor* visitor) : visitor_(visitor) {}
virtual void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
if ((*p)->IsExternalString()) {
+ ASSERT((*p)->IsSymbol());
visitor_->VisitExternalString(Utils::ToLocal(
Handle<String>(String::cast(*p))));
}
@@ -1485,8 +1664,9 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
}
private:
v8::ExternalResourceVisitor* visitor_;
- } visitor_adapter(visitor);
- external_string_table_.Iterate(&visitor_adapter);
+ } symbol_table_visitor(visitor);
+
+ symbol_table()->IterateElements(&symbol_table_visitor);
}
@@ -1590,7 +1770,7 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitFixedArray, &EvacuateFixedArray);
table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
- table_.Register(kVisitGlobalContext,
+ table_.Register(kVisitNativeContext,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<Context::kSize>);
@@ -1676,7 +1856,7 @@ class ScavengingVisitor : public StaticVisitorBase {
RecordCopiedObject(heap, target);
HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
Isolate* isolate = heap->isolate();
- if (isolate->logger()->is_logging() ||
+ if (isolate->logger()->is_logging_code_events() ||
CpuProfiler::is_profiling(isolate)) {
if (target->IsSharedFunctionInfo()) {
PROFILE(isolate, SharedFunctionInfoMoveEvent(
@@ -1984,9 +2164,8 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
int instance_size) {
Object* result;
- { MaybeObject* maybe_result = AllocateRawMap();
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
+ MaybeObject* maybe_result = AllocateRawMap();
+ if (!maybe_result->ToObject(&result)) return maybe_result;
// Map::cast cannot be used due to uninitialized map field.
reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
@@ -1999,6 +2178,9 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
reinterpret_cast<Map*>(result)->set_bit_field(0);
reinterpret_cast<Map*>(result)->set_bit_field2(0);
+ int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+ Map::OwnsDescriptors::encode(true);
+ reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
return result;
}
@@ -2007,9 +2189,8 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
int instance_size,
ElementsKind elements_kind) {
Object* result;
- { MaybeObject* maybe_result = AllocateRawMap();
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
+ MaybeObject* maybe_result = AllocateRawMap();
+ if (!maybe_result->To(&result)) return maybe_result;
Map* map = reinterpret_cast<Map*>(result);
map->set_map_no_write_barrier(meta_map());
@@ -2021,20 +2202,17 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
map->set_instance_size(instance_size);
map->set_inobject_properties(0);
map->set_pre_allocated_property_fields(0);
- map->init_instance_descriptors();
map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
- map->init_prototype_transitions(undefined_value());
+ map->init_back_pointer(undefined_value());
map->set_unused_property_fields(0);
+ map->set_instance_descriptors(empty_descriptor_array());
map->set_bit_field(0);
map->set_bit_field2(1 << Map::kIsExtensible);
+ int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+ Map::OwnsDescriptors::encode(true);
+ map->set_bit_field3(bit_field3);
map->set_elements_kind(elements_kind);
- // If the map object is aligned fill the padding area with Smi 0 objects.
- if (Map::kPadStart < Map::kSize) {
- memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
- 0,
- Map::kSize - Map::kPadStart);
- }
return map;
}
@@ -2071,8 +2249,7 @@ MaybeObject* Heap::AllocateTypeFeedbackInfo() {
{ MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
if (!maybe_info->To(&info)) return maybe_info;
}
- info->set_ic_total_count(0);
- info->set_ic_with_type_info_count(0);
+ info->initialize_storage();
info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
return info;
@@ -2160,17 +2337,17 @@ bool Heap::CreateInitialMaps() {
set_empty_descriptor_array(DescriptorArray::cast(obj));
// Fix the instance_descriptors for the existing maps.
- meta_map()->init_instance_descriptors();
meta_map()->set_code_cache(empty_fixed_array());
- meta_map()->init_prototype_transitions(undefined_value());
+ meta_map()->init_back_pointer(undefined_value());
+ meta_map()->set_instance_descriptors(empty_descriptor_array());
- fixed_array_map()->init_instance_descriptors();
fixed_array_map()->set_code_cache(empty_fixed_array());
- fixed_array_map()->init_prototype_transitions(undefined_value());
+ fixed_array_map()->init_back_pointer(undefined_value());
+ fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
- oddball_map()->init_instance_descriptors();
oddball_map()->set_code_cache(empty_fixed_array());
- oddball_map()->init_prototype_transitions(undefined_value());
+ oddball_map()->init_back_pointer(undefined_value());
+ oddball_map()->set_instance_descriptors(empty_descriptor_array());
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
@@ -2378,9 +2555,16 @@ bool Heap::CreateInitialMaps() {
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
- Map* global_context_map = Map::cast(obj);
- global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
- set_global_context_map(global_context_map);
+ set_global_context_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ Map* native_context_map = Map::cast(obj);
+ native_context_map->set_dictionary_map(true);
+ native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
+ set_native_context_map(native_context_map);
{ MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
SharedFunctionInfo::kAlignedSize);
@@ -2469,7 +2653,7 @@ bool Heap::CreateApiObjects() {
// bottleneck to trap the Smi-only -> fast elements transition, and there
// appears to be no benefit for optimize this case.
Map* new_neander_map = Map::cast(obj);
- new_neander_map->set_elements_kind(FAST_ELEMENTS);
+ new_neander_map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
set_neander_map(new_neander_map);
{ MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
@@ -2632,7 +2816,7 @@ bool Heap::CreateInitialObjects() {
// hash code in place. The hash code for the hidden_symbol is zero to ensure
// that it will always be at the first entry in property descriptors.
{ MaybeObject* maybe_obj =
- AllocateSymbol(CStrVector(""), 0, String::kZeroHash);
+ AllocateSymbol(CStrVector(""), 0, String::kEmptyStringHash);
if (!maybe_obj->ToObject(&obj)) return false;
}
hidden_symbol_ = String::cast(obj);
@@ -2693,18 +2877,33 @@ bool Heap::CreateInitialObjects() {
set_single_character_string_cache(FixedArray::cast(obj));
// Allocate cache for string split.
- { MaybeObject* maybe_obj =
- AllocateFixedArray(StringSplitCache::kStringSplitCacheSize, TENURED);
+ { MaybeObject* maybe_obj = AllocateFixedArray(
+ RegExpResultsCache::kRegExpResultsCacheSize, TENURED);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_string_split_cache(FixedArray::cast(obj));
+ { MaybeObject* maybe_obj = AllocateFixedArray(
+ RegExpResultsCache::kRegExpResultsCacheSize, TENURED);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_regexp_multiple_cache(FixedArray::cast(obj));
+
// Allocate cache for external strings pointing to native source code.
{ MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
if (!maybe_obj->ToObject(&obj)) return false;
}
set_natives_source_cache(FixedArray::cast(obj));
+ // Allocate object to hold object observation state.
+ { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ { MaybeObject* maybe_obj = AllocateJSObjectFromMap(Map::cast(obj));
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_observation_state(JSObject::cast(obj));
+
// Handling of script id generation is in FACTORY->NewScript.
set_last_script_id(undefined_value());
@@ -2724,70 +2923,126 @@ bool Heap::CreateInitialObjects() {
}
-Object* StringSplitCache::Lookup(
- FixedArray* cache, String* string, String* pattern) {
- if (!string->IsSymbol() || !pattern->IsSymbol()) return Smi::FromInt(0);
- uint32_t hash = string->Hash();
- uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
+bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
+ RootListIndex writable_roots[] = {
+ kStoreBufferTopRootIndex,
+ kStackLimitRootIndex,
+ kInstanceofCacheFunctionRootIndex,
+ kInstanceofCacheMapRootIndex,
+ kInstanceofCacheAnswerRootIndex,
+ kCodeStubsRootIndex,
+ kNonMonomorphicCacheRootIndex,
+ kPolymorphicCodeCacheRootIndex,
+ kLastScriptIdRootIndex,
+ kEmptyScriptRootIndex,
+ kRealStackLimitRootIndex,
+ kArgumentsAdaptorDeoptPCOffsetRootIndex,
+ kConstructStubDeoptPCOffsetRootIndex,
+ kGetterStubDeoptPCOffsetRootIndex,
+ kSetterStubDeoptPCOffsetRootIndex,
+ kSymbolTableRootIndex,
+ };
+
+ for (unsigned int i = 0; i < ARRAY_SIZE(writable_roots); i++) {
+ if (root_index == writable_roots[i])
+ return true;
+ }
+ return false;
+}
+
+
+Object* RegExpResultsCache::Lookup(Heap* heap,
+ String* key_string,
+ Object* key_pattern,
+ ResultsCacheType type) {
+ FixedArray* cache;
+ if (!key_string->IsSymbol()) return Smi::FromInt(0);
+ if (type == STRING_SPLIT_SUBSTRINGS) {
+ ASSERT(key_pattern->IsString());
+ if (!key_pattern->IsSymbol()) return Smi::FromInt(0);
+ cache = heap->string_split_cache();
+ } else {
+ ASSERT(type == REGEXP_MULTIPLE_INDICES);
+ ASSERT(key_pattern->IsFixedArray());
+ cache = heap->regexp_multiple_cache();
+ }
+
+ uint32_t hash = key_string->Hash();
+ uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
~(kArrayEntriesPerCacheEntry - 1));
- if (cache->get(index + kStringOffset) == string &&
- cache->get(index + kPatternOffset) == pattern) {
+ if (cache->get(index + kStringOffset) == key_string &&
+ cache->get(index + kPatternOffset) == key_pattern) {
return cache->get(index + kArrayOffset);
}
- index = ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
- if (cache->get(index + kStringOffset) == string &&
- cache->get(index + kPatternOffset) == pattern) {
+ index =
+ ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
+ if (cache->get(index + kStringOffset) == key_string &&
+ cache->get(index + kPatternOffset) == key_pattern) {
return cache->get(index + kArrayOffset);
}
return Smi::FromInt(0);
}
-void StringSplitCache::Enter(Heap* heap,
- FixedArray* cache,
- String* string,
- String* pattern,
- FixedArray* array) {
- if (!string->IsSymbol() || !pattern->IsSymbol()) return;
- uint32_t hash = string->Hash();
- uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
+void RegExpResultsCache::Enter(Heap* heap,
+ String* key_string,
+ Object* key_pattern,
+ FixedArray* value_array,
+ ResultsCacheType type) {
+ FixedArray* cache;
+ if (!key_string->IsSymbol()) return;
+ if (type == STRING_SPLIT_SUBSTRINGS) {
+ ASSERT(key_pattern->IsString());
+ if (!key_pattern->IsSymbol()) return;
+ cache = heap->string_split_cache();
+ } else {
+ ASSERT(type == REGEXP_MULTIPLE_INDICES);
+ ASSERT(key_pattern->IsFixedArray());
+ cache = heap->regexp_multiple_cache();
+ }
+
+ uint32_t hash = key_string->Hash();
+ uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
~(kArrayEntriesPerCacheEntry - 1));
if (cache->get(index + kStringOffset) == Smi::FromInt(0)) {
- cache->set(index + kStringOffset, string);
- cache->set(index + kPatternOffset, pattern);
- cache->set(index + kArrayOffset, array);
+ cache->set(index + kStringOffset, key_string);
+ cache->set(index + kPatternOffset, key_pattern);
+ cache->set(index + kArrayOffset, value_array);
} else {
uint32_t index2 =
- ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
+ ((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) {
- cache->set(index2 + kStringOffset, string);
- cache->set(index2 + kPatternOffset, pattern);
- cache->set(index2 + kArrayOffset, array);
+ cache->set(index2 + kStringOffset, key_string);
+ cache->set(index2 + kPatternOffset, key_pattern);
+ cache->set(index2 + kArrayOffset, value_array);
} else {
cache->set(index2 + kStringOffset, Smi::FromInt(0));
cache->set(index2 + kPatternOffset, Smi::FromInt(0));
cache->set(index2 + kArrayOffset, Smi::FromInt(0));
- cache->set(index + kStringOffset, string);
- cache->set(index + kPatternOffset, pattern);
- cache->set(index + kArrayOffset, array);
+ cache->set(index + kStringOffset, key_string);
+ cache->set(index + kPatternOffset, key_pattern);
+ cache->set(index + kArrayOffset, value_array);
}
}
- if (array->length() < 100) { // Limit how many new symbols we want to make.
- for (int i = 0; i < array->length(); i++) {
- String* str = String::cast(array->get(i));
+ // If the array is a reasonably short list of substrings, convert it into a
+ // list of symbols.
+ if (type == STRING_SPLIT_SUBSTRINGS && value_array->length() < 100) {
+ for (int i = 0; i < value_array->length(); i++) {
+ String* str = String::cast(value_array->get(i));
Object* symbol;
MaybeObject* maybe_symbol = heap->LookupSymbol(str);
if (maybe_symbol->ToObject(&symbol)) {
- array->set(i, symbol);
+ value_array->set(i, symbol);
}
}
}
- array->set_map_no_write_barrier(heap->fixed_cow_array_map());
+ // Convert backing store to a copy-on-write array.
+ value_array->set_map_no_write_barrier(heap->fixed_cow_array_map());
}
-void StringSplitCache::Clear(FixedArray* cache) {
- for (int i = 0; i < kStringSplitCacheSize; i++) {
+void RegExpResultsCache::Clear(FixedArray* cache) {
+ for (int i = 0; i < kRegExpResultsCacheSize; i++) {
cache->set(i, Smi::FromInt(0));
}
}
@@ -2817,7 +3072,7 @@ void Heap::AllocateFullSizeNumberStringCache() {
// The idea is to have a small number string cache in the snapshot to keep
// boot-time memory usage down. If we expand the number string cache already
// while creating the snapshot then that didn't work out.
- ASSERT(!Serializer::enabled());
+ ASSERT(!Serializer::enabled() || FLAG_extra_code != NULL);
MaybeObject* maybe_obj =
AllocateFixedArray(FullSizeNumberStringCacheLength(), TENURED);
Object* new_cache;
@@ -3005,6 +3260,7 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_name(name);
Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
share->set_code(illegal);
+ share->ClearOptimizedCodeMap();
share->set_scope_info(ScopeInfo::Empty());
Code* construct_stub =
isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
@@ -3017,8 +3273,8 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
share->set_ast_node_count(0);
- share->set_deopt_counter(FLAG_deopt_every_n_times);
- share->set_ic_age(0);
+ share->set_stress_deopt_counter(FLAG_deopt_every_n_times);
+ share->set_counters(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
@@ -3050,6 +3306,7 @@ MaybeObject* Heap::AllocateJSMessageObject(String* type,
}
JSMessageObject* message = JSMessageObject::cast(result);
message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
+ message->initialize_elements();
message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
message->set_type(type);
message->set_arguments(arguments);
@@ -3274,7 +3531,7 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
}
ASSERT(buffer->IsFlat());
-#if DEBUG
+#if VERIFY_HEAP
if (FLAG_verify_heap) {
buffer->StringVerify();
}
@@ -3326,6 +3583,8 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
return Failure::OutOfMemoryException();
}
+ ASSERT(String::IsAscii(resource->data(), static_cast<int>(length)));
+
Map* map = external_ascii_string_map();
Object* result;
{ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
@@ -3489,17 +3748,27 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
MaybeObject* maybe_result;
// Large code objects and code objects which should stay at a fixed address
// are allocated in large object space.
- if (obj_size > code_space()->AreaSize() || immovable) {
+ HeapObject* result;
+ bool force_lo_space = obj_size > code_space()->AreaSize();
+ if (force_lo_space) {
maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
} else {
maybe_result = code_space_->AllocateRaw(obj_size);
}
+ if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
- Object* result;
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ if (immovable && !force_lo_space &&
+ // Objects on the first page of each space are never moved.
+ !code_space_->FirstPage()->Contains(result->address())) {
+ // Discard the first code allocation, which was on a page where it could be
+ // moved.
+ CreateFillerObjectAt(result->address(), obj_size);
+ maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
+ if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
+ }
// Initialize the object
- HeapObject::cast(result)->set_map_no_write_barrier(code_map());
+ result->set_map_no_write_barrier(code_map());
Code* code = Code::cast(result);
ASSERT(!isolate_->code_range()->exists() ||
isolate_->code_range()->contains(code->address()));
@@ -3526,7 +3795,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
// through the self_reference parameter.
code->CopyFrom(desc);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
code->Verify();
}
@@ -3608,7 +3877,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
code->Verify();
}
@@ -3655,29 +3924,27 @@ MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
// from the function's context, since the function can be from a
// different context.
JSFunction* object_function =
- function->context()->global_context()->object_function();
+ function->context()->native_context()->object_function();
// Each function prototype gets a copy of the object function map.
// This avoid unwanted sharing of maps between prototypes of different
// constructors.
Map* new_map;
ASSERT(object_function->has_initial_map());
- { MaybeObject* maybe_map =
- object_function->initial_map()->CopyDropTransitions();
- if (!maybe_map->To<Map>(&new_map)) return maybe_map;
- }
+ MaybeObject* maybe_map = object_function->initial_map()->Copy();
+ if (!maybe_map->To(&new_map)) return maybe_map;
+
Object* prototype;
- { MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
- if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
- }
+ MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
+ if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
+
// When creating the prototype for the function we must set its
// constructor to the function.
- Object* result;
- { MaybeObject* maybe_result =
- JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
- constructor_symbol(), function, DONT_ENUM);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
+ MaybeObject* maybe_failure =
+ JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
+ constructor_symbol(), function, DONT_ENUM);
+ if (maybe_failure->IsFailure()) return maybe_failure;
+
return prototype;
}
@@ -3707,12 +3974,12 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
!JSFunction::cast(callee)->shared()->is_classic_mode();
if (strict_mode_callee) {
boilerplate =
- isolate()->context()->global_context()->
+ isolate()->context()->native_context()->
strict_mode_arguments_boilerplate();
arguments_object_size = kArgumentsObjectSizeStrict;
} else {
boilerplate =
- isolate()->context()->global_context()->arguments_boilerplate();
+ isolate()->context()->native_context()->arguments_boilerplate();
arguments_object_size = kArgumentsObjectSize;
}
@@ -3751,7 +4018,7 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
// Check the state of the object
ASSERT(JSObject::cast(result)->HasFastProperties());
- ASSERT(JSObject::cast(result)->HasFastElements());
+ ASSERT(JSObject::cast(result)->HasFastObjectElements());
return result;
}
@@ -3778,25 +4045,22 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
// suggested by the function.
int instance_size = fun->shared()->CalculateInstanceSize();
int in_object_properties = fun->shared()->CalculateInObjectProperties();
- Object* map_obj;
- { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
- if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
- }
+ Map* map;
+ MaybeObject* maybe_map = AllocateMap(JS_OBJECT_TYPE, instance_size);
+ if (!maybe_map->To(&map)) return maybe_map;
// Fetch or allocate prototype.
Object* prototype;
if (fun->has_instance_prototype()) {
prototype = fun->instance_prototype();
} else {
- { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
- if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
- }
+ MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
+ if (!maybe_prototype->To(&prototype)) return maybe_prototype;
}
- Map* map = Map::cast(map_obj);
map->set_inobject_properties(in_object_properties);
map->set_unused_property_fields(in_object_properties);
map->set_prototype(prototype);
- ASSERT(map->has_fast_elements());
+ ASSERT(map->has_fast_object_elements());
// If the function has only simple this property assignments add
// field descriptors for these to the initial map as the object
@@ -3811,21 +4075,17 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
fun->shared()->ForbidInlineConstructor();
} else {
DescriptorArray* descriptors;
- { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
- if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
- return maybe_descriptors_obj;
- }
- }
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(count);
+ if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
+
DescriptorArray::WhitenessWitness witness(descriptors);
for (int i = 0; i < count; i++) {
String* name = fun->shared()->GetThisPropertyAssignmentName(i);
ASSERT(name->IsSymbol());
- FieldDescriptor field(name, i, NONE);
- field.SetEnumerationIndex(i);
+ FieldDescriptor field(name, i, NONE, i + 1);
descriptors->Set(i, &field, witness);
}
- descriptors->SetNextEnumerationIndex(count);
- descriptors->SortUnchecked(witness);
+ descriptors->Sort();
// The descriptors may contain duplicates because the compiler does not
// guarantee the uniqueness of property names (it would have required
@@ -3834,7 +4094,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
if (HasDuplicates(descriptors)) {
fun->shared()->ForbidInlineConstructor();
} else {
- map->set_instance_descriptors(descriptors);
+ map->InitializeDescriptors(descriptors);
map->set_pre_allocated_property_fields(count);
map->set_unused_property_fields(in_object_properties - count);
}
@@ -3913,8 +4173,7 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
InitializeJSObjectFromMap(JSObject::cast(obj),
FixedArray::cast(properties),
map);
- ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() ||
- JSObject::cast(obj)->HasFastElements());
+ ASSERT(JSObject::cast(obj)->HasFastSmiOrObjectElements());
return obj;
}
@@ -3942,13 +4201,18 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
}
-MaybeObject* Heap::AllocateJSModule() {
+MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
// Allocate a fresh map. Modules do not have a prototype.
Map* map;
MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
if (!maybe_map->To(&map)) return maybe_map;
// Allocate the object based on the map.
- return AllocateJSObjectFromMap(map, TENURED);
+ JSModule* module;
+ MaybeObject* maybe_module = AllocateJSObjectFromMap(map, TENURED);
+ if (!maybe_module->To(&module)) return maybe_module;
+ module->set_context(context);
+ module->set_scope_info(scope_info);
+ return module;
}
@@ -3959,6 +4223,9 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
ArrayStorageAllocationMode mode,
PretenureFlag pretenure) {
ASSERT(capacity >= length);
+ if (length != 0 && mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ }
MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
JSArray* array;
if (!maybe_array->To(&array)) return maybe_array;
@@ -3979,8 +4246,7 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
}
} else {
- ASSERT(elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
maybe_elms = AllocateUninitializedFixedArray(capacity);
} else {
@@ -4006,6 +4272,7 @@ MaybeObject* Heap::AllocateJSArrayWithElements(
array->set_elements(elements);
array->set_length(Smi::FromInt(elements->length()));
+ array->ValidateElements();
return array;
}
@@ -4059,6 +4326,7 @@ MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
ASSERT(constructor->has_initial_map());
Map* map = constructor->initial_map();
+ ASSERT(map->is_dictionary_map());
// Make sure no field properties are described in the initial map.
// This guarantees us that normalizing the properties does not
@@ -4076,13 +4344,11 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
// Allocate a dictionary object for backing storage.
- Object* obj;
- { MaybeObject* maybe_obj =
- StringDictionary::Allocate(
- map->NumberOfDescribedProperties() * 2 + initial_size);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- StringDictionary* dictionary = StringDictionary::cast(obj);
+ StringDictionary* dictionary;
+ MaybeObject* maybe_dictionary =
+ StringDictionary::Allocate(
+ map->NumberOfOwnDescriptors() * 2 + initial_size);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
// The global object might be created from an object template with accessors.
// Fill these accessors into the dictionary.
@@ -4090,36 +4356,32 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
for (int i = 0; i < descs->number_of_descriptors(); i++) {
PropertyDetails details = descs->GetDetails(i);
ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
- PropertyDetails d =
- PropertyDetails(details.attributes(), CALLBACKS, details.index());
+ PropertyDetails d = PropertyDetails(details.attributes(),
+ CALLBACKS,
+ details.descriptor_index());
Object* value = descs->GetCallbacksObject(i);
- { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
- if (!maybe_value->ToObject(&value)) return maybe_value;
- }
+ MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
+ if (!maybe_value->ToObject(&value)) return maybe_value;
- Object* result;
- { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- dictionary = StringDictionary::cast(result);
+ MaybeObject* maybe_added = dictionary->Add(descs->GetKey(i), value, d);
+ if (!maybe_added->To(&dictionary)) return maybe_added;
}
// Allocate the global object and initialize it with the backing store.
- { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- JSObject* global = JSObject::cast(obj);
+ JSObject* global;
+ MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
+ if (!maybe_global->To(&global)) return maybe_global;
+
InitializeJSObjectFromMap(global, dictionary, map);
// Create a new map for the global object.
- { MaybeObject* maybe_obj = map->CopyDropDescriptors();
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- Map* new_map = Map::cast(obj);
+ Map* new_map;
+ MaybeObject* maybe_map = map->CopyDropDescriptors();
+ if (!maybe_map->To(&new_map)) return maybe_map;
+ new_map->set_dictionary_map(true);
// Set up the global object as a normalized object.
global->set_map(new_map);
- global->map()->clear_instance_descriptors();
global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
@@ -4248,7 +4510,7 @@ MaybeObject* Heap::ReinitializeJSReceiver(
map->set_function_with_prototype(true);
InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
JSFunction::cast(object)->set_context(
- isolate()->context()->global_context());
+ isolate()->context()->native_context());
}
// Put in filler if the new object is smaller than the old.
@@ -4289,7 +4551,8 @@ MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
PretenureFlag pretenure) {
- if (string.length() == 1) {
+ int length = string.length();
+ if (length == 1) {
return Heap::LookupSingleCharacterStringFromCode(string[0]);
}
Object* result;
@@ -4299,22 +4562,20 @@ MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
}
// Copy the characters into the new object.
- SeqAsciiString* string_result = SeqAsciiString::cast(result);
- for (int i = 0; i < string.length(); i++) {
- string_result->SeqAsciiStringSet(i, string[i]);
- }
+ CopyChars(SeqAsciiString::cast(result)->GetChars(), string.start(), length);
return result;
}
MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
+ int non_ascii_start,
PretenureFlag pretenure) {
- // Count the number of characters in the UTF-8 string and check if
- // it is an ASCII string.
+ // Continue counting the number of characters in the UTF-8 string, starting
+ // from the first non-ascii character or word.
+ int chars = non_ascii_start;
Access<UnicodeCache::Utf8Decoder>
decoder(isolate_->unicode_cache()->utf8_decoder());
- decoder->Reset(string.start(), string.length());
- int chars = 0;
+ decoder->Reset(string.start() + non_ascii_start, string.length() - chars);
while (decoder->has_more()) {
uint32_t r = decoder->GetNext();
if (r <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
@@ -4330,16 +4591,16 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
}
// Convert and copy the characters into the new object.
- String* string_result = String::cast(result);
+ SeqTwoByteString* twobyte = SeqTwoByteString::cast(result);
decoder->Reset(string.start(), string.length());
int i = 0;
while (i < chars) {
uint32_t r = decoder->GetNext();
if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
- string_result->Set(i++, unibrow::Utf16::LeadSurrogate(r));
- string_result->Set(i++, unibrow::Utf16::TrailSurrogate(r));
+ twobyte->SeqTwoByteStringSet(i++, unibrow::Utf16::LeadSurrogate(r));
+ twobyte->SeqTwoByteStringSet(i++, unibrow::Utf16::TrailSurrogate(r));
} else {
- string_result->Set(i++, r);
+ twobyte->SeqTwoByteStringSet(i++, r);
}
}
return result;
@@ -4349,20 +4610,18 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
PretenureFlag pretenure) {
// Check if the string is an ASCII string.
- MaybeObject* maybe_result;
- if (String::IsAscii(string.start(), string.length())) {
- maybe_result = AllocateRawAsciiString(string.length(), pretenure);
- } else { // It's not an ASCII string.
- maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
- }
Object* result;
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ int length = string.length();
+ const uc16* start = string.start();
- // Copy the characters into the new object, which may be either ASCII or
- // UTF-16.
- String* string_result = String::cast(result);
- for (int i = 0; i < string.length(); i++) {
- string_result->Set(i, string[i]);
+ if (String::IsAscii(start, length)) {
+ MaybeObject* maybe_result = AllocateRawAsciiString(length, pretenure);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ CopyChars(SeqAsciiString::cast(result)->GetChars(), start, length);
+ } else { // It's not an ASCII string.
+ MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length);
}
return result;
}
@@ -4491,6 +4750,16 @@ MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
String::cast(result)->set_length(length);
String::cast(result)->set_hash_field(String::kEmptyHashField);
ASSERT_EQ(size, HeapObject::cast(result)->Size());
+
+#ifdef VERIFY_HEAP
+ if (FLAG_verify_heap) {
+ // Initialize string's content to ensure ASCII-ness (character range 0-127)
+ // as required when verifying the heap.
+ char* dest = SeqAsciiString::cast(result)->GetChars();
+ memset(dest, 0x0F, length * kCharSize);
+ }
+#endif
+
return result;
}
@@ -4534,16 +4803,16 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
MaybeObject* Heap::AllocateJSArray(
ElementsKind elements_kind,
PretenureFlag pretenure) {
- Context* global_context = isolate()->context()->global_context();
- JSFunction* array_function = global_context->array_function();
+ Context* native_context = isolate()->context()->native_context();
+ JSFunction* array_function = native_context->array_function();
Map* map = array_function->initial_map();
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- map = Map::cast(global_context->double_js_array_map());
- } else if (elements_kind == FAST_ELEMENTS || !FLAG_smi_only_arrays) {
- map = Map::cast(global_context->object_js_array_map());
- } else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- ASSERT(map == global_context->smi_js_array_map());
+ Object* maybe_map_array = native_context->js_array_maps();
+ if (!maybe_map_array->IsUndefined()) {
+ Object* maybe_transitioned_map =
+ FixedArray::cast(maybe_map_array)->get(elements_kind);
+ if (!maybe_transitioned_map->IsUndefined()) {
+ map = Map::cast(maybe_transitioned_map);
+ }
}
return AllocateJSObjectFromMap(map, pretenure);
@@ -4820,35 +5089,50 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
}
-MaybeObject* Heap::AllocateGlobalContext() {
+MaybeObject* Heap::AllocateNativeContext() {
Object* result;
{ MaybeObject* maybe_result =
- AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+ AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map_no_write_barrier(global_context_map());
- context->set_smi_js_array_map(undefined_value());
- context->set_double_js_array_map(undefined_value());
- context->set_object_js_array_map(undefined_value());
- ASSERT(context->IsGlobalContext());
+ context->set_map_no_write_barrier(native_context_map());
+ context->set_js_array_maps(undefined_value());
+ ASSERT(context->IsNativeContext());
ASSERT(result->IsContext());
return result;
}
-MaybeObject* Heap::AllocateModuleContext(Context* previous,
+MaybeObject* Heap::AllocateGlobalContext(JSFunction* function,
ScopeInfo* scope_info) {
Object* result;
{ MaybeObject* maybe_result =
- AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
+ AllocateFixedArray(scope_info->ContextLength(), TENURED);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map_no_write_barrier(module_context_map());
- context->set_previous(previous);
+ context->set_map_no_write_barrier(global_context_map());
+ context->set_closure(function);
+ context->set_previous(function->context());
context->set_extension(scope_info);
- context->set_global(previous->global());
+ context->set_global_object(function->context()->global_object());
+ ASSERT(context->IsGlobalContext());
+ ASSERT(result->IsContext());
+ return context;
+}
+
+
+MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) {
+ Object* result;
+ { MaybeObject* maybe_result =
+ AllocateFixedArray(scope_info->ContextLength(), TENURED);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ Context* context = reinterpret_cast<Context*>(result);
+ context->set_map_no_write_barrier(module_context_map());
+ // Context links will be set later.
+ context->set_extension(Smi::FromInt(0));
return context;
}
@@ -4863,9 +5147,9 @@ MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
context->set_map_no_write_barrier(function_context_map());
context->set_closure(function);
context->set_previous(function->context());
- context->set_extension(NULL);
- context->set_global(function->context()->global());
- context->set_qml_global(function->context()->qml_global());
+ context->set_extension(Smi::FromInt(0));
+ context->set_global_object(function->context()->global_object());
+ context->set_qml_global_object(function->context()->qml_global_object());
return context;
}
@@ -4885,8 +5169,8 @@ MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
context->set_closure(function);
context->set_previous(previous);
context->set_extension(name);
- context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
+ context->set_global_object(previous->global_object());
+ context->set_qml_global_object(previous->qml_global_object());
context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
return context;
}
@@ -4904,8 +5188,8 @@ MaybeObject* Heap::AllocateWithContext(JSFunction* function,
context->set_closure(function);
context->set_previous(previous);
context->set_extension(extension);
- context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
+ context->set_global_object(previous->global_object());
+ context->set_qml_global_object(previous->qml_global_object());
return context;
}
@@ -4923,8 +5207,8 @@ MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
context->set_closure(function);
context->set_previous(previous);
context->set_extension(scope_info);
- context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
+ context->set_global_object(previous->global_object());
+ context->set_qml_global_object(previous->qml_global_object());
return context;
}
@@ -4998,12 +5282,17 @@ void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
bool Heap::IdleNotification(int hint) {
+ // Hints greater than this value indicate that
+ // the embedder is requesting a lot of GC work.
const int kMaxHint = 1000;
+ // Minimal hint that allows to do full GC.
+ const int kMinHintForFullGC = 100;
intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
// The size factor is in range [5..250]. The numbers here are chosen from
// experiments. If you changes them, make sure to test with
// chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.*
- intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;
+ intptr_t step_size =
+ size_factor * IncrementalMarking::kAllocatedThreshold;
if (contexts_disposed_ > 0) {
if (hint >= kMaxHint) {
@@ -5066,16 +5355,30 @@ bool Heap::IdleNotification(int hint) {
mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
ms_count_at_last_idle_notification_ = ms_count_;
- if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
+ int remaining_mark_sweeps = kMaxMarkSweepsInIdleRound -
+ mark_sweeps_since_idle_round_started_;
+
+ if (remaining_mark_sweeps <= 0) {
FinishIdleRound();
return true;
}
if (incremental_marking()->IsStopped()) {
- incremental_marking()->Start();
+ // If there are no more than two GCs left in this idle round and we are
+ // allowed to do a full GC, then make those GCs full in order to compact
+ // the code space.
+ // TODO(ulan): Once we enable code compaction for incremental marking,
+ // we can get rid of this special case and always start incremental marking.
+ if (remaining_mark_sweeps <= 2 && hint >= kMinHintForFullGC) {
+ CollectAllGarbage(kReduceMemoryFootprintMask,
+ "idle notification: finalize idle round");
+ } else {
+ incremental_marking()->Start();
+ }
+ }
+ if (!incremental_marking()->IsStopped()) {
+ AdvanceIdleIncrementalMarking(step_size);
}
-
- AdvanceIdleIncrementalMarking(step_size);
return false;
}
@@ -5249,9 +5552,9 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void Heap::Verify() {
- ASSERT(HasBeenSetUp());
+ CHECK(HasBeenSetUp());
store_buffer()->Verify();
@@ -5269,38 +5572,8 @@ void Heap::Verify() {
cell_space_->Verify(&no_dirty_regions_visitor);
lo_space_->Verify();
-
- VerifyNoAccessorPairSharing();
-}
-
-
-void Heap::VerifyNoAccessorPairSharing() {
- // Verification is done in 2 phases: First we mark all AccessorPairs, checking
- // that we mark only unmarked pairs, then we clear all marks, restoring the
- // initial state. We use the Smi tag of the AccessorPair's getter as the
- // marking bit, because we can never see a Smi as the getter.
- for (int phase = 0; phase < 2; phase++) {
- HeapObjectIterator iter(map_space());
- for (HeapObject* obj = iter.Next(); obj != NULL; obj = iter.Next()) {
- if (obj->IsMap()) {
- DescriptorArray* descs = Map::cast(obj)->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->GetType(i) == CALLBACKS &&
- descs->GetValue(i)->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(descs->GetValue(i));
- uintptr_t before = reinterpret_cast<intptr_t>(accessors->getter());
- uintptr_t after = (phase == 0) ?
- ((before & ~kSmiTagMask) | kSmiTag) :
- ((before & ~kHeapObjectTag) | kHeapObjectTag);
- CHECK(before != after);
- accessors->set_getter(reinterpret_cast<Object*>(after));
- }
- }
- }
- }
- }
}
-#endif // DEBUG
+#endif
MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
@@ -5393,7 +5666,6 @@ bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
}
-#ifdef DEBUG
void Heap::ZapFromSpace() {
NewSpacePageIterator it(new_space_.FromSpaceStart(),
new_space_.FromSpaceEnd());
@@ -5406,7 +5678,6 @@ void Heap::ZapFromSpace() {
}
}
}
-#endif // DEBUG
void Heap::IterateAndMarkPointersToFromSpace(Address start,
@@ -5656,6 +5927,7 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
// Iterate over local handles in handle scopes.
isolate_->handle_scope_implementer()->Iterate(v);
+ isolate_->IterateDeferredHandles(v);
v->Synchronize(VisitorSynchronization::kHandleScope);
// Iterate over the builtin code objects and code stubs in the
@@ -5718,8 +5990,8 @@ bool Heap::ConfigureHeap(int max_semispace_size,
if (max_semispace_size < Page::kPageSize) {
max_semispace_size = Page::kPageSize;
if (FLAG_trace_gc) {
- PrintF("Max semispace size cannot be less than %dkbytes\n",
- Page::kPageSize >> 10);
+ PrintPID("Max semispace size cannot be less than %dkbytes\n",
+ Page::kPageSize >> 10);
}
}
max_semispace_size_ = max_semispace_size;
@@ -5734,8 +6006,8 @@ bool Heap::ConfigureHeap(int max_semispace_size,
if (max_semispace_size_ > reserved_semispace_size_) {
max_semispace_size_ = reserved_semispace_size_;
if (FLAG_trace_gc) {
- PrintF("Max semispace size cannot be more than %dkbytes\n",
- reserved_semispace_size_ >> 10);
+ PrintPID("Max semispace size cannot be more than %dkbytes\n",
+ reserved_semispace_size_ >> 10);
}
}
} else {
@@ -5760,7 +6032,7 @@ bool Heap::ConfigureHeap(int max_semispace_size,
max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
- external_allocation_limit_ = 10 * max_semispace_size_;
+ external_allocation_limit_ = 16 * max_semispace_size_;
// The old generation is paged and needs at least one page for each space.
int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
@@ -6110,7 +6382,7 @@ bool Heap::SetUp(bool create_heap_objects) {
// Create initial objects
if (!CreateInitialObjects()) return false;
- global_contexts_list_ = undefined_value();
+ native_contexts_list_ = undefined_value();
}
LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
@@ -6118,6 +6390,8 @@ bool Heap::SetUp(bool create_heap_objects) {
store_buffer()->SetUp();
+ if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
+
return true;
}
@@ -6140,16 +6414,18 @@ void Heap::SetStackLimits() {
void Heap::TearDown() {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
#endif
+
if (FLAG_print_cumulative_gc_stat) {
PrintF("\n\n");
PrintF("gc_count=%d ", gc_count_);
PrintF("mark_sweep_count=%d ", ms_count_);
PrintF("max_gc_pause=%d ", get_max_gc_pause());
+ PrintF("total_gc_time=%d ", total_gc_time_ms_);
PrintF("min_in_mutator=%d ", get_min_in_mutator());
PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
get_max_alive_after_gc());
@@ -6203,6 +6479,8 @@ void Heap::TearDown() {
isolate_->memory_allocator()->TearDown();
+ delete relocation_mutex_;
+
#ifdef DEBUG
delete debug_utils_;
debug_utils_ = NULL;
@@ -6620,7 +6898,7 @@ void PathTracer::TracePathFrom(Object** root) {
ASSERT((search_target_ == kAnyGlobalObject) ||
search_target_->IsHeapObject());
found_target_in_trace_ = false;
- object_stack_.Clear();
+ Reset();
MarkVisitor mark_visitor(this);
MarkRecursively(root, &mark_visitor);
@@ -6632,8 +6910,8 @@ void PathTracer::TracePathFrom(Object** root) {
}
-static bool SafeIsGlobalContext(HeapObject* obj) {
- return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
+static bool SafeIsNativeContext(HeapObject* obj) {
+ return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
}
@@ -6655,7 +6933,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
return;
}
- bool is_global_context = SafeIsGlobalContext(obj);
+ bool is_native_context = SafeIsNativeContext(obj);
// not visited yet
Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
@@ -6665,7 +6943,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
// Scan the object body.
- if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
+ if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
// This is specialized to scan Context's properly.
Object** start = reinterpret_cast<Object**>(obj->address() +
Context::kHeaderSize);
@@ -6724,11 +7002,7 @@ void PathTracer::ProcessResults() {
for (int i = 0; i < object_stack_.length(); i++) {
if (i > 0) PrintF("\n |\n |\n V\n\n");
Object* obj = object_stack_[i];
-#ifdef OBJECT_PRINT
obj->Print();
-#else
- obj->ShortPrint();
-#endif
}
PrintF("=====================================\n");
}
@@ -6737,6 +7011,15 @@ void PathTracer::ProcessResults() {
#ifdef DEBUG
+// Triggers a depth-first traversal of reachable objects from one
+// given root object and finds a path to a specific heap object and
+// prints it.
+void Heap::TracePathToObjectFrom(Object* target, Object* root) {
+ PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL);
+ tracer.VisitPointer(&root);
+}
+
+
// Triggers a depth-first traversal of reachable objects from roots
// and finds a path to a specific heap object and prints it.
void Heap::TracePathToObject(Object* target) {
@@ -6824,6 +7107,7 @@ GCTracer::~GCTracer() {
// Update cumulative GC statistics if required.
if (FLAG_print_cumulative_gc_stat) {
+ heap_->total_gc_time_ms_ += time;
heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
heap_->alive_after_last_gc_);
@@ -6831,9 +7115,13 @@ GCTracer::~GCTracer() {
heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
static_cast<int>(spent_in_mutator_));
}
+ } else if (FLAG_trace_gc_verbose) {
+ heap_->total_gc_time_ms_ += time;
}
- PrintF("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
+ if (collector_ == SCAVENGER && FLAG_trace_gc_ignore_scavenger) return;
+
+ PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
if (!FLAG_trace_gc_nvp) {
int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
@@ -6875,9 +7163,7 @@ GCTracer::~GCTracer() {
PrintF(".\n");
} else {
PrintF("pause=%d ", time);
- PrintF("mutator=%d ",
- static_cast<int>(spent_in_mutator_));
-
+ PrintF("mutator=%d ", static_cast<int>(spent_in_mutator_));
PrintF("gc=");
switch (collector_) {
case SCAVENGER:
@@ -7004,7 +7290,7 @@ void KeyedLookupCache::Clear() {
void DescriptorLookupCache::Clear() {
- for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
+ for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
}
@@ -7044,7 +7330,7 @@ void TranscendentalCache::Clear() {
void ExternalStringTable::CleanUp() {
int last = 0;
for (int i = 0; i < new_space_strings_.length(); ++i) {
- if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+ if (new_space_strings_[i] == heap_->the_hole_value()) {
continue;
}
if (heap_->InNewSpace(new_space_strings_[i])) {
@@ -7056,16 +7342,18 @@ void ExternalStringTable::CleanUp() {
new_space_strings_.Rewind(last);
last = 0;
for (int i = 0; i < old_space_strings_.length(); ++i) {
- if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+ if (old_space_strings_[i] == heap_->the_hole_value()) {
continue;
}
ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
old_space_strings_[last++] = old_space_strings_[i];
}
old_space_strings_.Rewind(last);
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
@@ -7156,4 +7444,63 @@ void Heap::RememberUnmappedPage(Address page, bool compacted) {
remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}
+
+void Heap::ClearObjectStats(bool clear_last_time_stats) {
+ memset(object_counts_, 0, sizeof(object_counts_));
+ memset(object_sizes_, 0, sizeof(object_sizes_));
+ if (clear_last_time_stats) {
+ memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
+ memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
+ }
+}
+
+
+static LazyMutex checkpoint_object_stats_mutex = LAZY_MUTEX_INITIALIZER;
+
+
+void Heap::CheckpointObjectStats() {
+ ScopedLock lock(checkpoint_object_stats_mutex.Pointer());
+ Counters* counters = isolate()->counters();
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
+ counters->count_of_##name()->Increment( \
+ static_cast<int>(object_counts_[name])); \
+ counters->count_of_##name()->Decrement( \
+ static_cast<int>(object_counts_last_time_[name])); \
+ counters->size_of_##name()->Increment( \
+ static_cast<int>(object_sizes_[name])); \
+ counters->size_of_##name()->Decrement( \
+ static_cast<int>(object_sizes_last_time_[name]));
+ INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+ int index;
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
+ index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \
+ counters->count_of_CODE_TYPE_##name()->Increment( \
+ static_cast<int>(object_counts_[index])); \
+ counters->count_of_CODE_TYPE_##name()->Decrement( \
+ static_cast<int>(object_counts_last_time_[index])); \
+ counters->size_of_CODE_TYPE_##name()->Increment( \
+ static_cast<int>(object_sizes_[index])); \
+ counters->size_of_CODE_TYPE_##name()->Decrement( \
+ static_cast<int>(object_sizes_last_time_[index]));
+ CODE_KIND_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
+ index = FIRST_FIXED_ARRAY_SUB_TYPE + name; \
+ counters->count_of_FIXED_ARRAY_##name()->Increment( \
+ static_cast<int>(object_counts_[index])); \
+ counters->count_of_FIXED_ARRAY_##name()->Decrement( \
+ static_cast<int>(object_counts_last_time_[index])); \
+ counters->size_of_FIXED_ARRAY_##name()->Increment( \
+ static_cast<int>(object_sizes_[index])); \
+ counters->size_of_FIXED_ARRAY_##name()->Decrement( \
+ static_cast<int>(object_sizes_last_time_[index]));
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
+
+ memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
+ memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
+ ClearObjectStats();
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/heap.h b/src/3rdparty/v8/src/heap.h
index 0e744e4..12cd295 100644
--- a/src/3rdparty/v8/src/heap.h
+++ b/src/3rdparty/v8/src/heap.h
@@ -64,7 +64,7 @@ namespace internal {
V(Map, ascii_symbol_map, AsciiSymbolMap) \
V(Map, ascii_string_map, AsciiStringMap) \
V(Map, heap_number_map, HeapNumberMap) \
- V(Map, global_context_map, GlobalContextMap) \
+ V(Map, native_context_map, NativeContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, code_map, CodeMap) \
V(Map, scope_info_map, ScopeInfoMap) \
@@ -87,6 +87,7 @@ namespace internal {
V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, string_split_cache, StringSplitCache) \
+ V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
V(Object, termination_exception, TerminationException) \
V(Smi, hash_seed, HashSeed) \
V(Map, string_map, StringMap) \
@@ -130,6 +131,7 @@ namespace internal {
V(Map, with_context_map, WithContextMap) \
V(Map, block_context_map, BlockContextMap) \
V(Map, module_context_map, ModuleContextMap) \
+ V(Map, global_context_map, GlobalContextMap) \
V(Map, oddball_map, OddballMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, foreign_map, ForeignMap) \
@@ -150,7 +152,10 @@ namespace internal {
V(Smi, real_stack_limit, RealStackLimit) \
V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
- V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
+ V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
+ V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
+ V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) \
+ V(JSObject, observation_state, ObservationState)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -172,6 +177,7 @@ namespace internal {
V(constructor_symbol, "constructor") \
V(code_symbol, ".code") \
V(result_symbol, ".result") \
+ V(dot_for_symbol, ".for.") \
V(catch_var_symbol, ".catch-var") \
V(empty_symbol, "") \
V(eval_symbol, "eval") \
@@ -241,7 +247,8 @@ namespace internal {
V(use_strict, "use strict") \
V(dot_symbol, ".") \
V(anonymous_function_symbol, "(anonymous function)") \
- V(compare_ic_symbol, ".compare_ic") \
+ V(compare_ic_symbol, "==") \
+ V(strict_compare_ic_symbol, "===") \
V(infinity_symbol, "Infinity") \
V(minus_infinity_symbol, "-Infinity") \
V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
@@ -486,6 +493,9 @@ class Heap {
// Returns the amount of executable memory currently committed for the heap.
intptr_t CommittedMemoryExecutable();
+ // Returns the amount of phyical memory currently committed for the heap.
+ size_t CommittedPhysicalMemory();
+
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
@@ -508,6 +518,24 @@ class Heap {
MapSpace* map_space() { return map_space_; }
CellSpace* cell_space() { return cell_space_; }
LargeObjectSpace* lo_space() { return lo_space_; }
+ PagedSpace* paged_space(int idx) {
+ switch (idx) {
+ case OLD_POINTER_SPACE:
+ return old_pointer_space();
+ case OLD_DATA_SPACE:
+ return old_data_space();
+ case MAP_SPACE:
+ return map_space();
+ case CELL_SPACE:
+ return cell_space();
+ case CODE_SPACE:
+ return code_space();
+ case NEW_SPACE:
+ case LO_SPACE:
+ UNREACHABLE();
+ }
+ return NULL;
+ }
bool always_allocate() { return always_allocate_scope_depth_ != 0; }
Address always_allocate_scope_depth_address() {
@@ -535,7 +563,8 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateJSObject(
JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
- MUST_USE_RESULT MaybeObject* AllocateJSModule();
+ MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
+ ScopeInfo* scope_info);
// Allocate a JSArray with no elements
MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
@@ -626,7 +655,7 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateMap(
InstanceType instance_type,
int instance_size,
- ElementsKind elements_kind = FAST_ELEMENTS);
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
// Allocates a partial map for bootstrapping.
MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
@@ -656,6 +685,9 @@ class Heap {
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
+ // For use during bootup.
+ void RepairFreeListsAfterBoot();
+
// Allocates and fully initializes a String. There are two String
// encodings: ASCII and two byte. One should choose between the three string
// allocation functions based on the encoding of the string buffer used to
@@ -682,6 +714,7 @@ class Heap {
PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
Vector<const char> str,
+ int non_ascii_start,
PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
Vector<const uc16> str,
@@ -825,13 +858,16 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateHashTable(
int length, PretenureFlag pretenure = NOT_TENURED);
- // Allocate a global (but otherwise uninitialized) context.
- MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
+ // Allocate a native (but otherwise uninitialized) context.
+ MUST_USE_RESULT MaybeObject* AllocateNativeContext();
- // Allocate a module context.
- MUST_USE_RESULT MaybeObject* AllocateModuleContext(Context* previous,
+ // Allocate a global context.
+ MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
ScopeInfo* scope_info);
+ // Allocate a module context.
+ MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);
+
// Allocate a function context.
MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
JSFunction* function);
@@ -1077,7 +1113,10 @@ class Heap {
void EnsureHeapIsIterable();
// Notify the heap that a context has been disposed.
- int NotifyContextDisposed() { return ++contexts_disposed_; }
+ int NotifyContextDisposed() {
+ flush_monomorphic_ics_ = true;
+ return ++contexts_disposed_;
+ }
// Utility to invoke the scavenger. This is needed in test code to
// ensure correct callback for weak global handles.
@@ -1105,8 +1144,8 @@ class Heap {
#endif
void AddGCPrologueCallback(
- GCEpilogueCallback callback, GCType gc_type_filter);
- void RemoveGCPrologueCallback(GCEpilogueCallback callback);
+ GCPrologueCallback callback, GCType gc_type_filter);
+ void RemoveGCPrologueCallback(GCPrologueCallback callback);
void AddGCEpilogueCallback(
GCEpilogueCallback callback, GCType gc_type_filter);
@@ -1153,13 +1192,13 @@ class Heap {
// not match the empty string.
String* hidden_symbol() { return hidden_symbol_; }
- void set_global_contexts_list(Object* object) {
- global_contexts_list_ = object;
+ void set_native_contexts_list(Object* object) {
+ native_contexts_list_ = object;
}
- Object* global_contexts_list() { return global_contexts_list_; }
+ Object* native_contexts_list() { return native_contexts_list_; }
// Number of mark-sweeps.
- int ms_count() { return ms_count_; }
+ unsigned int ms_count() { return ms_count_; }
// Iterates over all roots in the heap.
void IterateRoots(ObjectVisitor* v, VisitMode mode);
@@ -1230,21 +1269,19 @@ class Heap {
return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
}
- // Get address of global contexts list for serialization support.
- Object** global_contexts_list_address() {
- return &global_contexts_list_;
+ // Get address of native contexts list for serialization support.
+ Object** native_contexts_list_address() {
+ return &native_contexts_list_;
}
-#ifdef DEBUG
- void Print();
- void PrintHandles();
-
+#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
+#endif
- // Verify that AccessorPairs are not shared, i.e. make sure that they have
- // exactly one pointer to them.
- void VerifyNoAccessorPairSharing();
+#ifdef DEBUG
+ void Print();
+ void PrintHandles();
void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
@@ -1253,10 +1290,23 @@ class Heap {
// Report heap statistics.
void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title);
+#endif
+
+ // Zapping is needed for verify heap, and always done in debug builds.
+ static inline bool ShouldZapGarbage() {
+#ifdef DEBUG
+ return true;
+#else
+#ifdef VERIFY_HEAP
+ return FLAG_verify_heap;
+#else
+ return false;
+#endif
+#endif
+ }
// Fill in bogus values in from space
void ZapFromSpace();
-#endif
// Print short heap statistics.
void PrintShortHeapStatistics();
@@ -1294,6 +1344,7 @@ class Heap {
return disallow_allocation_failure_;
}
+ void TracePathToObjectFrom(Object* target, Object* root);
void TracePathToObject(Object* target);
void TracePathToGlobal();
#endif
@@ -1308,20 +1359,9 @@ class Heap {
// Commits from space if it is uncommitted.
void EnsureFromSpaceIsCommitted();
- // Support for partial snapshots. After calling this we can allocate a
- // certain number of bytes using only linear allocation (with a
- // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
- // or causing a GC. It returns true of space was reserved or false if a GC is
- // needed. For paged spaces the space requested must include the space wasted
- // at the end of each page when allocating linearly.
- void ReserveSpace(
- int new_space_size,
- int pointer_space_size,
- int data_space_size,
- int code_space_size,
- int map_space_size,
- int cell_space_size,
- int large_object_size);
+ // Support for partial snapshots. After calling this we have a linear
+ // space to write objects in each space.
+ void ReserveSpace(int *sizes, Address* addresses);
//
// Support for the API.
@@ -1397,15 +1437,15 @@ class Heap {
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
-// Utility type maps
-#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
- STRUCT_LIST(DECLARE_STRUCT_MAP)
-#undef DECLARE_STRUCT_MAP
-
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_DECLARATION
+ // Utility type maps
+#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
+ STRUCT_LIST(DECLARE_STRUCT_MAP)
+#undef DECLARE_STRUCT_MAP
+
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
@@ -1417,6 +1457,10 @@ class Heap {
STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
+ // Generated code can embed direct references to non-writable roots if
+ // they are in new space.
+ static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
+
MUST_USE_RESULT MaybeObject* NumberToString(
Object* number, bool check_number_string_cache = true);
MUST_USE_RESULT MaybeObject* Uint32ToString(
@@ -1488,13 +1532,6 @@ class Heap {
void ClearNormalizedMapCaches();
- // Clears the cache of ICs related to this map.
- void ClearCacheOnMap(Map* map) {
- if (FLAG_cleanup_code_caches_at_gc) {
- map->ClearCodeCache(this);
- }
- }
-
GCTracer* tracer() { return tracer_; }
// Returns the size of objects residing in non new spaces.
@@ -1592,6 +1629,16 @@ class Heap {
set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
+ void SetGetterStubDeoptPCOffset(int pc_offset) {
+ ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
+ void SetSetterStubDeoptPCOffset(int pc_offset) {
+ ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
@@ -1602,9 +1649,65 @@ class Heap {
}
void AgeInlineCaches() {
- ++global_ic_age_;
+ global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
+ }
+
+ bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
+
+ intptr_t amount_of_external_allocated_memory() {
+ return amount_of_external_allocated_memory_;
+ }
+
+ // ObjectStats are kept in two arrays, counts and sizes. Related stats are
+ // stored in a contiguous linear buffer. Stats groups are stored one after
+ // another.
+ enum {
+ FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
+ FIRST_FIXED_ARRAY_SUB_TYPE =
+ FIRST_CODE_KIND_SUB_TYPE + Code::LAST_CODE_KIND + 1,
+ OBJECT_STATS_COUNT =
+ FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1
+ };
+
+ void RecordObjectStats(InstanceType type, int sub_type, size_t size) {
+ ASSERT(type <= LAST_TYPE);
+ if (sub_type < 0) {
+ object_counts_[type]++;
+ object_sizes_[type] += size;
+ } else {
+ if (type == CODE_TYPE) {
+ ASSERT(sub_type <= Code::LAST_CODE_KIND);
+ object_counts_[FIRST_CODE_KIND_SUB_TYPE + sub_type]++;
+ object_sizes_[FIRST_CODE_KIND_SUB_TYPE + sub_type] += size;
+ } else if (type == FIXED_ARRAY_TYPE) {
+ ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
+ object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++;
+ object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size;
+ }
+ }
}
+ void CheckpointObjectStats();
+
+ // We don't use a ScopedLock here since we want to lock the heap
+ // only when FLAG_parallel_recompilation is true.
+ class RelocationLock {
+ public:
+ explicit RelocationLock(Heap* heap) : heap_(heap) {
+ if (FLAG_parallel_recompilation) {
+ heap_->relocation_mutex_->Lock();
+ }
+ }
+ ~RelocationLock() {
+ if (FLAG_parallel_recompilation) {
+ heap_->relocation_mutex_->Unlock();
+ }
+ }
+
+ private:
+ Heap* heap_;
+ };
+
private:
Heap();
@@ -1636,6 +1739,8 @@ class Heap {
int global_ic_age_;
+ bool flush_monomorphic_ics_;
+
int scan_on_scavenge_pages_;
#if defined(V8_TARGET_ARCH_X64)
@@ -1657,7 +1762,7 @@ class Heap {
// Returns the amount of external memory registered since last global gc.
intptr_t PromotedExternalMemorySize();
- int ms_count_; // how many mark-sweep collections happened
+ unsigned int ms_count_; // how many mark-sweep collections happened
unsigned int gc_count_; // how many gc happened
// For post mortem debugging.
@@ -1729,7 +1834,7 @@ class Heap {
// last GC.
int old_gen_exhausted_;
- Object* global_contexts_list_;
+ Object* native_contexts_list_;
StoreBufferRebuilder store_buffer_rebuilder_;
@@ -1807,6 +1912,7 @@ class Heap {
bool PerformGarbageCollection(GarbageCollector collector,
GCTracer* tracer);
+ bool IterateObjectGroups(ObjectVisitor* scavenge_visitor);
inline void UpdateOldSpaceLimits();
@@ -1998,14 +2104,24 @@ class Heap {
void AdvanceIdleIncrementalMarking(intptr_t step_size);
+ void ClearObjectStats(bool clear_last_time_stats = false);
static const int kInitialSymbolTableSize = 2048;
static const int kInitialEvalCacheSize = 64;
static const int kInitialNumberStringCacheSize = 256;
+ // Object counts and used memory by InstanceType
+ size_t object_counts_[OBJECT_STATS_COUNT];
+ size_t object_counts_last_time_[OBJECT_STATS_COUNT];
+ size_t object_sizes_[OBJECT_STATS_COUNT];
+ size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
+
// Maximum GC pause.
int max_gc_pause_;
+ // Total time spent in GC.
+ int total_gc_time_ms_;
+
// Maximum size of objects alive after GC.
intptr_t max_alive_after_gc_;
@@ -2050,15 +2166,16 @@ class Heap {
MemoryChunk* chunks_queued_for_free_;
+ Mutex* relocation_mutex_;
+
friend class Factory;
friend class GCTracer;
friend class DisallowAllocationFailure;
friend class AlwaysAllocateScope;
- friend class LinearAllocationScope;
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
- friend class StaticMarkingVisitor;
+ friend class MarkCompactMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);
@@ -2098,21 +2215,29 @@ class HeapStats {
};
-class AlwaysAllocateScope {
+class DisallowAllocationFailure {
public:
- inline AlwaysAllocateScope();
- inline ~AlwaysAllocateScope();
+ inline DisallowAllocationFailure();
+ inline ~DisallowAllocationFailure();
+
+#ifdef DEBUG
+ private:
+ bool old_state_;
+#endif
};
-class LinearAllocationScope {
+class AlwaysAllocateScope {
public:
- inline LinearAllocationScope();
- inline ~LinearAllocationScope();
+ inline AlwaysAllocateScope();
+ inline ~AlwaysAllocateScope();
+
+ private:
+ // Implicitly disable artificial allocation failures.
+ DisallowAllocationFailure disallow_allocation_failure_;
};
-#ifdef DEBUG
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
@@ -2122,7 +2247,6 @@ class VerifyPointersVisitor: public ObjectVisitor {
public:
inline void VisitPointers(Object** start, Object** end);
};
-#endif
// Space iterator for iterating over all spaces of the heap.
@@ -2281,7 +2405,7 @@ class KeyedLookupCache {
};
-// Cache for mapping (array, property name) into descriptor index.
+// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// Descriptor index equals kNotFound means the property is absent.
// Cleared at startup and prior to any gc.
@@ -2289,21 +2413,21 @@ class DescriptorLookupCache {
public:
// Lookup descriptor index for (map, name).
// If absent, kAbsent is returned.
- int Lookup(DescriptorArray* array, String* name) {
+ int Lookup(Map* source, String* name) {
if (!StringShape(name).IsSymbol()) return kAbsent;
- int index = Hash(array, name);
+ int index = Hash(source, name);
Key& key = keys_[index];
- if ((key.array == array) && (key.name == name)) return results_[index];
+ if ((key.source == source) && (key.name == name)) return results_[index];
return kAbsent;
}
// Update an element in the cache.
- void Update(DescriptorArray* array, String* name, int result) {
+ void Update(Map* source, String* name, int result) {
ASSERT(result != kAbsent);
if (StringShape(name).IsSymbol()) {
- int index = Hash(array, name);
+ int index = Hash(source, name);
Key& key = keys_[index];
- key.array = array;
+ key.source = source;
key.name = name;
results_[index] = result;
}
@@ -2317,24 +2441,26 @@ class DescriptorLookupCache {
private:
DescriptorLookupCache() {
for (int i = 0; i < kLength; ++i) {
- keys_[i].array = NULL;
+ keys_[i].source = NULL;
keys_[i].name = NULL;
results_[i] = kAbsent;
}
}
- static int Hash(DescriptorArray* array, String* name) {
+ static int Hash(Object* source, String* name) {
// Uses only lower 32 bits if pointers are larger.
- uint32_t array_hash =
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
+ uint32_t source_hash =
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
+ >> kPointerSizeLog2;
uint32_t name_hash =
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
- return (array_hash ^ name_hash) % kLength;
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
+ >> kPointerSizeLog2;
+ return (source_hash ^ name_hash) % kLength;
}
static const int kLength = 64;
struct Key {
- DescriptorArray* array;
+ Map* source;
String* name;
};
@@ -2346,18 +2472,6 @@ class DescriptorLookupCache {
};
-#ifdef DEBUG
-class DisallowAllocationFailure {
- public:
- inline DisallowAllocationFailure();
- inline ~DisallowAllocationFailure();
-
- private:
- bool old_state_;
-};
-#endif
-
-
// A helper class to document/test C++ scopes where we do not
// expect a GC. Usage:
//
@@ -2373,6 +2487,7 @@ class AssertNoAllocation {
#ifdef DEBUG
private:
bool old_state_;
+ bool active_;
#endif
};
@@ -2385,6 +2500,7 @@ class DisableAssertNoAllocation {
#ifdef DEBUG
private:
bool old_state_;
+ bool active_;
#endif
};
@@ -2504,24 +2620,31 @@ class GCTracer BASE_EMBEDDED {
};
-class StringSplitCache {
+class RegExpResultsCache {
public:
- static Object* Lookup(FixedArray* cache, String* string, String* pattern);
+ enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
+
+ // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
+ // On success, the returned result is guaranteed to be a COW-array.
+ static Object* Lookup(Heap* heap,
+ String* key_string,
+ Object* key_pattern,
+ ResultsCacheType type);
+ // Attempt to add value_array to the cache specified by type. On success,
+ // value_array is turned into a COW-array.
static void Enter(Heap* heap,
- FixedArray* cache,
- String* string,
- String* pattern,
- FixedArray* array);
+ String* key_string,
+ Object* key_pattern,
+ FixedArray* value_array,
+ ResultsCacheType type);
static void Clear(FixedArray* cache);
- static const int kStringSplitCacheSize = 0x100;
+ static const int kRegExpResultsCacheSize = 0x100;
private:
static const int kArrayEntriesPerCacheEntry = 4;
static const int kStringOffset = 0;
static const int kPatternOffset = 1;
static const int kArrayOffset = 2;
-
- static MaybeObject* WrapFixedArrayInJSArray(Object* fixed_array);
};
diff --git a/src/3rdparty/v8/src/hydrogen-instructions.cc b/src/3rdparty/v8/src/hydrogen-instructions.cc
index 26f7f7a..c8edcff 100644
--- a/src/3rdparty/v8/src/hydrogen-instructions.cc
+++ b/src/3rdparty/v8/src/hydrogen-instructions.cc
@@ -156,6 +156,20 @@ void Range::Union(Range* other) {
}
+void Range::CombinedMax(Range* other) {
+ upper_ = Max(upper_, other->upper_);
+ lower_ = Max(lower_, other->lower_);
+ set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
+}
+
+
+void Range::CombinedMin(Range* other) {
+ upper_ = Min(upper_, other->upper_);
+ lower_ = Min(lower_, other->lower_);
+ set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
+}
+
+
void Range::Sar(int32_t value) {
int32_t bits = value & 0x1F;
lower_ = lower_ >> bits;
@@ -336,7 +350,8 @@ HUseListNode* HValue::RemoveUse(HValue* value, int index) {
// Do not reuse use list nodes in debug mode, zap them.
if (current != NULL) {
HUseListNode* temp =
- new HUseListNode(current->value(), current->index(), NULL);
+ new(block()->zone())
+ HUseListNode(current->value(), current->index(), NULL);
current->Zap();
current = temp;
}
@@ -495,8 +510,8 @@ void HValue::RegisterUse(int index, HValue* new_value) {
if (new_value != NULL) {
if (removed == NULL) {
- new_value->use_list_ =
- new HUseListNode(this, index, new_value->use_list_);
+ new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
+ this, index, new_value->use_list_);
} else {
removed->set_tail(new_value->use_list_);
new_value->use_list_ = removed;
@@ -697,7 +712,7 @@ void HCallGlobal::PrintDataTo(StringStream* stream) {
void HCallKnownGlobal::PrintDataTo(StringStream* stream) {
- stream->Add("o ", target()->shared()->DebugName());
+ stream->Add("%o ", target()->shared()->DebugName());
stream->Add("#%d", argument_count());
}
@@ -849,28 +864,20 @@ void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
}
-HValue* HConstant::Canonicalize() {
- return HasNoUses() ? NULL : this;
-}
-
-
-HValue* HTypeof::Canonicalize() {
- return HasNoUses() ? NULL : this;
-}
-
-
HValue* HBitwise::Canonicalize() {
if (!representation().IsInteger32()) return this;
// If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
if (left()->IsConstant() &&
HConstant::cast(left())->HasInteger32Value() &&
- HConstant::cast(left())->Integer32Value() == nop_constant) {
+ HConstant::cast(left())->Integer32Value() == nop_constant &&
+ !right()->CheckFlag(kUint32)) {
return right();
}
if (right()->IsConstant() &&
HConstant::cast(right())->HasInteger32Value() &&
- HConstant::cast(right())->Integer32Value() == nop_constant) {
+ HConstant::cast(right())->Integer32Value() == nop_constant &&
+ !left()->CheckFlag(kUint32)) {
return left();
}
return this;
@@ -882,7 +889,9 @@ HValue* HBitNot::Canonicalize() {
if (value()->IsBitNot()) {
HValue* result = HBitNot::cast(value())->value();
ASSERT(result->representation().IsInteger32());
- return result;
+ if (!result->CheckFlag(kUint32)) {
+ return result;
+ }
}
return this;
}
@@ -945,7 +954,8 @@ HValue* HUnaryMathOperation::Canonicalize() {
// introduced.
if (value()->representation().IsInteger32()) return value();
-#ifdef V8_TARGET_ARCH_ARM
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_IA32) || \
+ defined(V8_TARGET_ARCH_X64)
if (value()->IsDiv() && (value()->UseCount() == 1)) {
// TODO(2038): Implement this optimization for non ARM architectures.
HDiv* hdiv = HDiv::cast(value());
@@ -969,7 +979,7 @@ HValue* HUnaryMathOperation::Canonicalize() {
!HInstruction::cast(new_right)->IsLinked()) {
HInstruction::cast(new_right)->InsertBefore(this);
}
- HMathFloorOfDiv* instr = new HMathFloorOfDiv(context(),
+ HMathFloorOfDiv* instr = new(block()->zone()) HMathFloorOfDiv(context(),
new_left,
new_right);
// Replace this HMathFloor instruction by the new HMathFloorOfDiv.
@@ -1043,6 +1053,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
}
+void HLoadElements::PrintDataTo(StringStream* stream) {
+ value()->PrintNameTo(stream);
+ stream->Add(" ");
+ typecheck()->PrintNameTo(stream);
+}
+
+
void HCheckMaps::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
stream->Add(" [%p", *map_set()->first());
@@ -1076,6 +1093,11 @@ void HCheckInstanceType::PrintDataTo(StringStream* stream) {
}
+void HCheckPrototypeMaps::PrintDataTo(StringStream* stream) {
+ stream->Add("[receiver_prototype=%p,holder=%p]", *prototype(), *holder());
+}
+
+
void HCallStub::PrintDataTo(StringStream* stream) {
stream->Add("%s ",
CodeStub::MajorName(major_key_, false));
@@ -1104,6 +1126,7 @@ Range* HChange::InferRange(Zone* zone) {
Range* input_range = value()->range();
if (from().IsInteger32() &&
to().IsTagged() &&
+ !value()->CheckFlag(HInstruction::kUint32) &&
input_range != NULL && input_range->IsInSmiRange()) {
set_type(HType::Smi());
}
@@ -1236,6 +1259,24 @@ Range* HMod::InferRange(Zone* zone) {
}
+Range* HMathMinMax::InferRange(Zone* zone) {
+ if (representation().IsInteger32()) {
+ Range* a = left()->range();
+ Range* b = right()->range();
+ Range* res = a->Copy(zone);
+ if (operation_ == kMathMax) {
+ res->CombinedMax(b);
+ } else {
+ ASSERT(operation_ == kMathMin);
+ res->CombinedMin(b);
+ }
+ return res;
+ } else {
+ return HValue::InferRange(zone);
+ }
+}
+
+
void HPhi::PrintTo(StringStream* stream) {
stream->Add("[");
for (int i = 0; i < OperandCount(); ++i) {
@@ -1256,7 +1297,7 @@ void HPhi::PrintTo(StringStream* stream) {
void HPhi::AddInput(HValue* value) {
- inputs_.Add(NULL);
+ inputs_.Add(NULL, value->block()->zone());
SetOperandAt(OperandCount() - 1, value);
// Mark phis that may have 'arguments' directly or indirectly as an operand.
if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
@@ -1303,14 +1344,33 @@ void HPhi::InitRealUses(int phi_id) {
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* value = it.value();
if (!value->IsPhi()) {
- Representation rep = value->RequiredInputRepresentation(it.index());
+ Representation rep = value->ObservedInputRepresentation(it.index());
non_phi_uses_[rep.kind()] += value->LoopWeight();
+ if (FLAG_trace_representation) {
+ PrintF("%d %s is used by %d %s as %s\n",
+ this->id(),
+ this->Mnemonic(),
+ value->id(),
+ value->Mnemonic(),
+ rep.Mnemonic());
+ }
}
}
}
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
+ if (FLAG_trace_representation) {
+ PrintF("adding to %d %s uses of %d %s: i%d d%d t%d\n",
+ this->id(),
+ this->Mnemonic(),
+ other->id(),
+ other->Mnemonic(),
+ other->non_phi_uses_[Representation::kInteger32],
+ other->non_phi_uses_[Representation::kDouble],
+ other->non_phi_uses_[Representation::kTagged]);
+ }
+
for (int i = 0; i < Representation::kNumRepresentations; i++) {
indirect_uses_[i] += other->non_phi_uses_[i];
}
@@ -1324,8 +1384,14 @@ void HPhi::AddIndirectUsesTo(int* dest) {
}
+void HPhi::ResetInteger32Uses() {
+ non_phi_uses_[Representation::kInteger32] = 0;
+ indirect_uses_[Representation::kInteger32] = 0;
+}
+
+
void HSimulate::PrintDataTo(StringStream* stream) {
- stream->Add("id=%d", ast_id());
+ stream->Add("id=%d", ast_id().ToInt());
if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
if (values_.length() > 0) {
if (pop_count_ > 0) stream->Add(" /");
@@ -1354,45 +1420,82 @@ void HDeoptimize::PrintDataTo(StringStream* stream) {
void HEnterInlined::PrintDataTo(StringStream* stream) {
SmartArrayPointer<char> name = function()->debug_name()->ToCString();
- stream->Add("%s, id=%d", *name, function()->id());
+ stream->Add("%s, id=%d", *name, function()->id().ToInt());
+}
+
+
+static bool IsInteger32(double value) {
+ double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
+ return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
}
HConstant::HConstant(Handle<Object> handle, Representation r)
: handle_(handle),
has_int32_value_(false),
- has_double_value_(false),
- int32_value_(0),
- double_value_(0) {
+ has_double_value_(false) {
set_representation(r);
SetFlag(kUseGVN);
if (handle_->IsNumber()) {
double n = handle_->Number();
- double roundtrip_value = static_cast<double>(static_cast<int32_t>(n));
- has_int32_value_ = BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(n);
- if (has_int32_value_) int32_value_ = static_cast<int32_t>(n);
+ has_int32_value_ = IsInteger32(n);
+ int32_value_ = DoubleToInt32(n);
double_value_ = n;
has_double_value_ = true;
}
}
-HConstant* HConstant::CopyToRepresentation(Representation r) const {
+HConstant::HConstant(int32_t integer_value, Representation r)
+ : has_int32_value_(true),
+ has_double_value_(true),
+ int32_value_(integer_value),
+ double_value_(FastI2D(integer_value)) {
+ set_representation(r);
+ SetFlag(kUseGVN);
+}
+
+
+HConstant::HConstant(double double_value, Representation r)
+ : has_int32_value_(IsInteger32(double_value)),
+ has_double_value_(true),
+ int32_value_(DoubleToInt32(double_value)),
+ double_value_(double_value) {
+ set_representation(r);
+ SetFlag(kUseGVN);
+}
+
+
+HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
if (r.IsInteger32() && !has_int32_value_) return NULL;
if (r.IsDouble() && !has_double_value_) return NULL;
- return new HConstant(handle_, r);
+ if (handle_.is_null()) {
+ ASSERT(has_int32_value_ || has_double_value_);
+ if (has_int32_value_) return new(zone) HConstant(int32_value_, r);
+ return new(zone) HConstant(double_value_, r);
+ }
+ return new(zone) HConstant(handle_, r);
}
-HConstant* HConstant::CopyToTruncatedInt32() const {
- if (!has_double_value_) return NULL;
- int32_t truncated = NumberToInt32(*handle_);
- return new HConstant(FACTORY->NewNumberFromInt(truncated),
- Representation::Integer32());
+HConstant* HConstant::CopyToTruncatedInt32(Zone* zone) const {
+ if (has_int32_value_) {
+ if (handle_.is_null()) {
+ return new(zone) HConstant(int32_value_, Representation::Integer32());
+ } else {
+ // Re-use the existing Handle if possible.
+ return new(zone) HConstant(handle_, Representation::Integer32());
+ }
+ } else if (has_double_value_) {
+ return new(zone) HConstant(DoubleToInt32(double_value_),
+ Representation::Integer32());
+ } else {
+ return NULL;
+ }
}
-bool HConstant::ToBoolean() const {
+bool HConstant::ToBoolean() {
// Converts the constant's boolean value according to
// ECMAScript section 9.2 ToBoolean conversion.
if (HasInteger32Value()) return Integer32Value() != 0;
@@ -1400,17 +1503,25 @@ bool HConstant::ToBoolean() const {
double v = DoubleValue();
return v != 0 && !isnan(v);
}
- if (handle()->IsTrue()) return true;
- if (handle()->IsFalse()) return false;
- if (handle()->IsUndefined()) return false;
- if (handle()->IsNull()) return false;
- if (handle()->IsString() &&
- String::cast(*handle())->length() == 0) return false;
+ Handle<Object> literal = handle();
+ if (literal->IsTrue()) return true;
+ if (literal->IsFalse()) return false;
+ if (literal->IsUndefined()) return false;
+ if (literal->IsNull()) return false;
+ if (literal->IsString() && String::cast(*literal)->length() == 0) {
+ return false;
+ }
return true;
}
void HConstant::PrintDataTo(StringStream* stream) {
- handle()->ShortPrint(stream);
+ if (has_int32_value_) {
+ stream->Add("%d ", int32_value_);
+ } else if (has_double_value_) {
+ stream->Add("%f ", FmtElm(double_value_));
+ } else {
+ handle()->ShortPrint(stream);
+ }
}
@@ -1506,7 +1617,7 @@ Range* HShl::InferRange(Zone* zone) {
}
-Range* HLoadKeyedSpecializedArrayElement::InferRange(Zone* zone) {
+Range* HLoadKeyed::InferRange(Zone* zone) {
switch (elements_kind()) {
case EXTERNAL_PIXEL_ELEMENTS:
return new(zone) Range(0, 255);
@@ -1597,24 +1708,55 @@ void HLoadNamedField::PrintDataTo(StringStream* stream) {
}
+// Returns true if an instance of this map can never find a property with this
+// name in its prototype chain. This means all prototypes up to the top are
+// fast and don't have the name in them. It would be good if we could optimize
+// polymorphic loads where the property is sometimes found in the prototype
+// chain.
+static bool PrototypeChainCanNeverResolve(
+ Handle<Map> map, Handle<String> name) {
+ Isolate* isolate = map->GetIsolate();
+ Object* current = map->prototype();
+ while (current != isolate->heap()->null_value()) {
+ if (current->IsJSGlobalProxy() ||
+ current->IsGlobalObject() ||
+ !current->IsJSObject() ||
+ JSObject::cast(current)->map()->has_named_interceptor() ||
+ JSObject::cast(current)->IsAccessCheckNeeded() ||
+ !JSObject::cast(current)->HasFastProperties()) {
+ return false;
+ }
+
+ LookupResult lookup(isolate);
+ Map* map = JSObject::cast(current)->map();
+ map->LookupDescriptor(NULL, *name, &lookup);
+ if (lookup.IsFound()) return false;
+ if (!lookup.IsCacheable()) return false;
+ current = JSObject::cast(current)->GetPrototype();
+ }
+ return true;
+}
+
+
HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
SmallMapList* types,
- Handle<String> name)
- : types_(Min(types->length(), kMaxLoadPolymorphism)),
+ Handle<String> name,
+ Zone* zone)
+ : types_(Min(types->length(), kMaxLoadPolymorphism), zone),
name_(name),
need_generic_(false) {
SetOperandAt(0, context);
SetOperandAt(1, object);
set_representation(Representation::Tagged());
SetGVNFlag(kDependsOnMaps);
- int map_transitions = 0;
+ SmallMapList negative_lookups;
for (int i = 0;
i < types->length() && types_.length() < kMaxLoadPolymorphism;
++i) {
Handle<Map> map = types->at(i);
LookupResult lookup(map->GetIsolate());
- map->LookupInDescriptors(NULL, *name, &lookup);
+ map->LookupDescriptor(NULL, *name, &lookup);
if (lookup.IsFound()) {
switch (lookup.type()) {
case FIELD: {
@@ -1624,28 +1766,47 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
} else {
SetGVNFlag(kDependsOnBackingStoreFields);
}
- types_.Add(types->at(i));
+ types_.Add(types->at(i), zone);
break;
}
case CONSTANT_FUNCTION:
- types_.Add(types->at(i));
+ types_.Add(types->at(i), zone);
break;
- case MAP_TRANSITION:
- // We should just ignore these since they are not relevant to a load
- // operation. This means we will deopt if we actually see this map
- // from optimized code.
- map_transitions++;
+ case CALLBACKS:
break;
- default:
+ case TRANSITION:
+ case INTERCEPTOR:
+ case NONEXISTENT:
+ case NORMAL:
+ case HANDLER:
+ UNREACHABLE();
break;
}
+ } else if (lookup.IsCacheable() &&
+ // For dicts the lookup on the map will fail, but the object may
+ // contain the property so we cannot generate a negative lookup
+ // (which would just be a map check and return undefined).
+ !map->is_dictionary_map() &&
+ !map->has_named_interceptor() &&
+ // TODO(qt): Do we really need this? (since version 3.13.0)
+ //!map->named_interceptor_is_fallback() &&
+ PrototypeChainCanNeverResolve(map, name)) {
+ negative_lookups.Add(types->at(i), zone);
}
}
- if (types_.length() + map_transitions == types->length() &&
- FLAG_deoptimize_uncommon_cases) {
+ bool need_generic =
+ (types->length() != negative_lookups.length() + types_.length());
+ if (!need_generic && FLAG_deoptimize_uncommon_cases) {
SetFlag(kUseGVN);
+ for (int i = 0; i < negative_lookups.length(); i++) {
+ types_.Add(negative_lookups.at(i), zone);
+ }
} else {
+ // We don't have an easy way to handle both a call (to the generic stub) and
+ // a deopt in the same hydrogen instruction, so in this case we don't add
+ // the negative lookups which can deopt - just let the generic stub handle
+ // them.
SetAllSideEffects();
need_generic_ = true;
}
@@ -1685,36 +1846,47 @@ void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
}
-void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintNameTo(stream);
+void HLoadKeyed::PrintDataTo(StringStream* stream) {
+ if (!is_external()) {
+ elements()->PrintNameTo(stream);
+ } else {
+ ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
+ elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ elements()->PrintNameTo(stream);
+ stream->Add(".");
+ stream->Add(ElementsKindToString(elements_kind()));
+ }
+
stream->Add("[");
key()->PrintNameTo(stream);
- stream->Add("]");
+ stream->Add("] ");
+ dependency()->PrintNameTo(stream);
+ if (RequiresHoleCheck()) {
+ stream->Add(" check_hole");
+ }
}
-bool HLoadKeyedFastElement::RequiresHoleCheck() {
- if (hole_check_mode_ == OMIT_HOLE_CHECK) {
+bool HLoadKeyed::RequiresHoleCheck() const {
+ if (IsFastPackedElementsKind(elements_kind())) {
return false;
}
+ if (IsFastDoubleElementsKind(elements_kind())) {
+ return true;
+ }
+
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
- if (!use->IsChange()) return true;
+ if (!use->IsChange()) {
+ return true;
+ }
}
return false;
}
-void HLoadKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
- elements()->PrintNameTo(stream);
- stream->Add("[");
- key()->PrintNameTo(stream);
- stream->Add("]");
-}
-
-
void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
object()->PrintNameTo(stream);
stream->Add("[");
@@ -1727,25 +1899,26 @@ HValue* HLoadKeyedGeneric::Canonicalize() {
// Recognize generic keyed loads that use property name generated
// by for-in statement as a key and rewrite them into fast property load
// by index.
- if (key()->IsLoadKeyedFastElement()) {
- HLoadKeyedFastElement* key_load = HLoadKeyedFastElement::cast(key());
- if (key_load->object()->IsForInCacheArray()) {
+ if (key()->IsLoadKeyed()) {
+ HLoadKeyed* key_load = HLoadKeyed::cast(key());
+ if (key_load->elements()->IsForInCacheArray()) {
HForInCacheArray* names_cache =
- HForInCacheArray::cast(key_load->object());
+ HForInCacheArray::cast(key_load->elements());
if (names_cache->enumerable() == object()) {
HForInCacheArray* index_cache =
names_cache->index_cache();
HCheckMapValue* map_check =
new(block()->zone()) HCheckMapValue(object(), names_cache->map());
- HInstruction* index = new(block()->zone()) HLoadKeyedFastElement(
+ HInstruction* index = new(block()->zone()) HLoadKeyed(
index_cache,
key_load->key(),
- HLoadKeyedFastElement::OMIT_HOLE_CHECK);
- HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
- object(), index);
+ key_load->key(),
+ key_load->elements_kind());
map_check->InsertBefore(this);
index->InsertBefore(this);
+ HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
+ object(), index);
load->InsertBefore(this);
return load;
}
@@ -1756,52 +1929,6 @@ HValue* HLoadKeyedGeneric::Canonicalize() {
}
-void HLoadKeyedSpecializedArrayElement::PrintDataTo(
- StringStream* stream) {
- external_pointer()->PrintNameTo(stream);
- stream->Add(".");
- switch (elements_kind()) {
- case EXTERNAL_BYTE_ELEMENTS:
- stream->Add("byte");
- break;
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- stream->Add("u_byte");
- break;
- case EXTERNAL_SHORT_ELEMENTS:
- stream->Add("short");
- break;
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- stream->Add("u_short");
- break;
- case EXTERNAL_INT_ELEMENTS:
- stream->Add("int");
- break;
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- stream->Add("u_int");
- break;
- case EXTERNAL_FLOAT_ELEMENTS:
- stream->Add("float");
- break;
- case EXTERNAL_DOUBLE_ELEMENTS:
- stream->Add("double");
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- stream->Add("pixel");
- break;
- case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
- case DICTIONARY_ELEMENTS:
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNREACHABLE();
- break;
- }
- stream->Add("[");
- key()->PrintNameTo(stream);
- stream->Add("]");
-}
-
-
void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
object()->PrintNameTo(stream);
stream->Add(".");
@@ -1828,17 +1955,17 @@ void HStoreNamedField::PrintDataTo(StringStream* stream) {
}
-void HStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintNameTo(stream);
- stream->Add("[");
- key()->PrintNameTo(stream);
- stream->Add("] = ");
- value()->PrintNameTo(stream);
-}
-
+void HStoreKeyed::PrintDataTo(StringStream* stream) {
+ if (!is_external()) {
+ elements()->PrintNameTo(stream);
+ } else {
+ elements()->PrintNameTo(stream);
+ stream->Add(".");
+ stream->Add(ElementsKindToString(elements_kind()));
+ ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
+ elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ }
-void HStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
- elements()->PrintNameTo(stream);
stream->Add("[");
key()->PrintNameTo(stream);
stream->Add("] = ");
@@ -1855,56 +1982,15 @@ void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
}
-void HStoreKeyedSpecializedArrayElement::PrintDataTo(
- StringStream* stream) {
- external_pointer()->PrintNameTo(stream);
- stream->Add(".");
- switch (elements_kind()) {
- case EXTERNAL_BYTE_ELEMENTS:
- stream->Add("byte");
- break;
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- stream->Add("u_byte");
- break;
- case EXTERNAL_SHORT_ELEMENTS:
- stream->Add("short");
- break;
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- stream->Add("u_short");
- break;
- case EXTERNAL_INT_ELEMENTS:
- stream->Add("int");
- break;
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- stream->Add("u_int");
- break;
- case EXTERNAL_FLOAT_ELEMENTS:
- stream->Add("float");
- break;
- case EXTERNAL_DOUBLE_ELEMENTS:
- stream->Add("double");
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- stream->Add("pixel");
- break;
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
- case DICTIONARY_ELEMENTS:
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNREACHABLE();
- break;
- }
- stream->Add("[");
- key()->PrintNameTo(stream);
- stream->Add("] = ");
- value()->PrintNameTo(stream);
-}
-
-
void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
object()->PrintNameTo(stream);
- stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+ ElementsKind from_kind = original_map()->elements_kind();
+ ElementsKind to_kind = transitioned_map()->elements_kind();
+ stream->Add(" %p [%s] -> %p [%s]",
+ *original_map(),
+ ElementsAccessor::ForKind(from_kind)->name(),
+ *transitioned_map(),
+ ElementsAccessor::ForKind(to_kind)->name());
}
@@ -1915,7 +2001,7 @@ void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
}
-bool HLoadGlobalCell::RequiresHoleCheck() {
+bool HLoadGlobalCell::RequiresHoleCheck() const {
if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
@@ -1997,6 +2083,10 @@ HType HPhi::CalculateInferredType() {
HType HConstant::CalculateInferredType() {
+ if (has_int32_value_) {
+ return Smi::IsValid(int32_value_) ? HType::Smi() : HType::HeapNumber();
+ }
+ if (has_double_value_) return HType::HeapNumber();
return HType::TypeFromValue(handle_);
}
@@ -2144,6 +2234,13 @@ HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
}
+HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
+ visited->Add(id());
+ SetFlag(kBailoutOnMinusZero);
+ return NULL;
+}
+
+
HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) {
visited->Add(id());
if (range() == NULL || range()->CanBeMinusZero()) {
@@ -2175,10 +2272,10 @@ HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
}
-bool HStoreKeyedFastDoubleElement::NeedsCanonicalization() {
- // If value was loaded from unboxed double backing store or
- // converted from an integer then we don't have to canonicalize it.
- if (value()->IsLoadKeyedFastDoubleElement() ||
+bool HStoreKeyed::NeedsCanonicalization() {
+ // If value is an integer or comes from the result of a keyed load
+ // then it will be a non-hole value: no need for canonicalization.
+ if (value()->IsLoadKeyed() ||
(value()->IsChange() && HChange::cast(value())->from().IsInteger32())) {
return false;
}
@@ -2439,12 +2536,6 @@ void HCheckFunction::Verify() {
ASSERT(HasNoUses());
}
-
-void HCheckPrototypeMaps::Verify() {
- HInstruction::Verify();
- ASSERT(HasNoUses());
-}
-
#endif
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/hydrogen-instructions.h b/src/3rdparty/v8/src/hydrogen-instructions.h
index c2c51ca..7136657 100644
--- a/src/3rdparty/v8/src/hydrogen-instructions.h
+++ b/src/3rdparty/v8/src/hydrogen-instructions.h
@@ -53,6 +53,7 @@ class LChunkBuilder;
#define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V) \
+ V(BinaryOperation) \
V(BitwiseBinaryOperation) \
V(ControlInstruction) \
V(Instruction) \
@@ -124,7 +125,6 @@ class LChunkBuilder;
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
- V(StringCompareAndBranch) \
V(JSArrayLength) \
V(LeaveInlined) \
V(LoadContextSlot) \
@@ -133,14 +133,14 @@ class LChunkBuilder;
V(LoadFunctionPrototype) \
V(LoadGlobalCell) \
V(LoadGlobalGeneric) \
- V(LoadKeyedFastDoubleElement) \
- V(LoadKeyedFastElement) \
+ V(LoadKeyed) \
V(LoadKeyedGeneric) \
- V(LoadKeyedSpecializedArrayElement) \
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MapEnumLength) \
V(MathFloorOfDiv) \
+ V(MathMinMax) \
V(Mod) \
V(Mul) \
V(ObjectLiteral) \
@@ -152,6 +152,7 @@ class LChunkBuilder;
V(Random) \
V(RegExpLiteral) \
V(Return) \
+ V(Ror) \
V(Sar) \
V(Shl) \
V(Shr) \
@@ -161,15 +162,14 @@ class LChunkBuilder;
V(StoreContextSlot) \
V(StoreGlobalCell) \
V(StoreGlobalGeneric) \
- V(StoreKeyedFastDoubleElement) \
- V(StoreKeyedFastElement) \
+ V(StoreKeyed) \
V(StoreKeyedGeneric) \
- V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
+ V(StringCompareAndBranch) \
V(StringLength) \
V(Sub) \
V(ThisFunction) \
@@ -224,6 +224,16 @@ class LChunkBuilder;
virtual Opcode opcode() const { return HValue::k##type; }
+#ifdef DEBUG
+#define ASSERT_ALLOCATION_DISABLED do { \
+ OptimizingCompilerThread* thread = \
+ ISOLATE->optimizing_compiler_thread(); \
+ ASSERT(thread->IsOptimizerThread() || !HEAP->IsAllocationAllowed()); \
+ } while (0)
+#else
+#define ASSERT_ALLOCATION_DISABLED do {} while (0)
+#endif
+
class Range: public ZoneObject {
public:
Range()
@@ -276,6 +286,8 @@ class Range: public ZoneObject {
void Intersect(Range* other);
void Union(Range* other);
+ void CombinedMax(Range* other);
+ void CombinedMin(Range* other);
void AddConstant(int32_t value);
void Sar(int32_t value);
@@ -549,7 +561,14 @@ class HValue: public ZoneObject {
kIsArguments,
kTruncatingToInt32,
kIsDead,
- kLastFlag = kIsDead
+ // Instructions that are allowed to produce full range unsigned integer
+ // values are marked with kUint32 flag. If arithmetic shift or a load from
+ // EXTERNAL_UNSIGNED_INT_ELEMENTS array is not marked with this flag
+ // it will deoptimize if result does not fit into signed integer range.
+ // HGraph::ComputeSafeUint32Operations is responsible for setting this
+ // flag.
+ kUint32,
+ kLastFlag = kUint32
};
STATIC_ASSERT(kLastFlag < kBitsPerInt);
@@ -645,7 +664,7 @@ class HValue: public ZoneObject {
// Operands.
virtual int OperandCount() = 0;
- virtual HValue* OperandAt(int index) = 0;
+ virtual HValue* OperandAt(int index) const = 0;
void SetOperandAt(int index, HValue* value);
void DeleteAndReplaceWith(HValue* other);
@@ -720,6 +739,11 @@ class HValue: public ZoneObject {
return representation();
}
+ // Type feedback access.
+ virtual Representation ObservedInputRepresentation(int index) {
+ return RequiredInputRepresentation(index);
+ }
+
// This gives the instruction an opportunity to replace itself with an
// instruction that does the same in some better way. To replace an
// instruction with a new one, first add the new instruction to the graph,
@@ -751,6 +775,10 @@ class HValue: public ZoneObject {
UNREACHABLE();
}
+ bool IsDead() const {
+ return HasNoUses() && !HasObservableSideEffects() && IsDeletable();
+ }
+
#ifdef DEBUG
virtual void Verify() = 0;
#endif
@@ -835,6 +863,8 @@ class HValue: public ZoneObject {
GVNFlagSet gvn_flags_;
private:
+ virtual bool IsDeletable() const { return false; }
+
DISALLOW_COPY_AND_ASSIGN(HValue);
};
@@ -852,9 +882,14 @@ class HInstruction: public HValue {
void InsertBefore(HInstruction* next);
void InsertAfter(HInstruction* previous);
+ // The position is a write-once variable.
int position() const { return position_; }
bool has_position() const { return position_ != RelocInfo::kNoPosition; }
- void set_position(int position) { position_ = position; }
+ void set_position(int position) {
+ ASSERT(!has_position());
+ ASSERT(position != RelocInfo::kNoPosition);
+ position_ = position;
+ }
bool CanTruncateToInt32() const { return CheckFlag(kTruncatingToInt32); }
@@ -898,7 +933,7 @@ template<int V>
class HTemplateInstruction : public HInstruction {
public:
int OperandCount() { return V; }
- HValue* OperandAt(int i) { return inputs_[i]; }
+ HValue* OperandAt(int i) const { return inputs_[i]; }
protected:
void InternalSetOperandAt(int i, HValue* value) { inputs_[i] = value; }
@@ -950,7 +985,7 @@ class HTemplateControlInstruction: public HControlInstruction {
void SetSuccessorAt(int i, HBasicBlock* block) { successors_[i] = block; }
int OperandCount() { return V; }
- HValue* OperandAt(int i) { return inputs_[i]; }
+ HValue* OperandAt(int i) const { return inputs_[i]; }
protected:
@@ -987,14 +1022,15 @@ class HSoftDeoptimize: public HTemplateInstruction<0> {
class HDeoptimize: public HControlInstruction {
public:
- explicit HDeoptimize(int environment_length) : values_(environment_length) { }
+ HDeoptimize(int environment_length, Zone* zone)
+ : values_(environment_length, zone) { }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
virtual int OperandCount() { return values_.length(); }
- virtual HValue* OperandAt(int index) { return values_[index]; }
+ virtual HValue* OperandAt(int index) const { return values_[index]; }
virtual void PrintDataTo(StringStream* stream);
virtual int SuccessorCount() { return 0; }
@@ -1006,8 +1042,8 @@ class HDeoptimize: public HControlInstruction {
UNREACHABLE();
}
- void AddEnvironmentValue(HValue* value) {
- values_.Add(NULL);
+ void AddEnvironmentValue(HValue* value, Zone* zone) {
+ values_.Add(NULL, zone);
SetOperandAt(values_.length() - 1, value);
}
@@ -1155,7 +1191,7 @@ class HUnaryOperation: public HTemplateInstruction<1> {
return reinterpret_cast<HUnaryOperation*>(value);
}
- HValue* value() { return OperandAt(0); }
+ HValue* value() const { return OperandAt(0); }
virtual void PrintDataTo(StringStream* stream);
};
@@ -1231,8 +1267,8 @@ class HChange: public HUnaryOperation {
virtual HType CalculateInferredType();
virtual HValue* Canonicalize();
- Representation from() { return value()->representation(); }
- Representation to() { return representation(); }
+ Representation from() const { return value()->representation(); }
+ Representation to() const { return representation(); }
bool deoptimize_on_undefined() const {
return CheckFlag(kDeoptimizeOnUndefined);
}
@@ -1251,6 +1287,11 @@ class HChange: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const {
+ return !from().IsTagged() || value()->type().IsSmi();
+ }
};
@@ -1270,23 +1311,27 @@ class HClampToUint8: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
class HSimulate: public HInstruction {
public:
- HSimulate(int ast_id, int pop_count)
+ HSimulate(BailoutId ast_id, int pop_count, Zone* zone)
: ast_id_(ast_id),
pop_count_(pop_count),
- values_(2),
- assigned_indexes_(2) {}
+ values_(2, zone),
+ assigned_indexes_(2, zone),
+ zone_(zone) {}
virtual ~HSimulate() {}
virtual void PrintDataTo(StringStream* stream);
- bool HasAstId() const { return ast_id_ != AstNode::kNoNumber; }
- int ast_id() const { return ast_id_; }
- void set_ast_id(int id) {
+ bool HasAstId() const { return !ast_id_.IsNone(); }
+ BailoutId ast_id() const { return ast_id_; }
+ void set_ast_id(BailoutId id) {
ASSERT(!HasAstId());
ast_id_ = id;
}
@@ -1307,7 +1352,7 @@ class HSimulate: public HInstruction {
AddValue(kNoIndex, value);
}
virtual int OperandCount() { return values_.length(); }
- virtual HValue* OperandAt(int index) { return values_[index]; }
+ virtual HValue* OperandAt(int index) const { return values_[index]; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -1327,17 +1372,18 @@ class HSimulate: public HInstruction {
private:
static const int kNoIndex = -1;
void AddValue(int index, HValue* value) {
- assigned_indexes_.Add(index);
+ assigned_indexes_.Add(index, zone_);
// Resize the list of pushed values.
- values_.Add(NULL);
+ values_.Add(NULL, zone_);
// Set the operand through the base method in HValue to make sure that the
// use lists are correctly updated.
SetOperandAt(values_.length() - 1, value);
}
- int ast_id_;
+ BailoutId ast_id_;
int pop_count_;
ZoneList<HValue*> values_;
ZoneList<int> assigned_indexes_;
+ Zone* zone_;
};
@@ -1377,20 +1423,30 @@ class HStackCheck: public HTemplateInstruction<1> {
};
+enum InliningKind {
+ NORMAL_RETURN, // Normal function/method call and return.
+ DROP_EXTRA_ON_RETURN, // Drop an extra value from the environment on return.
+ CONSTRUCT_CALL_RETURN, // Either use allocated receiver or return value.
+ GETTER_CALL_RETURN, // Returning from a getter, need to restore context.
+ SETTER_CALL_RETURN // Use the RHS of the assignment as the return value.
+};
+
+
class HEnterInlined: public HTemplateInstruction<0> {
public:
HEnterInlined(Handle<JSFunction> closure,
int arguments_count,
FunctionLiteral* function,
CallKind call_kind,
- bool is_construct,
+ InliningKind inlining_kind,
Variable* arguments_var,
ZoneList<HValue*>* arguments_values)
: closure_(closure),
arguments_count_(arguments_count),
+ arguments_pushed_(false),
function_(function),
call_kind_(call_kind),
- is_construct_(is_construct),
+ inlining_kind_(inlining_kind),
arguments_var_(arguments_var),
arguments_values_(arguments_values) {
}
@@ -1399,9 +1455,11 @@ class HEnterInlined: public HTemplateInstruction<0> {
Handle<JSFunction> closure() const { return closure_; }
int arguments_count() const { return arguments_count_; }
+ bool arguments_pushed() const { return arguments_pushed_; }
+ void set_arguments_pushed() { arguments_pushed_ = true; }
FunctionLiteral* function() const { return function_; }
CallKind call_kind() const { return call_kind_; }
- bool is_construct() const { return is_construct_; }
+ InliningKind inlining_kind() const { return inlining_kind_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -1415,9 +1473,10 @@ class HEnterInlined: public HTemplateInstruction<0> {
private:
Handle<JSFunction> closure_;
int arguments_count_;
+ bool arguments_pushed_;
FunctionLiteral* function_;
CallKind call_kind_;
- bool is_construct_;
+ InliningKind inlining_kind_;
Variable* arguments_var_;
ZoneList<HValue*>* arguments_values_;
};
@@ -1425,21 +1484,13 @@ class HEnterInlined: public HTemplateInstruction<0> {
class HLeaveInlined: public HTemplateInstruction<0> {
public:
- explicit HLeaveInlined(bool arguments_pushed)
- : arguments_pushed_(arguments_pushed) { }
+ HLeaveInlined() { }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
- bool arguments_pushed() {
- return arguments_pushed_;
- }
-
DECLARE_CONCRETE_INSTRUCTION(LeaveInlined)
-
- private:
- bool arguments_pushed_;
};
@@ -1461,7 +1512,7 @@ class HPushArgument: public HUnaryOperation {
class HThisFunction: public HTemplateInstruction<0> {
public:
- explicit HThisFunction(Handle<JSFunction> closure) : closure_(closure) {
+ HThisFunction() {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
}
@@ -1470,18 +1521,13 @@ class HThisFunction: public HTemplateInstruction<0> {
return Representation::None();
}
- Handle<JSFunction> closure() const { return closure_; }
-
DECLARE_CONCRETE_INSTRUCTION(ThisFunction)
protected:
- virtual bool DataEquals(HValue* other) {
- HThisFunction* b = HThisFunction::cast(other);
- return *closure() == *b->closure();
- }
+ virtual bool DataEquals(HValue* other) { return true; }
private:
- Handle<JSFunction> closure_;
+ virtual bool IsDeletable() const { return true; }
};
@@ -1500,6 +1546,9 @@ class HContext: public HTemplateInstruction<0> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -1518,6 +1567,9 @@ class HOuterContext: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -1573,6 +1625,7 @@ class HGlobalObject: public HUnaryOperation {
}
private:
+ virtual bool IsDeletable() const { return true; }
bool qml_global_;
};
@@ -1593,6 +1646,9 @@ class HGlobalReceiver: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -1855,7 +1911,9 @@ class HCallRuntime: public HCall<1> {
class HJSArrayLength: public HTemplateInstruction<2> {
public:
- HJSArrayLength(HValue* value, HValue* typecheck) {
+ HJSArrayLength(HValue* value, HValue* typecheck,
+ HType type = HType::Tagged()) {
+ set_type(type);
// The length of an array is stored as a tagged value in the array
// object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number.
@@ -1879,13 +1937,17 @@ class HJSArrayLength: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength)
protected:
- virtual bool DataEquals(HValue* other) { return true; }
+ virtual bool DataEquals(HValue* other_raw) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
class HFixedArrayBaseLength: public HUnaryOperation {
public:
explicit HFixedArrayBaseLength(HValue* value) : HUnaryOperation(value) {
+ set_type(HType::Smi());
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnArrayLengths);
@@ -1899,6 +1961,32 @@ class HFixedArrayBaseLength: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
+};
+
+
+class HMapEnumLength: public HUnaryOperation {
+ public:
+ explicit HMapEnumLength(HValue* value) : HUnaryOperation(value) {
+ set_type(HType::Smi());
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(MapEnumLength)
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -1918,6 +2006,9 @@ class HElementsKind: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -1940,6 +2031,9 @@ class HBitNot: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -2022,18 +2116,27 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
}
private:
+ virtual bool IsDeletable() const { return true; }
+
BuiltinFunctionId op_;
};
-class HLoadElements: public HUnaryOperation {
+class HLoadElements: public HTemplateInstruction<2> {
public:
- explicit HLoadElements(HValue* value) : HUnaryOperation(value) {
+ HLoadElements(HValue* value, HValue* typecheck) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, typecheck);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnElementsPointer);
}
+ HValue* value() { return OperandAt(0); }
+ HValue* typecheck() { return OperandAt(1); }
+
+ virtual void PrintDataTo(StringStream* stream);
+
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
@@ -2042,6 +2145,9 @@ class HLoadElements: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -2065,12 +2171,16 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
class HCheckMaps: public HTemplateInstruction<2> {
public:
- HCheckMaps(HValue* value, Handle<Map> map, HValue* typecheck = NULL) {
+ HCheckMaps(HValue* value, Handle<Map> map, Zone* zone,
+ HValue* typecheck = NULL) {
SetOperandAt(0, value);
// If callers don't depend on a typecheck, they can pass in NULL. In that
// case we use a copy of the |value| argument as a dummy value.
@@ -2079,9 +2189,9 @@ class HCheckMaps: public HTemplateInstruction<2> {
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
SetGVNFlag(kDependsOnElementsKind);
- map_set()->Add(map);
+ map_set()->Add(map, zone);
}
- HCheckMaps(HValue* value, SmallMapList* maps) {
+ HCheckMaps(HValue* value, SmallMapList* maps, Zone* zone) {
SetOperandAt(0, value);
SetOperandAt(1, value);
set_representation(Representation::Tagged());
@@ -2089,37 +2199,31 @@ class HCheckMaps: public HTemplateInstruction<2> {
SetGVNFlag(kDependsOnMaps);
SetGVNFlag(kDependsOnElementsKind);
for (int i = 0; i < maps->length(); i++) {
- map_set()->Add(maps->at(i));
+ map_set()->Add(maps->at(i), zone);
}
map_set()->Sort();
}
- static HCheckMaps* NewWithTransitions(HValue* object, Handle<Map> map) {
- HCheckMaps* check_map = new HCheckMaps(object, map);
+ static HCheckMaps* NewWithTransitions(HValue* object, Handle<Map> map,
+ Zone* zone) {
+ HCheckMaps* check_map = new(zone) HCheckMaps(object, map, zone);
SmallMapList* map_set = check_map->map_set();
- // If the map to check has the untransitioned elements, it can be hoisted
- // above TransitionElements instructions.
- if (map->has_fast_smi_only_elements()) {
- check_map->ClearGVNFlag(kDependsOnElementsKind);
- }
-
- Map* transitioned_fast_element_map =
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL);
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- map_set->Add(Handle<Map>(transitioned_fast_element_map));
- }
- Map* transitioned_double_map =
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL);
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- map_set->Add(Handle<Map>(transitioned_double_map));
- }
+ // Since transitioned elements maps of the initial map don't fail the map
+ // check, the CheckMaps instruction doesn't need to depend on ElementsKinds.
+ check_map->ClearGVNFlag(kDependsOnElementsKind);
+
+ ElementsKind kind = map->elements_kind();
+ bool packed = IsFastPackedElementsKind(kind);
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ Map* transitioned_map =
+ map->LookupElementsTransitionMap(kind);
+ if (transitioned_map) {
+ map_set->Add(Handle<Map>(transitioned_map), zone);
+ }
+ };
map_set->Sort();
-
return check_map;
}
@@ -2185,17 +2289,17 @@ class HCheckFunction: public HUnaryOperation {
class HCheckInstanceType: public HUnaryOperation {
public:
- static HCheckInstanceType* NewIsSpecObject(HValue* value) {
- return new HCheckInstanceType(value, IS_SPEC_OBJECT);
+ static HCheckInstanceType* NewIsSpecObject(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_SPEC_OBJECT);
}
- static HCheckInstanceType* NewIsJSArray(HValue* value) {
- return new HCheckInstanceType(value, IS_JS_ARRAY);
+ static HCheckInstanceType* NewIsJSArray(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_JS_ARRAY);
}
- static HCheckInstanceType* NewIsString(HValue* value) {
- return new HCheckInstanceType(value, IS_STRING);
+ static HCheckInstanceType* NewIsString(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_STRING);
}
- static HCheckInstanceType* NewIsSymbol(HValue* value) {
- return new HCheckInstanceType(value, IS_SYMBOL);
+ static HCheckInstanceType* NewIsSymbol(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_SYMBOL);
}
virtual void PrintDataTo(StringStream* stream);
@@ -2286,10 +2390,6 @@ class HCheckPrototypeMaps: public HTemplateInstruction<0> {
SetGVNFlag(kDependsOnMaps);
}
-#ifdef DEBUG
- virtual void Verify();
-#endif
-
Handle<JSObject> prototype() const { return prototype_; }
Handle<JSObject> holder() const { return holder_; }
@@ -2299,8 +2399,10 @@ class HCheckPrototypeMaps: public HTemplateInstruction<0> {
return Representation::None();
}
+ virtual void PrintDataTo(StringStream* stream);
+
virtual intptr_t Hashcode() {
- ASSERT(!HEAP->IsAllocationAllowed());
+ ASSERT_ALLOCATION_DISABLED;
intptr_t hash = reinterpret_cast<intptr_t>(*prototype());
hash = 17 * hash + reinterpret_cast<intptr_t>(*holder());
return hash;
@@ -2344,8 +2446,8 @@ class HCheckSmi: public HUnaryOperation {
class HPhi: public HValue {
public:
- explicit HPhi(int merged_index)
- : inputs_(2),
+ HPhi(int merged_index, Zone* zone)
+ : inputs_(2, zone),
merged_index_(merged_index),
phi_id_(-1),
is_live_(false),
@@ -2367,7 +2469,7 @@ class HPhi: public HValue {
}
virtual HType CalculateInferredType();
virtual int OperandCount() { return inputs_.length(); }
- virtual HValue* OperandAt(int index) { return inputs_[index]; }
+ virtual HValue* OperandAt(int index) const { return inputs_[index]; }
HValue* GetRedundantReplacement();
void AddInput(HValue* value);
bool HasRealUses();
@@ -2424,11 +2526,15 @@ class HPhi: public HValue {
bool AllOperandsConvertibleToInteger() {
for (int i = 0; i < OperandCount(); ++i) {
- if (!OperandAt(i)->IsConvertibleToInteger()) return false;
+ if (!OperandAt(i)->IsConvertibleToInteger()) {
+ return false;
+ }
}
return true;
}
+ void ResetInteger32Uses();
+
protected:
virtual void DeleteFromGraph();
virtual void InternalSetOperandAt(int index, HValue* value) {
@@ -2459,26 +2565,51 @@ class HArgumentsObject: public HTemplateInstruction<0> {
}
DECLARE_CONCRETE_INSTRUCTION(ArgumentsObject)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
class HConstant: public HTemplateInstruction<0> {
public:
HConstant(Handle<Object> handle, Representation r);
+ HConstant(int32_t value, Representation r);
+ HConstant(double value, Representation r);
- Handle<Object> handle() const { return handle_; }
+ Handle<Object> handle() {
+ if (handle_.is_null()) {
+ handle_ = FACTORY->NewNumber(double_value_, TENURED);
+ }
+ ASSERT(has_int32_value_ || !handle_->IsSmi());
+ return handle_;
+ }
bool InOldSpace() const { return !HEAP->InNewSpace(*handle_); }
bool ImmortalImmovable() const {
+ if (has_int32_value_) {
+ return false;
+ }
+ if (has_double_value_) {
+ if (BitCast<int64_t>(double_value_) == BitCast<int64_t>(-0.0) ||
+ isnan(double_value_)) {
+ return true;
+ }
+ return false;
+ }
+
+ ASSERT(!handle_.is_null());
Heap* heap = HEAP;
+ // We should have handled minus_zero_value and nan_value in the
+ // has_double_value_ clause above.
+ ASSERT(*handle_ != heap->minus_zero_value());
+ ASSERT(*handle_ != heap->nan_value());
if (*handle_ == heap->undefined_value()) return true;
if (*handle_ == heap->null_value()) return true;
if (*handle_ == heap->true_value()) return true;
if (*handle_ == heap->false_value()) return true;
if (*handle_ == heap->the_hole_value()) return true;
- if (*handle_ == heap->minus_zero_value()) return true;
- if (*handle_ == heap->nan_value()) return true;
if (*handle_ == heap->empty_string()) return true;
return false;
}
@@ -2488,20 +2619,15 @@ class HConstant: public HTemplateInstruction<0> {
}
virtual bool IsConvertibleToInteger() const {
- if (handle_->IsSmi()) return true;
- if (handle_->IsHeapNumber() &&
- (HeapNumber::cast(*handle_)->value() ==
- static_cast<double>(NumberToInt32(*handle_)))) return true;
- return false;
+ return has_int32_value_;
}
virtual bool EmitAtUses() { return !representation().IsDouble(); }
- virtual HValue* Canonicalize();
virtual void PrintDataTo(StringStream* stream);
virtual HType CalculateInferredType();
- bool IsInteger() const { return handle_->IsSmi(); }
- HConstant* CopyToRepresentation(Representation r) const;
- HConstant* CopyToTruncatedInt32() const;
+ bool IsInteger() { return handle()->IsSmi(); }
+ HConstant* CopyToRepresentation(Representation r, Zone* zone) const;
+ HConstant* CopyToTruncatedInt32(Zone* zone) const;
bool HasInteger32Value() const { return has_int32_value_; }
int32_t Integer32Value() const {
ASSERT(HasInteger32Value());
@@ -2512,24 +2638,35 @@ class HConstant: public HTemplateInstruction<0> {
ASSERT(HasDoubleValue());
return double_value_;
}
- bool HasNumberValue() const { return has_int32_value_ || has_double_value_; }
+ bool HasNumberValue() const { return has_double_value_; }
int32_t NumberValueAsInteger32() const {
ASSERT(HasNumberValue());
- if (has_int32_value_) return int32_value_;
- return DoubleToInt32(double_value_);
+ // Irrespective of whether a numeric HConstant can be safely
+ // represented as an int32, we store the (in some cases lossy)
+ // representation of the number in int32_value_.
+ return int32_value_;
}
- bool HasStringValue() const { return handle_->IsString(); }
- bool ToBoolean() const;
+ bool ToBoolean();
+
+ bool IsUint32() {
+ return HasInteger32Value() && (Integer32Value() >= 0);
+ }
virtual intptr_t Hashcode() {
- ASSERT(!HEAP->allow_allocation(false));
- intptr_t hash = reinterpret_cast<intptr_t>(*handle());
- // Prevent smis from having fewer hash values when truncated to
- // the least significant bits.
- const int kShiftSize = kSmiShiftSize + kSmiTagSize;
- STATIC_ASSERT(kShiftSize != 0);
- return hash ^ (hash >> kShiftSize);
+ ASSERT_ALLOCATION_DISABLED;
+ intptr_t hash;
+
+ if (has_int32_value_) {
+ hash = static_cast<intptr_t>(int32_value_);
+ } else if (has_double_value_) {
+ hash = static_cast<intptr_t>(BitCast<int64_t>(double_value_));
+ } else {
+ ASSERT(!handle_.is_null());
+ hash = reinterpret_cast<intptr_t>(*handle_);
+ }
+
+ return hash;
}
#ifdef DEBUG
@@ -2543,15 +2680,34 @@ class HConstant: public HTemplateInstruction<0> {
virtual bool DataEquals(HValue* other) {
HConstant* other_constant = HConstant::cast(other);
- return handle().is_identical_to(other_constant->handle());
+ if (has_int32_value_) {
+ return other_constant->has_int32_value_ &&
+ int32_value_ == other_constant->int32_value_;
+ } else if (has_double_value_) {
+ return other_constant->has_double_value_ &&
+ BitCast<int64_t>(double_value_) ==
+ BitCast<int64_t>(other_constant->double_value_);
+ } else {
+ ASSERT(!handle_.is_null());
+ return !other_constant->handle_.is_null() &&
+ *handle_ == *other_constant->handle_;
+ }
}
private:
+ virtual bool IsDeletable() const { return true; }
+
+ // If this is a numerical constant, handle_ either points to to the
+ // HeapObject the constant originated from or is null. If the
+ // constant is non-numeric, handle_ always points to a valid
+ // constant HeapObject.
Handle<Object> handle_;
- // The following two values represent the int32 and the double value of the
- // given constant if there is a lossless conversion between the constant
- // and the specific representation.
+ // We store the HConstant in the most specific form safely possible.
+ // The two flags, has_int32_value_ and has_double_value_ tell us if
+ // int32_value_ and double_value_ hold valid, safe representations
+ // of the constant. has_int32_value_ implies has_double_value_ but
+ // not the converse.
bool has_int32_value_ : 1;
bool has_double_value_ : 1;
int32_t int32_value_;
@@ -2578,6 +2734,7 @@ class HBinaryOperation: public HTemplateInstruction<3> {
if (IsCommutative() && left()->IsConstant()) return right();
return left();
}
+
HValue* MostConstantOperand() {
if (IsCommutative() && left()->IsConstant()) return left();
return right();
@@ -2586,6 +2743,8 @@ class HBinaryOperation: public HTemplateInstruction<3> {
virtual bool IsCommutative() const { return false; }
virtual void PrintDataTo(StringStream* stream);
+
+ DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation)
};
@@ -2660,6 +2819,9 @@ class HArgumentsElements: public HTemplateInstruction<0> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+ private:
+ virtual bool IsDeletable() const { return true; }
+
bool from_inlined_;
};
@@ -2679,6 +2841,9 @@ class HArgumentsLength: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -2711,16 +2876,42 @@ class HAccessArgumentsAt: public HTemplateInstruction<3> {
};
+enum BoundsCheckKeyMode {
+ DONT_ALLOW_SMI_KEY,
+ ALLOW_SMI_KEY
+};
+
+
class HBoundsCheck: public HTemplateInstruction<2> {
public:
- HBoundsCheck(HValue* index, HValue* length) {
+ HBoundsCheck(HValue* index, HValue* length,
+ BoundsCheckKeyMode key_mode = DONT_ALLOW_SMI_KEY)
+ : key_mode_(key_mode) {
SetOperandAt(0, index);
SetOperandAt(1, length);
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
}
- virtual Representation RequiredInputRepresentation(int index) {
+ virtual Representation RequiredInputRepresentation(int arg_index) {
+ if (key_mode_ == DONT_ALLOW_SMI_KEY ||
+ !length()->representation().IsTagged()) {
+ return Representation::Integer32();
+ }
+ // If the index is tagged and isn't constant, then allow the length
+ // to be tagged, since it is usually already tagged from loading it out of
+ // the length field of a JSArray. This allows for direct comparison without
+ // untagging.
+ if (index()->representation().IsTagged() && !index()->IsConstant()) {
+ return Representation::Tagged();
+ }
+ // Also allow the length to be tagged if the index is constant, because
+ // it can be tagged to allow direct comparison.
+ if (index()->IsConstant() &&
+ index()->representation().IsInteger32() &&
+ arg_index == 1) {
+ return Representation::Tagged();
+ }
return Representation::Integer32();
}
@@ -2733,6 +2924,7 @@ class HBoundsCheck: public HTemplateInstruction<2> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+ BoundsCheckKeyMode key_mode_;
};
@@ -2743,6 +2935,9 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
set_representation(Representation::Tagged());
SetFlag(kFlexibleRepresentation);
SetAllSideEffects();
+ observed_input_representation_[0] = Representation::Tagged();
+ observed_input_representation_[1] = Representation::None();
+ observed_input_representation_[2] = Representation::None();
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -2762,7 +2957,21 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
virtual HType CalculateInferredType();
+ virtual Representation ObservedInputRepresentation(int index) {
+ return observed_input_representation_[index];
+ }
+
+ void InitializeObservedInputRepresentation(Representation r) {
+ observed_input_representation_[1] = r;
+ observed_input_representation_[2] = r;
+ }
+
DECLARE_ABSTRACT_INSTRUCTION(BitwiseBinaryOperation)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
+
+ Representation observed_input_representation_[3];
};
@@ -2772,8 +2981,11 @@ class HMathFloorOfDiv: public HBinaryOperation {
: HBinaryOperation(context, left, right) {
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
+ SetFlag(kCanOverflow);
}
+ virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Integer32();
}
@@ -2782,6 +2994,9 @@ class HMathFloorOfDiv: public HBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -2814,6 +3029,9 @@ class HArithmeticBinaryOperation: public HBinaryOperation {
}
return HValue::InferredRepresentation();
}
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -3099,6 +3317,9 @@ class HGetCachedArrayIndex: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -3203,7 +3424,7 @@ class HPower: public HTemplateInstruction<2> {
}
HValue* left() { return OperandAt(0); }
- HValue* right() { return OperandAt(1); }
+ HValue* right() const { return OperandAt(1); }
virtual Representation RequiredInputRepresentation(int index) {
return index == 0
@@ -3215,6 +3436,11 @@ class HPower: public HTemplateInstruction<2> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ virtual bool IsDeletable() const {
+ return !right()->representation().IsTagged();
+ }
};
@@ -3232,6 +3458,9 @@ class HRandom: public HTemplateInstruction<1> {
}
DECLARE_CONCRETE_INSTRUCTION(Random)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -3378,6 +3607,47 @@ class HDiv: public HArithmeticBinaryOperation {
};
+class HMathMinMax: public HArithmeticBinaryOperation {
+ public:
+ enum Operation { kMathMin, kMathMax };
+
+ HMathMinMax(HValue* context, HValue* left, HValue* right, Operation op)
+ : HArithmeticBinaryOperation(context, left, right),
+ operation_(op) { }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return index == 0
+ ? Representation::Tagged()
+ : representation();
+ }
+
+ virtual Representation InferredRepresentation() {
+ if (left()->representation().IsInteger32() &&
+ right()->representation().IsInteger32()) {
+ return Representation::Integer32();
+ }
+ return Representation::Double();
+ }
+
+ virtual bool IsCommutative() const { return true; }
+
+ Operation operation() { return operation_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathMinMax)
+
+ protected:
+ virtual bool DataEquals(HValue* other) {
+ return other->IsMathMinMax() &&
+ HMathMinMax::cast(other)->operation_ == operation_;
+ }
+
+ virtual Range* InferRange(Zone* zone);
+
+ private:
+ Operation operation_;
+};
+
+
class HBitwise: public HBitwiseBinaryOperation {
public:
HBitwise(Token::Value op, HValue* context, HValue* left, HValue* right)
@@ -3472,13 +3742,32 @@ class HSar: public HBitwiseBinaryOperation {
};
+class HRor: public HBitwiseBinaryOperation {
+ public:
+ HRor(HValue* context, HValue* left, HValue* right)
+ : HBitwiseBinaryOperation(context, left, right) {
+ ChangeRepresentation(Representation::Integer32());
+ }
+
+ static HInstruction* NewHRor(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
+ DECLARE_CONCRETE_INSTRUCTION(Ror)
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
+};
+
+
class HOsrEntry: public HTemplateInstruction<0> {
public:
- explicit HOsrEntry(int ast_id) : ast_id_(ast_id) {
+ explicit HOsrEntry(BailoutId ast_id) : ast_id_(ast_id) {
SetGVNFlag(kChangesOsrEntries);
}
- int ast_id() const { return ast_id_; }
+ BailoutId ast_id() const { return ast_id_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -3487,7 +3776,7 @@ class HOsrEntry: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(OsrEntry)
private:
- int ast_id_;
+ BailoutId ast_id_;
};
@@ -3581,12 +3870,12 @@ class HLoadGlobalCell: public HTemplateInstruction<0> {
}
Handle<JSGlobalPropertyCell> cell() const { return cell_; }
- bool RequiresHoleCheck();
+ bool RequiresHoleCheck() const;
virtual void PrintDataTo(StringStream* stream);
virtual intptr_t Hashcode() {
- ASSERT(!HEAP->allow_allocation(false));
+ ASSERT_ALLOCATION_DISABLED;
return reinterpret_cast<intptr_t>(*cell_);
}
@@ -3603,6 +3892,8 @@ class HLoadGlobalCell: public HTemplateInstruction<0> {
}
private:
+ virtual bool IsDeletable() const { return !RequiresHoleCheck(); }
+
Handle<JSGlobalPropertyCell> cell_;
PropertyDetails details_;
};
@@ -3650,7 +3941,8 @@ inline bool StoringValueNeedsWriteBarrier(HValue* value) {
inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
HValue* new_space_dominator) {
- return !object->IsAllocateObject() || (object != new_space_dominator);
+ return (!object->IsAllocateObject() && !object->IsFastLiteral()) ||
+ (object != new_space_dominator);
}
@@ -3763,7 +4055,7 @@ class HLoadContextSlot: public HUnaryOperation {
return mode_ == kCheckDeoptimize;
}
- bool RequiresHoleCheck() {
+ bool RequiresHoleCheck() const {
return mode_ != kNoCheck;
}
@@ -3782,6 +4074,8 @@ class HLoadContextSlot: public HUnaryOperation {
}
private:
+ virtual bool IsDeletable() const { return !RequiresHoleCheck(); }
+
int slot_index_;
Mode mode_;
};
@@ -3874,6 +4168,8 @@ class HLoadNamedField: public HUnaryOperation {
}
private:
+ virtual bool IsDeletable() const { return true; }
+
bool is_in_object_;
int offset_;
};
@@ -3884,7 +4180,8 @@ class HLoadNamedFieldPolymorphic: public HTemplateInstruction<2> {
HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
SmallMapList* types,
- Handle<String> name);
+ Handle<String> name,
+ Zone* zone);
HValue* context() { return OperandAt(0); }
HValue* object() { return OperandAt(1); }
@@ -3971,162 +4268,134 @@ class ArrayInstructionInterface {
virtual ~ArrayInstructionInterface() { };
};
-class HLoadKeyedFastElement
- : public HTemplateInstruction<2>, public ArrayInstructionInterface {
+
+class HLoadKeyed
+ : public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
- enum HoleCheckMode { PERFORM_HOLE_CHECK, OMIT_HOLE_CHECK };
+ HLoadKeyed(HValue* obj,
+ HValue* key,
+ HValue* dependency,
+ ElementsKind elements_kind)
+ : bit_field_(0) {
+ bit_field_ = ElementsKindField::encode(elements_kind);
- HLoadKeyedFastElement(HValue* obj,
- HValue* key,
- HoleCheckMode hole_check_mode = PERFORM_HOLE_CHECK)
- : hole_check_mode_(hole_check_mode),
- index_offset_(0),
- is_dehoisted_(false) {
SetOperandAt(0, obj);
SetOperandAt(1, key);
- set_representation(Representation::Tagged());
- SetGVNFlag(kDependsOnArrayElements);
- SetFlag(kUseGVN);
- }
+ SetOperandAt(2, dependency);
- HValue* object() { return OperandAt(0); }
- HValue* key() { return OperandAt(1); }
- uint32_t index_offset() { return index_offset_; }
- void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
- HValue* GetKey() { return key(); }
- void SetKey(HValue* key) { SetOperandAt(1, key); }
- bool IsDehoisted() { return is_dehoisted_; }
- void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
-
- virtual Representation RequiredInputRepresentation(int index) {
- // The key is supposed to be Integer32.
- return index == 0
- ? Representation::Tagged()
- : Representation::Integer32();
- }
+ if (!is_external()) {
+ // I can detect the case between storing double (holey and fast) and
+ // smi/object by looking at elements_kind_.
+ ASSERT(IsFastSmiOrObjectElementsKind(elements_kind) ||
+ IsFastDoubleElementsKind(elements_kind));
- virtual void PrintDataTo(StringStream* stream);
+ if (IsFastSmiOrObjectElementsKind(elements_kind)) {
+ if (IsFastSmiElementsKind(elements_kind) &&
+ IsFastPackedElementsKind(elements_kind)) {
+ set_type(HType::Smi());
+ }
- bool RequiresHoleCheck();
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement)
-
- protected:
- virtual bool DataEquals(HValue* other) {
- if (!other->IsLoadKeyedFastElement()) return false;
- HLoadKeyedFastElement* other_load = HLoadKeyedFastElement::cast(other);
- if (is_dehoisted_ && index_offset_ != other_load->index_offset_)
- return false;
- return hole_check_mode_ == other_load->hole_check_mode_;
- }
-
- private:
- HoleCheckMode hole_check_mode_;
- uint32_t index_offset_;
- bool is_dehoisted_;
-};
+ set_representation(Representation::Tagged());
+ SetGVNFlag(kDependsOnArrayElements);
+ } else {
+ set_representation(Representation::Double());
+ SetGVNFlag(kDependsOnDoubleArrayElements);
+ }
+ } else {
+ if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
+ elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
+ set_representation(Representation::Double());
+ } else {
+ set_representation(Representation::Integer32());
+ }
+ SetGVNFlag(kDependsOnSpecializedArrayElements);
+ // Native code could change the specialized array.
+ SetGVNFlag(kDependsOnCalls);
+ }
-class HLoadKeyedFastDoubleElement
- : public HTemplateInstruction<2>, public ArrayInstructionInterface {
- public:
- HLoadKeyedFastDoubleElement(HValue* elements, HValue* key)
- : index_offset_(0), is_dehoisted_(false) {
- SetOperandAt(0, elements);
- SetOperandAt(1, key);
- set_representation(Representation::Double());
- SetGVNFlag(kDependsOnDoubleArrayElements);
SetFlag(kUseGVN);
}
+ bool is_external() const {
+ return IsExternalArrayElementsKind(elements_kind());
+ }
HValue* elements() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
- uint32_t index_offset() { return index_offset_; }
- void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* dependency() { return OperandAt(2); }
+ uint32_t index_offset() { return IndexOffsetField::decode(bit_field_); }
+ void SetIndexOffset(uint32_t index_offset) {
+ bit_field_ = IndexOffsetField::update(bit_field_, index_offset);
+ }
HValue* GetKey() { return key(); }
void SetKey(HValue* key) { SetOperandAt(1, key); }
- bool IsDehoisted() { return is_dehoisted_; }
- void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
-
- virtual Representation RequiredInputRepresentation(int index) {
- // The key is supposed to be Integer32.
- return index == 0
- ? Representation::Tagged()
- : Representation::Integer32();
+ bool IsDehoisted() { return IsDehoistedField::decode(bit_field_); }
+ void SetDehoisted(bool is_dehoisted) {
+ bit_field_ = IsDehoistedField::update(bit_field_, is_dehoisted);
+ }
+ ElementsKind elements_kind() const {
+ return ElementsKindField::decode(bit_field_);
}
- virtual void PrintDataTo(StringStream* stream);
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-
- private:
- uint32_t index_offset_;
- bool is_dehoisted_;
-};
-
-
-class HLoadKeyedSpecializedArrayElement
- : public HTemplateInstruction<2>, public ArrayInstructionInterface {
- public:
- HLoadKeyedSpecializedArrayElement(HValue* external_elements,
- HValue* key,
- ElementsKind elements_kind)
- : elements_kind_(elements_kind),
- index_offset_(0),
- is_dehoisted_(false) {
- SetOperandAt(0, external_elements);
- SetOperandAt(1, key);
- if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
- elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- set_representation(Representation::Double());
- } else {
- set_representation(Representation::Integer32());
+ virtual Representation RequiredInputRepresentation(int index) {
+ // kind_fast: tagged[int32] (none)
+ // kind_double: tagged[int32] (none)
+ // kind_external: external[int32] (none)
+ if (index == 0) {
+ return is_external() ? Representation::External()
+ : Representation::Tagged();
}
- SetGVNFlag(kDependsOnSpecializedArrayElements);
- // Native code could change the specialized array.
- SetGVNFlag(kDependsOnCalls);
- SetFlag(kUseGVN);
+ if (index == 1) return Representation::Integer32();
+ return Representation::None();
}
virtual void PrintDataTo(StringStream* stream);
- virtual Representation RequiredInputRepresentation(int index) {
- // The key is supposed to be Integer32, but the base pointer
- // for the element load is a naked pointer.
- return index == 0
- ? Representation::External()
- : Representation::Integer32();
- }
-
- HValue* external_pointer() { return OperandAt(0); }
- HValue* key() { return OperandAt(1); }
- ElementsKind elements_kind() const { return elements_kind_; }
- uint32_t index_offset() { return index_offset_; }
- void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
- HValue* GetKey() { return key(); }
- void SetKey(HValue* key) { SetOperandAt(1, key); }
- bool IsDehoisted() { return is_dehoisted_; }
- void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
+ bool RequiresHoleCheck() const;
virtual Range* InferRange(Zone* zone);
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement)
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyed)
protected:
virtual bool DataEquals(HValue* other) {
- if (!other->IsLoadKeyedSpecializedArrayElement()) return false;
- HLoadKeyedSpecializedArrayElement* cast_other =
- HLoadKeyedSpecializedArrayElement::cast(other);
- return elements_kind_ == cast_other->elements_kind();
+ if (!other->IsLoadKeyed()) return false;
+ HLoadKeyed* other_load = HLoadKeyed::cast(other);
+
+ if (IsDehoisted() && index_offset() != other_load->index_offset())
+ return false;
+ return elements_kind() == other_load->elements_kind();
}
private:
- ElementsKind elements_kind_;
- uint32_t index_offset_;
- bool is_dehoisted_;
+ virtual bool IsDeletable() const {
+ return !RequiresHoleCheck();
+ }
+
+ // Establish some checks around our packed fields
+ enum LoadKeyedBits {
+ kBitsForElementsKind = 5,
+ kBitsForIndexOffset = 26,
+ kBitsForIsDehoisted = 1,
+
+ kStartElementsKind = 0,
+ kStartIndexOffset = kStartElementsKind + kBitsForElementsKind,
+ kStartIsDehoisted = kStartIndexOffset + kBitsForIndexOffset
+ };
+
+ STATIC_ASSERT((kBitsForElementsKind + kBitsForIndexOffset +
+ kBitsForIsDehoisted) <= sizeof(uint32_t)*8);
+ STATIC_ASSERT(kElementsKindCount <= (1 << kBitsForElementsKind));
+ class ElementsKindField:
+ public BitField<ElementsKind, kStartElementsKind, kBitsForElementsKind>
+ {}; // NOLINT
+ class IndexOffsetField:
+ public BitField<uint32_t, kStartIndexOffset, kBitsForIndexOffset>
+ {}; // NOLINT
+ class IsDehoistedField:
+ public BitField<bool, kStartIsDehoisted, kBitsForIsDehoisted>
+ {}; // NOLINT
+ uint32_t bit_field_;
};
@@ -4147,6 +4416,7 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> {
virtual void PrintDataTo(StringStream* stream);
virtual Representation RequiredInputRepresentation(int index) {
+ // tagged[tagged]
return Representation::Tagged();
}
@@ -4204,6 +4474,10 @@ class HStoreNamedField: public HTemplateInstruction<2> {
ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
}
+ bool NeedsWriteBarrierForMap() {
+ return ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
+ }
+
private:
Handle<String> name_;
bool is_in_object_;
@@ -4248,83 +4522,56 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> {
};
-class HStoreKeyedFastElement
+class HStoreKeyed
: public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
- HStoreKeyedFastElement(HValue* obj, HValue* key, HValue* val,
- ElementsKind elements_kind = FAST_ELEMENTS)
+ HStoreKeyed(HValue* obj, HValue* key, HValue* val,
+ ElementsKind elements_kind)
: elements_kind_(elements_kind), index_offset_(0), is_dehoisted_(false) {
SetOperandAt(0, obj);
SetOperandAt(1, key);
SetOperandAt(2, val);
- SetGVNFlag(kChangesArrayElements);
- }
-
- virtual Representation RequiredInputRepresentation(int index) {
- // The key is supposed to be Integer32.
- return index == 1
- ? Representation::Integer32()
- : Representation::Tagged();
- }
-
- HValue* object() { return OperandAt(0); }
- HValue* key() { return OperandAt(1); }
- HValue* value() { return OperandAt(2); }
- bool value_is_smi() {
- return elements_kind_ == FAST_SMI_ONLY_ELEMENTS;
- }
- uint32_t index_offset() { return index_offset_; }
- void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
- HValue* GetKey() { return key(); }
- void SetKey(HValue* key) { SetOperandAt(1, key); }
- bool IsDehoisted() { return is_dehoisted_; }
- void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
- bool NeedsWriteBarrier() {
- if (value_is_smi()) {
- return false;
+ if (is_external()) {
+ SetGVNFlag(kChangesSpecializedArrayElements);
+ } else if (IsFastDoubleElementsKind(elements_kind)) {
+ SetGVNFlag(kChangesDoubleArrayElements);
+ SetFlag(kDeoptimizeOnUndefined);
} else {
- return StoringValueNeedsWriteBarrier(value());
+ SetGVNFlag(kChangesArrayElements);
}
}
- virtual void PrintDataTo(StringStream* stream);
-
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement)
-
- private:
- ElementsKind elements_kind_;
- uint32_t index_offset_;
- bool is_dehoisted_;
-};
-
-
-class HStoreKeyedFastDoubleElement
- : public HTemplateInstruction<3>, public ArrayInstructionInterface {
- public:
- HStoreKeyedFastDoubleElement(HValue* elements,
- HValue* key,
- HValue* val)
- : index_offset_(0), is_dehoisted_(false) {
- SetOperandAt(0, elements);
- SetOperandAt(1, key);
- SetOperandAt(2, val);
- SetGVNFlag(kChangesDoubleArrayElements);
- }
-
virtual Representation RequiredInputRepresentation(int index) {
- if (index == 1) {
+ // kind_fast: tagged[int32] = tagged
+ // kind_double: tagged[int32] = double
+ // kind_external: external[int32] = (double | int32)
+ if (index == 0) {
+ return is_external() ? Representation::External()
+ : Representation::Tagged();
+ } else if (index == 1) {
return Representation::Integer32();
- } else if (index == 2) {
+ }
+
+ ASSERT_EQ(index, 2);
+ if (IsDoubleOrFloatElementsKind(elements_kind())) {
return Representation::Double();
- } else {
- return Representation::Tagged();
}
+
+ return is_external() ? Representation::Integer32()
+ : Representation::Tagged();
}
+ bool is_external() const {
+ return IsExternalArrayElementsKind(elements_kind());
+ }
HValue* elements() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
HValue* value() { return OperandAt(2); }
+ bool value_is_smi() const {
+ return IsFastSmiElementsKind(elements_kind_);
+ }
+ ElementsKind elements_kind() const { return elements_kind_; }
uint32_t index_offset() { return index_offset_; }
void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
HValue* GetKey() { return key(); }
@@ -4333,64 +4580,18 @@ class HStoreKeyedFastDoubleElement
void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
bool NeedsWriteBarrier() {
- return StoringValueNeedsWriteBarrier(value());
+ if (value_is_smi()) {
+ return false;
+ } else {
+ return StoringValueNeedsWriteBarrier(value());
+ }
}
bool NeedsCanonicalization();
virtual void PrintDataTo(StringStream* stream);
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement)
-
- private:
- uint32_t index_offset_;
- bool is_dehoisted_;
-};
-
-
-class HStoreKeyedSpecializedArrayElement
- : public HTemplateInstruction<3>, public ArrayInstructionInterface {
- public:
- HStoreKeyedSpecializedArrayElement(HValue* external_elements,
- HValue* key,
- HValue* val,
- ElementsKind elements_kind)
- : elements_kind_(elements_kind), index_offset_(0), is_dehoisted_(false) {
- SetGVNFlag(kChangesSpecializedArrayElements);
- SetOperandAt(0, external_elements);
- SetOperandAt(1, key);
- SetOperandAt(2, val);
- }
-
- virtual void PrintDataTo(StringStream* stream);
-
- virtual Representation RequiredInputRepresentation(int index) {
- if (index == 0) {
- return Representation::External();
- } else {
- bool float_or_double_elements =
- elements_kind() == EXTERNAL_FLOAT_ELEMENTS ||
- elements_kind() == EXTERNAL_DOUBLE_ELEMENTS;
- if (index == 2 && float_or_double_elements) {
- return Representation::Double();
- } else {
- return Representation::Integer32();
- }
- }
- }
-
- HValue* external_pointer() { return OperandAt(0); }
- HValue* key() { return OperandAt(1); }
- HValue* value() { return OperandAt(2); }
- ElementsKind elements_kind() const { return elements_kind_; }
- uint32_t index_offset() { return index_offset_; }
- void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
- HValue* GetKey() { return key(); }
- void SetKey(HValue* key) { SetOperandAt(1, key); }
- bool IsDehoisted() { return is_dehoisted_; }
- void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
-
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement)
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyed)
private:
ElementsKind elements_kind_;
@@ -4421,6 +4622,7 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> {
StrictModeFlag strict_mode_flag() { return strict_mode_flag_; }
virtual Representation RequiredInputRepresentation(int index) {
+ // tagged[tagged] = tagged
return Representation::Tagged();
}
@@ -4443,8 +4645,14 @@ class HTransitionElementsKind: public HTemplateInstruction<1> {
SetOperandAt(0, object);
SetFlag(kUseGVN);
SetGVNFlag(kChangesElementsKind);
- SetGVNFlag(kChangesElementsPointer);
- SetGVNFlag(kChangesNewSpacePromotion);
+ if (original_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+ if (transitioned_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
set_representation(Representation::Tagged());
}
@@ -4480,6 +4688,7 @@ class HStringAdd: public HBinaryOperation {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -4494,6 +4703,10 @@ class HStringAdd: public HBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ // TODO(svenpanne) Might be safe, but leave it out until we know for sure.
+ // private:
+ // virtual bool IsDeletable() const { return true; }
};
@@ -4528,6 +4741,10 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
virtual Range* InferRange(Zone* zone) {
return new(zone) Range(0, String::kMaxUtf16CodeUnit);
}
+
+ // TODO(svenpanne) Might be safe, but leave it out until we know for sure.
+ // private:
+ // virtual bool IsDeletable() const { return true; }
};
@@ -4554,6 +4771,10 @@ class HStringCharFromCode: public HTemplateInstruction<2> {
virtual bool DataEquals(HValue* other) { return true; }
DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode)
+
+ // TODO(svenpanne) Might be safe, but leave it out until we know for sure.
+ // private:
+ // virtual bool IsDeletable() const { return true; }
};
@@ -4582,6 +4803,9 @@ class HStringLength: public HUnaryOperation {
virtual Range* InferRange(Zone* zone) {
return new(zone) Range(0, String::kMaxLength);
}
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -4608,6 +4832,9 @@ class HAllocateObject: public HTemplateInstruction<1> {
DECLARE_CONCRETE_INSTRUCTION(AllocateObject)
private:
+ // TODO(svenpanne) Might be safe, but leave it out until we know for sure.
+ // virtual bool IsDeletable() const { return true; }
+
Handle<JSFunction> constructor_;
};
@@ -4624,6 +4851,8 @@ class HMaterializedLiteral: public HTemplateInstruction<V> {
int depth() const { return depth_; }
private:
+ virtual bool IsDeletable() const { return true; }
+
int literal_index_;
int depth_;
};
@@ -4682,7 +4911,7 @@ class HArrayLiteral: public HMaterializedLiteral<1> {
HValue* context() { return OperandAt(0); }
ElementsKind boilerplate_elements_kind() const {
if (!boilerplate_object_->IsJSObject()) {
- return FAST_ELEMENTS;
+ return TERMINAL_FAST_ELEMENTS_KIND;
}
return Handle<JSObject>::cast(boilerplate_object_)->GetElementsKind();
}
@@ -4744,10 +4973,12 @@ class HObjectLiteral: public HMaterializedLiteral<1> {
class HRegExpLiteral: public HMaterializedLiteral<1> {
public:
HRegExpLiteral(HValue* context,
+ Handle<FixedArray> literals,
Handle<String> pattern,
Handle<String> flags,
int literal_index)
: HMaterializedLiteral<1>(literal_index, 0),
+ literals_(literals),
pattern_(pattern),
flags_(flags) {
SetOperandAt(0, context);
@@ -4755,6 +4986,7 @@ class HRegExpLiteral: public HMaterializedLiteral<1> {
}
HValue* context() { return OperandAt(0); }
+ Handle<FixedArray> literals() { return literals_; }
Handle<String> pattern() { return pattern_; }
Handle<String> flags() { return flags_; }
@@ -4766,6 +4998,7 @@ class HRegExpLiteral: public HMaterializedLiteral<1> {
DECLARE_CONCRETE_INSTRUCTION(RegExpLiteral)
private:
+ Handle<FixedArray> literals_;
Handle<String> pattern_;
Handle<String> flags_;
};
@@ -4795,6 +5028,8 @@ class HFunctionLiteral: public HTemplateInstruction<1> {
bool pretenure() const { return pretenure_; }
private:
+ virtual bool IsDeletable() const { return true; }
+
Handle<SharedFunctionInfo> shared_info_;
bool pretenure_;
};
@@ -4811,7 +5046,6 @@ class HTypeof: public HTemplateInstruction<2> {
HValue* context() { return OperandAt(0); }
HValue* value() { return OperandAt(1); }
- virtual HValue* Canonicalize();
virtual void PrintDataTo(StringStream* stream);
virtual Representation RequiredInputRepresentation(int index) {
@@ -4819,6 +5053,9 @@ class HTypeof: public HTemplateInstruction<2> {
}
DECLARE_CONCRETE_INSTRUCTION(Typeof)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -4837,6 +5074,9 @@ class HToFastProperties: public HUnaryOperation {
}
DECLARE_CONCRETE_INSTRUCTION(ToFastProperties)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -4851,6 +5091,9 @@ class HValueOf: public HUnaryOperation {
}
DECLARE_CONCRETE_INSTRUCTION(ValueOf)
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
@@ -5047,6 +5290,9 @@ class HLoadFieldByIndex : public HTemplateInstruction<2> {
}
DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex);
+
+ private:
+ virtual bool IsDeletable() const { return true; }
};
diff --git a/src/3rdparty/v8/src/hydrogen.cc b/src/3rdparty/v8/src/hydrogen.cc
index cfe80d2..043d567 100644
--- a/src/3rdparty/v8/src/hydrogen.cc
+++ b/src/3rdparty/v8/src/hydrogen.cc
@@ -55,19 +55,19 @@ namespace internal {
HBasicBlock::HBasicBlock(HGraph* graph)
: block_id_(graph->GetNextBlockID()),
graph_(graph),
- phis_(4),
+ phis_(4, graph->zone()),
first_(NULL),
last_(NULL),
end_(NULL),
loop_information_(NULL),
- predecessors_(2),
+ predecessors_(2, graph->zone()),
dominator_(NULL),
- dominated_blocks_(4),
+ dominated_blocks_(4, graph->zone()),
last_environment_(NULL),
argument_count_(-1),
first_instruction_index_(-1),
last_instruction_index_(-1),
- deleted_phis_(4),
+ deleted_phis_(4, graph->zone()),
parent_loop_header_(NULL),
is_inline_return_target_(false),
is_deoptimizing_(false),
@@ -76,7 +76,7 @@ HBasicBlock::HBasicBlock(HGraph* graph)
void HBasicBlock::AttachLoopInformation() {
ASSERT(!IsLoopHeader());
- loop_information_ = new(zone()) HLoopInformation(this);
+ loop_information_ = new(zone()) HLoopInformation(this, zone());
}
@@ -88,7 +88,7 @@ void HBasicBlock::DetachLoopInformation() {
void HBasicBlock::AddPhi(HPhi* phi) {
ASSERT(!IsStartBlock());
- phis_.Add(phi);
+ phis_.Add(phi, zone());
phi->SetBlock(this);
}
@@ -119,29 +119,30 @@ void HBasicBlock::AddInstruction(HInstruction* instr) {
HDeoptimize* HBasicBlock::CreateDeoptimize(
HDeoptimize::UseEnvironment has_uses) {
ASSERT(HasEnvironment());
- if (has_uses == HDeoptimize::kNoUses) return new(zone()) HDeoptimize(0);
+ if (has_uses == HDeoptimize::kNoUses)
+ return new(zone()) HDeoptimize(0, zone());
HEnvironment* environment = last_environment();
- HDeoptimize* instr = new(zone()) HDeoptimize(environment->length());
+ HDeoptimize* instr = new(zone()) HDeoptimize(environment->length(), zone());
for (int i = 0; i < environment->length(); i++) {
HValue* val = environment->values()->at(i);
- instr->AddEnvironmentValue(val);
+ instr->AddEnvironmentValue(val, zone());
}
return instr;
}
-HSimulate* HBasicBlock::CreateSimulate(int ast_id) {
+HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id) {
ASSERT(HasEnvironment());
HEnvironment* environment = last_environment();
- ASSERT(ast_id == AstNode::kNoNumber ||
+ ASSERT(ast_id.IsNone() ||
environment->closure()->shared()->VerifyBailoutId(ast_id));
int push_count = environment->push_count();
int pop_count = environment->pop_count();
- HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count);
+ HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count, zone());
for (int i = push_count - 1; i >= 0; --i) {
instr->AddPushedValue(environment->ExpressionStackAt(i));
}
@@ -165,32 +166,31 @@ void HBasicBlock::Finish(HControlInstruction* end) {
void HBasicBlock::Goto(HBasicBlock* block, FunctionState* state) {
- bool drop_extra = state != NULL && state->drop_extra();
- bool arguments_pushed = state != NULL && state->arguments_pushed();
+ bool drop_extra = state != NULL &&
+ state->inlining_kind() == DROP_EXTRA_ON_RETURN;
if (block->IsInlineReturnTarget()) {
- AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
+ AddInstruction(new(zone()) HLeaveInlined());
last_environment_ = last_environment()->DiscardInlined(drop_extra);
}
- AddSimulate(AstNode::kNoNumber);
+ AddSimulate(BailoutId::None());
HGoto* instr = new(zone()) HGoto(block);
Finish(instr);
}
void HBasicBlock::AddLeaveInlined(HValue* return_value,
- HBasicBlock* target,
FunctionState* state) {
- bool drop_extra = state != NULL && state->drop_extra();
- bool arguments_pushed = state != NULL && state->arguments_pushed();
+ HBasicBlock* target = state->function_return();
+ bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;
ASSERT(target->IsInlineReturnTarget());
ASSERT(return_value != NULL);
- AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
+ AddInstruction(new(zone()) HLeaveInlined());
last_environment_ = last_environment()->DiscardInlined(drop_extra);
last_environment()->Push(return_value);
- AddSimulate(AstNode::kNoNumber);
+ AddSimulate(BailoutId::None());
HGoto* instr = new(zone()) HGoto(target);
Finish(instr);
}
@@ -203,7 +203,7 @@ void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
}
-void HBasicBlock::SetJoinId(int ast_id) {
+void HBasicBlock::SetJoinId(BailoutId ast_id) {
int length = predecessors_.length();
ASSERT(length > 0);
for (int i = 0; i < length; i++) {
@@ -278,7 +278,7 @@ void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
SetInitialEnvironment(pred->last_environment()->Copy());
}
- predecessors_.Add(pred);
+ predecessors_.Add(pred, zone());
}
@@ -291,7 +291,7 @@ void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
dominated_blocks_[index]->block_id() < block->block_id()) {
++index;
}
- dominated_blocks_.InsertAt(index, block);
+ dominated_blocks_.InsertAt(index, block, zone());
}
@@ -404,7 +404,7 @@ void HBasicBlock::Verify() {
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
- this->back_edges_.Add(block);
+ this->back_edges_.Add(block, block->zone());
AddBlock(block);
}
@@ -430,7 +430,7 @@ void HLoopInformation::AddBlock(HBasicBlock* block) {
AddBlock(block->parent_loop_header());
} else {
block->set_parent_loop_header(loop_header());
- blocks_.Add(block);
+ blocks_.Add(block, block->zone());
for (int i = 0; i < block->predecessors()->length(); ++i) {
AddBlock(block->predecessors()->at(i));
}
@@ -451,8 +451,8 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
int block_count,
HBasicBlock* dont_visit)
: visited_count_(0),
- stack_(16),
- reachable_(block_count, ZONE),
+ stack_(16, entry_block->zone()),
+ reachable_(block_count, entry_block->zone()),
dont_visit_(dont_visit) {
PushBlock(entry_block);
Analyze();
@@ -466,7 +466,7 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
if (block != NULL && block != dont_visit_ &&
!reachable_.Contains(block->block_id())) {
reachable_.Add(block->block_id());
- stack_.Add(block);
+ stack_.Add(block, block->zone());
visited_count_++;
}
}
@@ -526,7 +526,8 @@ void HGraph::Verify(bool do_full_verify) const {
// Check that all join blocks have predecessors that end with an
// unconditional goto and agree on their environment node id.
if (block->predecessors()->length() >= 2) {
- int id = block->predecessors()->first()->last_environment()->ast_id();
+ BailoutId id =
+ block->predecessors()->first()->last_environment()->ast_id();
for (int k = 0; k < block->predecessors()->length(); k++) {
HBasicBlock* predecessor = block->predecessors()->at(k);
ASSERT(predecessor->end()->IsGoto());
@@ -567,9 +568,9 @@ void HGraph::Verify(bool do_full_verify) const {
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
- Object* value) {
+ Handle<Object> value) {
if (!pointer->is_set()) {
- HConstant* constant = new(zone()) HConstant(Handle<Object>(value),
+ HConstant* constant = new(zone()) HConstant(value,
Representation::Tagged());
constant->InsertAfter(GetConstantUndefined());
pointer->set(constant);
@@ -578,28 +579,40 @@ HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
}
+HConstant* HGraph::GetConstantInt32(SetOncePointer<HConstant>* pointer,
+ int32_t value) {
+ if (!pointer->is_set()) {
+ HConstant* constant =
+ new(zone()) HConstant(value, Representation::Integer32());
+ constant->InsertAfter(GetConstantUndefined());
+ pointer->set(constant);
+ }
+ return pointer->get();
+}
+
+
HConstant* HGraph::GetConstant1() {
- return GetConstant(&constant_1_, Smi::FromInt(1));
+ return GetConstantInt32(&constant_1_, 1);
}
HConstant* HGraph::GetConstantMinus1() {
- return GetConstant(&constant_minus1_, Smi::FromInt(-1));
+ return GetConstantInt32(&constant_minus1_, -1);
}
HConstant* HGraph::GetConstantTrue() {
- return GetConstant(&constant_true_, isolate()->heap()->true_value());
+ return GetConstant(&constant_true_, isolate()->factory()->true_value());
}
HConstant* HGraph::GetConstantFalse() {
- return GetConstant(&constant_false_, isolate()->heap()->false_value());
+ return GetConstant(&constant_false_, isolate()->factory()->false_value());
}
HConstant* HGraph::GetConstantHole() {
- return GetConstant(&constant_hole_, isolate()->heap()->the_hole_value());
+ return GetConstant(&constant_hole_, isolate()->factory()->the_hole_value());
}
@@ -612,8 +625,8 @@ HGraphBuilder::HGraphBuilder(CompilationInfo* info,
graph_(NULL),
current_block_(NULL),
inlined_count_(0),
- globals_(10),
- zone_(info->isolate()->zone()),
+ globals_(10, info->zone()),
+ zone_(info->zone()),
inline_bailout_(false) {
// This is not initialized in the initializer list because the
// constructor for the initial state relies on function_state_ == NULL
@@ -623,7 +636,7 @@ HGraphBuilder::HGraphBuilder(CompilationInfo* info,
HBasicBlock* HGraphBuilder::CreateJoin(HBasicBlock* first,
HBasicBlock* second,
- int join_id) {
+ BailoutId join_id) {
if (first == NULL) {
return second;
} else if (second == NULL) {
@@ -676,61 +689,26 @@ HGraph::HGraph(CompilationInfo* info)
: isolate_(info->isolate()),
next_block_id_(0),
entry_block_(NULL),
- blocks_(8),
- values_(16),
- phi_list_(NULL) {
+ blocks_(8, info->zone()),
+ values_(16, info->zone()),
+ phi_list_(NULL),
+ uint32_instructions_(NULL),
+ info_(info),
+ zone_(info->zone()),
+ is_recursive_(false),
+ use_optimistic_licm_(false),
+ type_change_checksum_(0) {
start_environment_ =
- new(zone()) HEnvironment(NULL, info->scope(), info->closure());
- start_environment_->set_ast_id(AstNode::kFunctionEntryId);
+ new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
+ start_environment_->set_ast_id(BailoutId::FunctionEntry());
entry_block_ = CreateBasicBlock();
entry_block_->SetInitialEnvironment(start_environment_);
}
-Handle<Code> HGraph::Compile(CompilationInfo* info) {
- int values = GetMaximumValueID();
- if (values > LUnallocated::kMaxVirtualRegisters) {
- if (FLAG_trace_bailout) {
- PrintF("Not enough virtual registers for (values).\n");
- }
- return Handle<Code>::null();
- }
- LAllocator allocator(values, this);
- LChunkBuilder builder(info, this, &allocator);
- LChunk* chunk = builder.Build();
- if (chunk == NULL) return Handle<Code>::null();
-
- if (!allocator.Allocate(chunk)) {
- if (FLAG_trace_bailout) {
- PrintF("Not enough virtual registers (regalloc).\n");
- }
- return Handle<Code>::null();
- }
-
- MacroAssembler assembler(info->isolate(), NULL, 0);
- LCodeGen generator(chunk, &assembler, info);
-
- chunk->MarkEmptyBlocks();
-
- if (generator.GenerateCode()) {
- if (FLAG_trace_codegen) {
- PrintF("Crankshaft Compiler - ");
- }
- CodeGenerator::MakeCodePrologue(info);
- Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
- Handle<Code> code =
- CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
- generator.FinishCode(code);
- CodeGenerator::PrintCode(code, info);
- return code;
- }
- return Handle<Code>::null();
-}
-
-
HBasicBlock* HGraph::CreateBasicBlock() {
HBasicBlock* result = new(zone()) HBasicBlock(this);
- blocks_.Add(result);
+ blocks_.Add(result, zone());
return result;
}
@@ -748,66 +726,319 @@ void HGraph::Canonicalize() {
}
}
+// Block ordering was implemented with two mutually recursive methods,
+// HGraph::Postorder and HGraph::PostorderLoopBlocks.
+// The recursion could lead to stack overflow so the algorithm has been
+// implemented iteratively.
+// At a high level the algorithm looks like this:
+//
+// Postorder(block, loop_header) : {
+// if (block has already been visited or is of another loop) return;
+// mark block as visited;
+// if (block is a loop header) {
+// VisitLoopMembers(block, loop_header);
+// VisitSuccessorsOfLoopHeader(block);
+// } else {
+// VisitSuccessors(block)
+// }
+// put block in result list;
+// }
+//
+// VisitLoopMembers(block, outer_loop_header) {
+// foreach (block b in block loop members) {
+// VisitSuccessorsOfLoopMember(b, outer_loop_header);
+// if (b is loop header) VisitLoopMembers(b);
+// }
+// }
+//
+// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
+// foreach (block b in block successors) Postorder(b, outer_loop_header)
+// }
+//
+// VisitSuccessorsOfLoopHeader(block) {
+// foreach (block b in block successors) Postorder(b, block)
+// }
+//
+// VisitSuccessors(block, loop_header) {
+// foreach (block b in block successors) Postorder(b, loop_header)
+// }
+//
+// The ordering is started calling Postorder(entry, NULL).
+//
+// Each instance of PostorderProcessor represents the "stack frame" of the
+// recursion, and particularly keeps the state of the loop (iteration) of the
+// "Visit..." function it represents.
+// To recycle memory we keep all the frames in a double linked list but
+// this means that we cannot use constructors to initialize the frames.
+//
+class PostorderProcessor : public ZoneObject {
+ public:
+ // Back link (towards the stack bottom).
+ PostorderProcessor* parent() {return father_; }
+ // Forward link (towards the stack top).
+ PostorderProcessor* child() {return child_; }
+ HBasicBlock* block() { return block_; }
+ HLoopInformation* loop() { return loop_; }
+ HBasicBlock* loop_header() { return loop_header_; }
+
+ static PostorderProcessor* CreateEntryProcessor(Zone* zone,
+ HBasicBlock* block,
+ BitVector* visited) {
+ PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
+ return result->SetupSuccessors(zone, block, NULL, visited);
+ }
+
+ PostorderProcessor* PerformStep(Zone* zone,
+ BitVector* visited,
+ ZoneList<HBasicBlock*>* order) {
+ PostorderProcessor* next =
+ PerformNonBacktrackingStep(zone, visited, order);
+ if (next != NULL) {
+ return next;
+ } else {
+ return Backtrack(zone, visited, order);
+ }
+ }
-void HGraph::OrderBlocks() {
- HPhase phase("H_Block ordering");
- BitVector visited(blocks_.length(), zone());
-
- ZoneList<HBasicBlock*> reverse_result(8);
- HBasicBlock* start = blocks_[0];
- Postorder(start, &visited, &reverse_result, NULL);
+ private:
+ explicit PostorderProcessor(PostorderProcessor* father)
+ : father_(father), child_(NULL), successor_iterator(NULL) { }
+
+ // Each enum value states the cycle whose state is kept by this instance.
+ enum LoopKind {
+ NONE,
+ SUCCESSORS,
+ SUCCESSORS_OF_LOOP_HEADER,
+ LOOP_MEMBERS,
+ SUCCESSORS_OF_LOOP_MEMBER
+ };
+
+ // Each "Setup..." method is like a constructor for a cycle state.
+ PostorderProcessor* SetupSuccessors(Zone* zone,
+ HBasicBlock* block,
+ HBasicBlock* loop_header,
+ BitVector* visited) {
+ if (block == NULL || visited->Contains(block->block_id()) ||
+ block->parent_loop_header() != loop_header) {
+ kind_ = NONE;
+ block_ = NULL;
+ loop_ = NULL;
+ loop_header_ = NULL;
+ return this;
+ } else {
+ block_ = block;
+ loop_ = NULL;
+ visited->Add(block->block_id());
- blocks_.Rewind(0);
- int index = 0;
- for (int i = reverse_result.length() - 1; i >= 0; --i) {
- HBasicBlock* b = reverse_result[i];
- blocks_.Add(b);
- b->set_block_id(index++);
+ if (block->IsLoopHeader()) {
+ kind_ = SUCCESSORS_OF_LOOP_HEADER;
+ loop_header_ = block;
+ InitializeSuccessors();
+ PostorderProcessor* result = Push(zone);
+ return result->SetupLoopMembers(zone, block, block->loop_information(),
+ loop_header);
+ } else {
+ ASSERT(block->IsFinished());
+ kind_ = SUCCESSORS;
+ loop_header_ = loop_header;
+ InitializeSuccessors();
+ return this;
+ }
+ }
}
-}
+ PostorderProcessor* SetupLoopMembers(Zone* zone,
+ HBasicBlock* block,
+ HLoopInformation* loop,
+ HBasicBlock* loop_header) {
+ kind_ = LOOP_MEMBERS;
+ block_ = block;
+ loop_ = loop;
+ loop_header_ = loop_header;
+ InitializeLoopMembers();
+ return this;
+ }
+
+ PostorderProcessor* SetupSuccessorsOfLoopMember(
+ HBasicBlock* block,
+ HLoopInformation* loop,
+ HBasicBlock* loop_header) {
+ kind_ = SUCCESSORS_OF_LOOP_MEMBER;
+ block_ = block;
+ loop_ = loop;
+ loop_header_ = loop_header;
+ InitializeSuccessors();
+ return this;
+ }
+
+ // This method "allocates" a new stack frame.
+ PostorderProcessor* Push(Zone* zone) {
+ if (child_ == NULL) {
+ child_ = new(zone) PostorderProcessor(this);
+ }
+ return child_;
+ }
+
+ void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
+ ASSERT(block_->end()->FirstSuccessor() == NULL ||
+ order->Contains(block_->end()->FirstSuccessor()) ||
+ block_->end()->FirstSuccessor()->IsLoopHeader());
+ ASSERT(block_->end()->SecondSuccessor() == NULL ||
+ order->Contains(block_->end()->SecondSuccessor()) ||
+ block_->end()->SecondSuccessor()->IsLoopHeader());
+ order->Add(block_, zone);
+ }
+
+ // This method is the basic block to walk up the stack.
+ PostorderProcessor* Pop(Zone* zone,
+ BitVector* visited,
+ ZoneList<HBasicBlock*>* order) {
+ switch (kind_) {
+ case SUCCESSORS:
+ case SUCCESSORS_OF_LOOP_HEADER:
+ ClosePostorder(order, zone);
+ return father_;
+ case LOOP_MEMBERS:
+ return father_;
+ case SUCCESSORS_OF_LOOP_MEMBER:
+ if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
+ // In this case we need to perform a LOOP_MEMBERS cycle so we
+ // initialize it and return this instead of father.
+ return SetupLoopMembers(zone, block(),
+ block()->loop_information(), loop_header_);
+ } else {
+ return father_;
+ }
+ case NONE:
+ return father_;
+ }
+ UNREACHABLE();
+ return NULL;
+ }
-void HGraph::PostorderLoopBlocks(HLoopInformation* loop,
- BitVector* visited,
- ZoneList<HBasicBlock*>* order,
- HBasicBlock* loop_header) {
- for (int i = 0; i < loop->blocks()->length(); ++i) {
- HBasicBlock* b = loop->blocks()->at(i);
- for (HSuccessorIterator it(b->end()); !it.Done(); it.Advance()) {
- Postorder(it.Current(), visited, order, loop_header);
+ // Walks up the stack.
+ PostorderProcessor* Backtrack(Zone* zone,
+ BitVector* visited,
+ ZoneList<HBasicBlock*>* order) {
+ PostorderProcessor* parent = Pop(zone, visited, order);
+ while (parent != NULL) {
+ PostorderProcessor* next =
+ parent->PerformNonBacktrackingStep(zone, visited, order);
+ if (next != NULL) {
+ return next;
+ } else {
+ parent = parent->Pop(zone, visited, order);
+ }
}
- if (b->IsLoopHeader() && b != loop->loop_header()) {
- PostorderLoopBlocks(b->loop_information(), visited, order, loop_header);
+ return NULL;
+ }
+
+ PostorderProcessor* PerformNonBacktrackingStep(
+ Zone* zone,
+ BitVector* visited,
+ ZoneList<HBasicBlock*>* order) {
+ HBasicBlock* next_block;
+ switch (kind_) {
+ case SUCCESSORS:
+ next_block = AdvanceSuccessors();
+ if (next_block != NULL) {
+ PostorderProcessor* result = Push(zone);
+ return result->SetupSuccessors(zone, next_block,
+ loop_header_, visited);
+ }
+ break;
+ case SUCCESSORS_OF_LOOP_HEADER:
+ next_block = AdvanceSuccessors();
+ if (next_block != NULL) {
+ PostorderProcessor* result = Push(zone);
+ return result->SetupSuccessors(zone, next_block,
+ block(), visited);
+ }
+ break;
+ case LOOP_MEMBERS:
+ next_block = AdvanceLoopMembers();
+ if (next_block != NULL) {
+ PostorderProcessor* result = Push(zone);
+ return result->SetupSuccessorsOfLoopMember(next_block,
+ loop_, loop_header_);
+ }
+ break;
+ case SUCCESSORS_OF_LOOP_MEMBER:
+ next_block = AdvanceSuccessors();
+ if (next_block != NULL) {
+ PostorderProcessor* result = Push(zone);
+ return result->SetupSuccessors(zone, next_block,
+ loop_header_, visited);
+ }
+ break;
+ case NONE:
+ return NULL;
}
+ return NULL;
}
-}
+ // The following two methods implement a "foreach b in successors" cycle.
+ void InitializeSuccessors() {
+ loop_index = 0;
+ loop_length = 0;
+ successor_iterator = HSuccessorIterator(block_->end());
+ }
-void HGraph::Postorder(HBasicBlock* block,
- BitVector* visited,
- ZoneList<HBasicBlock*>* order,
- HBasicBlock* loop_header) {
- if (block == NULL || visited->Contains(block->block_id())) return;
- if (block->parent_loop_header() != loop_header) return;
- visited->Add(block->block_id());
- if (block->IsLoopHeader()) {
- PostorderLoopBlocks(block->loop_information(), visited, order, loop_header);
- for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
- Postorder(it.Current(), visited, order, block);
+ HBasicBlock* AdvanceSuccessors() {
+ if (!successor_iterator.Done()) {
+ HBasicBlock* result = successor_iterator.Current();
+ successor_iterator.Advance();
+ return result;
}
- } else {
- ASSERT(block->IsFinished());
- for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
- Postorder(it.Current(), visited, order, loop_header);
+ return NULL;
+ }
+
+ // The following two methods implement a "foreach b in loop members" cycle.
+ void InitializeLoopMembers() {
+ loop_index = 0;
+ loop_length = loop_->blocks()->length();
+ }
+
+ HBasicBlock* AdvanceLoopMembers() {
+ if (loop_index < loop_length) {
+ HBasicBlock* result = loop_->blocks()->at(loop_index);
+ loop_index++;
+ return result;
+ } else {
+ return NULL;
}
}
- ASSERT(block->end()->FirstSuccessor() == NULL ||
- order->Contains(block->end()->FirstSuccessor()) ||
- block->end()->FirstSuccessor()->IsLoopHeader());
- ASSERT(block->end()->SecondSuccessor() == NULL ||
- order->Contains(block->end()->SecondSuccessor()) ||
- block->end()->SecondSuccessor()->IsLoopHeader());
- order->Add(block);
+
+ LoopKind kind_;
+ PostorderProcessor* father_;
+ PostorderProcessor* child_;
+ HLoopInformation* loop_;
+ HBasicBlock* block_;
+ HBasicBlock* loop_header_;
+ int loop_index;
+ int loop_length;
+ HSuccessorIterator successor_iterator;
+};
+
+
+void HGraph::OrderBlocks() {
+ HPhase phase("H_Block ordering");
+ BitVector visited(blocks_.length(), zone());
+
+ ZoneList<HBasicBlock*> reverse_result(8, zone());
+ HBasicBlock* start = blocks_[0];
+ PostorderProcessor* postorder =
+ PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
+ while (postorder != NULL) {
+ postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
+ }
+ blocks_.Rewind(0);
+ int index = 0;
+ for (int i = reverse_result.length() - 1; i >= 0; --i) {
+ HBasicBlock* b = reverse_result[i];
+ blocks_.Add(b, zone());
+ b->set_block_id(index++);
+ }
}
@@ -849,9 +1080,9 @@ void HGraph::EliminateRedundantPhis() {
// Worklist of phis that can potentially be eliminated. Initialized with
// all phi nodes. When elimination of a phi node modifies another phi node
// the modified phi node is added to the worklist.
- ZoneList<HPhi*> worklist(blocks_.length());
+ ZoneList<HPhi*> worklist(blocks_.length(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
- worklist.AddAll(*blocks_[i]->phis());
+ worklist.AddAll(*blocks_[i]->phis(), zone());
}
while (!worklist.is_empty()) {
@@ -869,7 +1100,7 @@ void HGraph::EliminateRedundantPhis() {
for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
HValue* value = it.value();
value->SetOperandAt(it.index(), replacement);
- if (value->IsPhi()) worklist.Add(HPhi::cast(value));
+ if (value->IsPhi()) worklist.Add(HPhi::cast(value), zone());
}
block->RemovePhi(phi);
}
@@ -881,18 +1112,18 @@ void HGraph::EliminateUnreachablePhis() {
HPhase phase("H_Unreachable phi elimination", this);
// Initialize worklist.
- ZoneList<HPhi*> phi_list(blocks_.length());
- ZoneList<HPhi*> worklist(blocks_.length());
+ ZoneList<HPhi*> phi_list(blocks_.length(), zone());
+ ZoneList<HPhi*> worklist(blocks_.length(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
HPhi* phi = blocks_[i]->phis()->at(j);
- phi_list.Add(phi);
+ phi_list.Add(phi, zone());
// We can't eliminate phis in the receiver position in the environment
// because in case of throwing an error we need this value to
// construct a stack trace.
if (phi->HasRealUses() || phi->IsReceiver()) {
phi->set_is_live(true);
- worklist.Add(phi);
+ worklist.Add(phi, zone());
}
}
}
@@ -904,7 +1135,7 @@ void HGraph::EliminateUnreachablePhis() {
HValue* operand = phi->OperandAt(i);
if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
HPhi::cast(operand)->set_is_live(true);
- worklist.Add(HPhi::cast(operand));
+ worklist.Add(HPhi::cast(operand), zone());
}
}
}
@@ -951,11 +1182,11 @@ bool HGraph::CheckConstPhiUses() {
void HGraph::CollectPhis() {
int block_count = blocks_.length();
- phi_list_ = new ZoneList<HPhi*>(block_count);
+ phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
for (int i = 0; i < block_count; ++i) {
for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
HPhi* phi = blocks_[i]->phis()->at(j);
- phi_list_->Add(phi);
+ phi_list_->Add(phi, zone());
}
}
}
@@ -976,7 +1207,7 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
HValue* use = it.value();
if (!in_worklist.Contains(use->id())) {
in_worklist.Add(use->id());
- worklist->Add(use);
+ worklist->Add(use, zone());
}
}
}
@@ -987,7 +1218,7 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
class HRangeAnalysis BASE_EMBEDDED {
public:
explicit HRangeAnalysis(HGraph* graph) :
- graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }
+ graph_(graph), zone_(graph->zone()), changed_ranges_(16, zone_) { }
void Analyze();
@@ -1132,7 +1363,7 @@ void HRangeAnalysis::RollBackTo(int index) {
void HRangeAnalysis::AddRange(HValue* value, Range* range) {
Range* original_range = value->range();
value->AddNewRange(range, zone_);
- changed_ranges_.Add(value);
+ changed_ranges_.Add(value, zone_);
Range* new_range = value->range();
TraceRange("Updated range of %d set to [%d,%d]\n",
value->id(),
@@ -1260,18 +1491,18 @@ HValue* HValueMap::Lookup(HValue* value) const {
}
-void HValueMap::Resize(int new_size) {
+void HValueMap::Resize(int new_size, Zone* zone) {
ASSERT(new_size > count_);
// Hashing the values into the new array has no more collisions than in the
// old hash map, so we can use the existing lists_ array, if we are careful.
// Make sure we have at least one free element.
if (free_list_head_ == kNil) {
- ResizeLists(lists_size_ << 1);
+ ResizeLists(lists_size_ << 1, zone);
}
HValueMapListElement* new_array =
- ZONE->NewArray<HValueMapListElement>(new_size);
+ zone->NewArray<HValueMapListElement>(new_size);
memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
HValueMapListElement* old_array = array_;
@@ -1289,14 +1520,14 @@ void HValueMap::Resize(int new_size) {
if (old_array[i].value != NULL) {
int current = old_array[i].next;
while (current != kNil) {
- Insert(lists_[current].value);
+ Insert(lists_[current].value, zone);
int next = lists_[current].next;
lists_[current].next = free_list_head_;
free_list_head_ = current;
current = next;
}
// Rehash the directly stored value.
- Insert(old_array[i].value);
+ Insert(old_array[i].value, zone);
}
}
}
@@ -1305,11 +1536,11 @@ void HValueMap::Resize(int new_size) {
}
-void HValueMap::ResizeLists(int new_size) {
+void HValueMap::ResizeLists(int new_size, Zone* zone) {
ASSERT(new_size > lists_size_);
HValueMapListElement* new_lists =
- ZONE->NewArray<HValueMapListElement>(new_size);
+ zone->NewArray<HValueMapListElement>(new_size);
memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
HValueMapListElement* old_lists = lists_;
@@ -1328,10 +1559,10 @@ void HValueMap::ResizeLists(int new_size) {
}
-void HValueMap::Insert(HValue* value) {
+void HValueMap::Insert(HValue* value, Zone* zone) {
ASSERT(value != NULL);
// Resizing when half of the hashtable is filled up.
- if (count_ >= array_size_ >> 1) Resize(array_size_ << 1);
+ if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
ASSERT(count_ < array_size_);
count_++;
uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
@@ -1340,7 +1571,7 @@ void HValueMap::Insert(HValue* value) {
array_[pos].next = kNil;
} else {
if (free_list_head_ == kNil) {
- ResizeLists(lists_size_ << 1);
+ ResizeLists(lists_size_ << 1, zone);
}
int new_element_pos = free_list_head_;
ASSERT(new_element_pos != kNil);
@@ -1359,10 +1590,17 @@ HSideEffectMap::HSideEffectMap() : count_(0) {
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
- memcpy(data_, other->data_, kNumberOfTrackedSideEffects * kPointerSize);
+ *this = *other; // Calls operator=.
}
+HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
+ if (this != &other) {
+ memcpy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
+ }
+ return *this;
+}
+
void HSideEffectMap::Kill(GVNFlagSet flags) {
for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
@@ -1473,15 +1711,17 @@ class HGlobalValueNumberer BASE_EMBEDDED {
: graph_(graph),
info_(info),
removed_side_effects_(false),
- block_side_effects_(graph->blocks()->length()),
- loop_side_effects_(graph->blocks()->length()),
+ block_side_effects_(graph->blocks()->length(), graph->zone()),
+ loop_side_effects_(graph->blocks()->length(), graph->zone()),
visited_on_paths_(graph->zone(), graph->blocks()->length()) {
- ASSERT(info->isolate()->heap()->allow_allocation(false));
- block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
- loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
- }
- ~HGlobalValueNumberer() {
- ASSERT(!info_->isolate()->heap()->allow_allocation(true));
+#ifdef DEBUG
+ ASSERT(info->isolate()->optimizing_compiler_thread()->IsOptimizerThread() ||
+ !info->isolate()->heap()->IsAllocationAllowed());
+#endif
+ block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
+ graph_->zone());
+ loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
+ graph_->zone());
}
// Returns true if values with side effects are removed.
@@ -1491,9 +1731,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
HBasicBlock* dominator,
HBasicBlock* dominated);
- void AnalyzeBlock(HBasicBlock* block,
- HValueMap* map,
- HSideEffectMap* dominators);
+ void AnalyzeGraph();
void ComputeBlockSideEffects();
void LoopInvariantCodeMotion();
void ProcessLoopBlock(HBasicBlock* block,
@@ -1506,7 +1744,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
HGraph* graph() { return graph_; }
CompilationInfo* info() { return info_; }
- Zone* zone() { return graph_->zone(); }
+ Zone* zone() const { return graph_->zone(); }
HGraph* graph_;
CompilationInfo* info_;
@@ -1530,9 +1768,7 @@ bool HGlobalValueNumberer::Analyze() {
if (FLAG_loop_invariant_code_motion) {
LoopInvariantCodeMotion();
}
- HValueMap* map = new(zone()) HValueMap();
- HSideEffectMap side_effect_dominators;
- AnalyzeBlock(graph_->entry_block(), map, &side_effect_dominators);
+ AnalyzeGraph();
return removed_side_effects_;
}
@@ -1664,6 +1900,8 @@ GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
void HGlobalValueNumberer::LoopInvariantCodeMotion() {
+ TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
+ graph_->use_optimistic_licm() ? "yes" : "no");
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
HBasicBlock* block = graph_->blocks()->at(i);
if (block->IsLoopHeader()) {
@@ -1707,51 +1945,8 @@ void HGlobalValueNumberer::ProcessLoopBlock(
*GetGVNFlagsString(instr->gvn_flags()),
*GetGVNFlagsString(loop_kills));
bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
- if (instr->IsTransitionElementsKind()) {
- // It's possible to hoist transitions out of a loop as long as the
- // hoisting wouldn't move the transition past a DependsOn of one of it's
- // changes or any instructions that might change an objects map or
- // elements contents.
- GVNFlagSet changes = instr->ChangesFlags();
- GVNFlagSet hoist_depends_blockers =
- HValue::ConvertChangesToDependsFlags(changes);
- // In addition to not hoisting transitions above other instructions that
- // change dependencies that the transition changes, it must not be
- // hoisted above map changes and stores to an elements backing store
- // that the transition might change.
- GVNFlagSet hoist_change_blockers = changes;
- hoist_change_blockers.Add(kChangesMaps);
- HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
- if (trans->original_map()->has_fast_double_elements()) {
- hoist_change_blockers.Add(kChangesDoubleArrayElements);
- }
- if (trans->transitioned_map()->has_fast_double_elements()) {
- hoist_change_blockers.Add(kChangesArrayElements);
- }
- if (FLAG_trace_gvn) {
- GVNFlagSet hoist_blockers = hoist_depends_blockers;
- hoist_blockers.Add(hoist_change_blockers);
- GVNFlagSet first_time = *first_time_changes;
- first_time.Add(*first_time_depends);
- TRACE_GVN_4("Checking dependencies on HTransitionElementsKind "
- "%d (%s) hoist blockers: %s; "
- "first-time accumulated: %s\n",
- instr->id(),
- instr->Mnemonic(),
- *GetGVNFlagsString(hoist_blockers),
- *GetGVNFlagsString(first_time));
- }
- // It's possible to hoist transition from the current loop loop only if
- // they dominate all of the successor blocks in the same loop and there
- // are not any instructions that have Changes/DependsOn that intervene
- // between it and the beginning of the loop header.
- bool in_nested_loop = block != loop_header &&
- ((block->parent_loop_header() != loop_header) ||
- block->IsLoopHeader());
- can_hoist = !in_nested_loop &&
- block->IsLoopSuccessorDominator() &&
- !first_time_depends->ContainsAnyOf(hoist_depends_blockers) &&
- !first_time_changes->ContainsAnyOf(hoist_change_blockers);
+ if (can_hoist && !graph()->use_optimistic_licm()) {
+ can_hoist = block->IsLoopSuccessorDominator();
}
if (can_hoist) {
@@ -1794,7 +1989,7 @@ void HGlobalValueNumberer::ProcessLoopBlock(
bool HGlobalValueNumberer::AllowCodeMotion() {
- return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
+ return info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count;
}
@@ -1826,89 +2021,220 @@ GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
}
-void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block,
- HValueMap* map,
- HSideEffectMap* dominators) {
- TRACE_GVN_2("Analyzing block B%d%s\n",
- block->block_id(),
- block->IsLoopHeader() ? " (loop header)" : "");
+// Each instance of this class is like a "stack frame" for the recursive
+// traversal of the dominator tree done during GVN (the stack is handled
+// as a double linked list).
+// We reuse frames when possible so the list length is limited by the depth
+// of the dominator tree but this forces us to initialize each frame calling
+// an explicit "Initialize" method instead of a using constructor.
+class GvnBasicBlockState: public ZoneObject {
+ public:
+ static GvnBasicBlockState* CreateEntry(Zone* zone,
+ HBasicBlock* entry_block,
+ HValueMap* entry_map) {
+ return new(zone)
+ GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
+ }
+
+ HBasicBlock* block() { return block_; }
+ HValueMap* map() { return map_; }
+ HSideEffectMap* dominators() { return &dominators_; }
+
+ GvnBasicBlockState* next_in_dominator_tree_traversal(
+ Zone* zone,
+ HBasicBlock** dominator) {
+ // This assignment needs to happen before calling next_dominated() because
+ // that call can reuse "this" if we are at the last dominated block.
+ *dominator = block();
+ GvnBasicBlockState* result = next_dominated(zone);
+ if (result == NULL) {
+ GvnBasicBlockState* dominator_state = pop();
+ if (dominator_state != NULL) {
+ // This branch is guaranteed not to return NULL because pop() never
+ // returns a state where "is_done() == true".
+ *dominator = dominator_state->block();
+ result = dominator_state->next_dominated(zone);
+ } else {
+ // Unnecessary (we are returning NULL) but done for cleanness.
+ *dominator = NULL;
+ }
+ }
+ return result;
+ }
- // If this is a loop header kill everything killed by the loop.
- if (block->IsLoopHeader()) {
- map->Kill(loop_side_effects_[block->block_id()]);
+ private:
+ void Initialize(HBasicBlock* block,
+ HValueMap* map,
+ HSideEffectMap* dominators,
+ bool copy_map,
+ Zone* zone) {
+ block_ = block;
+ map_ = copy_map ? map->Copy(zone) : map;
+ dominated_index_ = -1;
+ length_ = block->dominated_blocks()->length();
+ if (dominators != NULL) {
+ dominators_ = *dominators;
+ }
+ }
+ bool is_done() { return dominated_index_ >= length_; }
+
+ GvnBasicBlockState(GvnBasicBlockState* previous,
+ HBasicBlock* block,
+ HValueMap* map,
+ HSideEffectMap* dominators,
+ Zone* zone)
+ : previous_(previous), next_(NULL) {
+ Initialize(block, map, dominators, true, zone);
+ }
+
+ GvnBasicBlockState* next_dominated(Zone* zone) {
+ dominated_index_++;
+ if (dominated_index_ == length_ - 1) {
+ // No need to copy the map for the last child in the dominator tree.
+ Initialize(block_->dominated_blocks()->at(dominated_index_),
+ map(),
+ dominators(),
+ false,
+ zone);
+ return this;
+ } else if (dominated_index_ < length_) {
+ return push(zone,
+ block_->dominated_blocks()->at(dominated_index_),
+ dominators());
+ } else {
+ return NULL;
+ }
}
- // Go through all instructions of the current block.
- HInstruction* instr = block->first();
- while (instr != NULL) {
- HInstruction* next = instr->next();
- GVNFlagSet flags = instr->ChangesFlags();
- if (!flags.IsEmpty()) {
- // Clear all instructions in the map that are affected by side effects.
- // Store instruction as the dominating one for tracked side effects.
- map->Kill(flags);
- dominators->Store(flags, instr);
- TRACE_GVN_2("Instruction %d %s\n", instr->id(),
- *GetGVNFlagsString(flags));
+ GvnBasicBlockState* push(Zone* zone,
+ HBasicBlock* block,
+ HSideEffectMap* dominators) {
+ if (next_ == NULL) {
+ next_ =
+ new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
+ } else {
+ next_->Initialize(block, map(), dominators, true, zone);
}
- if (instr->CheckFlag(HValue::kUseGVN)) {
- ASSERT(!instr->HasObservableSideEffects());
- HValue* other = map->Lookup(instr);
- if (other != NULL) {
- ASSERT(instr->Equals(other) && other->Equals(instr));
- TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
- instr->id(),
- instr->Mnemonic(),
- other->id(),
- other->Mnemonic());
- if (instr->HasSideEffects()) removed_side_effects_ = true;
- instr->DeleteAndReplaceWith(other);
- } else {
- map->Add(instr);
- }
+ return next_;
+ }
+ GvnBasicBlockState* pop() {
+ GvnBasicBlockState* result = previous_;
+ while (result != NULL && result->is_done()) {
+ TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
+ block()->block_id(),
+ previous_->block()->block_id())
+ result = result->previous_;
}
- if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
- for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
- HValue* other = dominators->at(i);
- GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
- GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
- if (instr->DependsOnFlags().Contains(depends_on_flag) &&
- (other != NULL)) {
- TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
- i,
+ return result;
+ }
+
+ GvnBasicBlockState* previous_;
+ GvnBasicBlockState* next_;
+ HBasicBlock* block_;
+ HValueMap* map_;
+ HSideEffectMap dominators_;
+ int dominated_index_;
+ int length_;
+};
+
+// This is a recursive traversal of the dominator tree but it has been turned
+// into a loop to avoid stack overflows.
+// The logical "stack frames" of the recursion are kept in a list of
+// GvnBasicBlockState instances.
+void HGlobalValueNumberer::AnalyzeGraph() {
+ HBasicBlock* entry_block = graph_->entry_block();
+ HValueMap* entry_map = new(zone()) HValueMap(zone());
+ GvnBasicBlockState* current =
+ GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
+
+ while (current != NULL) {
+ HBasicBlock* block = current->block();
+ HValueMap* map = current->map();
+ HSideEffectMap* dominators = current->dominators();
+
+ TRACE_GVN_2("Analyzing block B%d%s\n",
+ block->block_id(),
+ block->IsLoopHeader() ? " (loop header)" : "");
+
+ // If this is a loop header kill everything killed by the loop.
+ if (block->IsLoopHeader()) {
+ map->Kill(loop_side_effects_[block->block_id()]);
+ }
+
+ // Go through all instructions of the current block.
+ HInstruction* instr = block->first();
+ while (instr != NULL) {
+ HInstruction* next = instr->next();
+ GVNFlagSet flags = instr->ChangesFlags();
+ if (!flags.IsEmpty()) {
+ // Clear all instructions in the map that are affected by side effects.
+ // Store instruction as the dominating one for tracked side effects.
+ map->Kill(flags);
+ dominators->Store(flags, instr);
+ TRACE_GVN_2("Instruction %d %s\n", instr->id(),
+ *GetGVNFlagsString(flags));
+ }
+ if (instr->CheckFlag(HValue::kUseGVN)) {
+ ASSERT(!instr->HasObservableSideEffects());
+ HValue* other = map->Lookup(instr);
+ if (other != NULL) {
+ ASSERT(instr->Equals(other) && other->Equals(instr));
+ TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
instr->id(),
instr->Mnemonic(),
other->id(),
other->Mnemonic());
- instr->SetSideEffectDominator(changes_flag, other);
+ if (instr->HasSideEffects()) removed_side_effects_ = true;
+ instr->DeleteAndReplaceWith(other);
+ } else {
+ map->Add(instr, zone());
}
}
+ if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
+ for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
+ HValue* other = dominators->at(i);
+ GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
+ GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
+ if (instr->DependsOnFlags().Contains(depends_on_flag) &&
+ (other != NULL)) {
+ TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
+ i,
+ instr->id(),
+ instr->Mnemonic(),
+ other->id(),
+ other->Mnemonic());
+ instr->SetSideEffectDominator(changes_flag, other);
+ }
+ }
+ }
+ instr = next;
+ }
+
+ HBasicBlock* dominator_block;
+ GvnBasicBlockState* next =
+ current->next_in_dominator_tree_traversal(zone(), &dominator_block);
+
+ if (next != NULL) {
+ HBasicBlock* dominated = next->block();
+ HValueMap* successor_map = next->map();
+ HSideEffectMap* successor_dominators = next->dominators();
+
+ // Kill everything killed on any path between this block and the
+ // dominated block. We don't have to traverse these paths if the
+ // value map and the dominators list is already empty. If the range
+ // of block ids (block_id, dominated_id) is empty there are no such
+ // paths.
+ if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
+ dominator_block->block_id() + 1 < dominated->block_id()) {
+ visited_on_paths_.Clear();
+ GVNFlagSet side_effects_on_all_paths =
+ CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
+ dominated);
+ successor_map->Kill(side_effects_on_all_paths);
+ successor_dominators->Kill(side_effects_on_all_paths);
+ }
}
- instr = next;
- }
-
- // Recursively continue analysis for all immediately dominated blocks.
- int length = block->dominated_blocks()->length();
- for (int i = 0; i < length; ++i) {
- HBasicBlock* dominated = block->dominated_blocks()->at(i);
- // No need to copy the map for the last child in the dominator tree.
- HValueMap* successor_map = (i == length - 1) ? map : map->Copy(zone());
- HSideEffectMap successor_dominators(dominators);
-
- // Kill everything killed on any path between this block and the
- // dominated block. We don't have to traverse these paths if the
- // value map and the dominators list is already empty. If the range
- // of block ids (block_id, dominated_id) is empty there are no such
- // paths.
- if ((!successor_map->IsEmpty() || !successor_dominators.IsEmpty()) &&
- block->block_id() + 1 < dominated->block_id()) {
- visited_on_paths_.Clear();
- GVNFlagSet side_effects_on_all_paths =
- CollectSideEffectsOnPathsToDominatedBlock(block, dominated);
- successor_map->Kill(side_effects_on_all_paths);
- successor_dominators.Kill(side_effects_on_all_paths);
- }
- AnalyzeBlock(dominated, successor_map, &successor_dominators);
+ current = next;
}
}
@@ -1917,7 +2243,7 @@ class HInferRepresentation BASE_EMBEDDED {
public:
explicit HInferRepresentation(HGraph* graph)
: graph_(graph),
- worklist_(8),
+ worklist_(8, graph->zone()),
in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
void Analyze();
@@ -1929,7 +2255,7 @@ class HInferRepresentation BASE_EMBEDDED {
void AddDependantsToWorklist(HValue* current);
void InferBasedOnUses(HValue* current);
- Zone* zone() { return graph_->zone(); }
+ Zone* zone() const { return graph_->zone(); }
HGraph* graph_;
ZoneList<HValue*> worklist_;
@@ -1941,7 +2267,7 @@ void HInferRepresentation::AddToWorklist(HValue* current) {
if (current->representation().IsSpecialization()) return;
if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
if (in_worklist_.Contains(current->id())) return;
- worklist_.Add(current);
+ worklist_.Add(current, zone());
in_worklist_.Add(current->id());
}
@@ -2008,8 +2334,16 @@ Representation HInferRepresentation::TryChange(HValue* value) {
for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
- Representation rep = use->RequiredInputRepresentation(it.index());
+ Representation rep = use->ObservedInputRepresentation(it.index());
if (rep.IsNone()) continue;
+ if (FLAG_trace_representation) {
+ PrintF("%d %s is used by %d %s as %s\n",
+ value->id(),
+ value->Mnemonic(),
+ use->id(),
+ use->Mnemonic(),
+ rep.Mnemonic());
+ }
if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
use_count[rep.kind()] += use->LoopWeight();
}
@@ -2044,12 +2378,12 @@ void HInferRepresentation::Analyze() {
// bit-vector of length <number of phis>.
const ZoneList<HPhi*>* phi_list = graph_->phi_list();
int phi_count = phi_list->length();
- ZoneList<BitVector*> connected_phis(phi_count);
+ ZoneList<BitVector*> connected_phis(phi_count, graph_->zone());
for (int i = 0; i < phi_count; ++i) {
phi_list->at(i)->InitRealUses(i);
BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
connected_set->Add(i);
- connected_phis.Add(connected_set);
+ connected_phis.Add(connected_set, zone());
}
// (2) Do a fixed point iteration to find the set of connected phis. A
@@ -2073,21 +2407,34 @@ void HInferRepresentation::Analyze() {
}
}
- // (3) Use the phi reachability information from step 2 to
- // (a) sum up the non-phi use counts of all connected phis.
- // (b) push information about values which can't be converted to integer
- // without deoptimization through the phi use-def chains, avoiding
- // unnecessary deoptimizations later.
+ // (3a) Use the phi reachability information from step 2 to
+ // push information about values which can't be converted to integer
+ // without deoptimization through the phi use-def chains, avoiding
+ // unnecessary deoptimizations later.
for (int i = 0; i < phi_count; ++i) {
HPhi* phi = phi_list->at(i);
bool cti = phi->AllOperandsConvertibleToInteger();
+ if (cti) continue;
+
+ for (BitVector::Iterator it(connected_phis.at(i));
+ !it.Done();
+ it.Advance()) {
+ HPhi* phi = phi_list->at(it.Current());
+ phi->set_is_convertible_to_integer(false);
+ phi->ResetInteger32Uses();
+ }
+ }
+
+ // (3b) Use the phi reachability information from step 2 to
+ // sum up the non-phi use counts of all connected phis.
+ for (int i = 0; i < phi_count; ++i) {
+ HPhi* phi = phi_list->at(i);
for (BitVector::Iterator it(connected_phis.at(i));
!it.Done();
it.Advance()) {
int index = it.Current();
- HPhi* it_use = phi_list->at(it.Current());
- if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice!
- if (!cti) it_use->set_is_convertible_to_integer(false);
+ HPhi* it_use = phi_list->at(index);
+ if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice.
}
}
@@ -2145,9 +2492,9 @@ void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
i = last_back_edge->block_id();
// Update phis of the loop header now after the whole loop body is
// guaranteed to be processed.
- ZoneList<HValue*> worklist(block->phis()->length());
+ ZoneList<HValue*> worklist(block->phis()->length(), zone());
for (int j = 0; j < block->phis()->length(); ++j) {
- worklist.Add(block->phis()->at(j));
+ worklist.Add(block->phis()->at(j), zone());
}
InferTypes(&worklist);
}
@@ -2170,8 +2517,8 @@ void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
break;
}
- // For multiplication and division, we must propagate to the left and
- // the right side.
+ // For multiplication, division, and Math.min/max(), we must propagate
+ // to the left and the right side.
if (current->IsMul()) {
HMul* mul = HMul::cast(current);
mul->EnsureAndPropagateNotMinusZero(visited);
@@ -2182,6 +2529,11 @@ void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
div->EnsureAndPropagateNotMinusZero(visited);
PropagateMinusZeroChecks(div->left(), visited);
PropagateMinusZeroChecks(div->right(), visited);
+ } else if (current->IsMathMinMax()) {
+ HMathMinMax* minmax = HMathMinMax::cast(current);
+ visited->Add(minmax->id());
+ PropagateMinusZeroChecks(minmax->left(), visited);
+ PropagateMinusZeroChecks(minmax->right(), visited);
}
current = current->EnsureAndPropagateNotMinusZero(visited);
@@ -2214,8 +2566,8 @@ void HGraph::InsertRepresentationChangeForUse(HValue* value,
HConstant* constant = HConstant::cast(value);
// Try to create a new copy of the constant with the new representation.
new_value = is_truncating
- ? constant->CopyToTruncatedInt32()
- : constant->CopyToRepresentation(to);
+ ? constant->CopyToTruncatedInt32(zone())
+ : constant->CopyToRepresentation(to, zone());
}
if (new_value == NULL) {
@@ -2331,6 +2683,229 @@ void HGraph::MarkDeoptimizeOnUndefined() {
}
+// Discover instructions that can be marked with kUint32 flag allowing
+// them to produce full range uint32 values.
+class Uint32Analysis BASE_EMBEDDED {
+ public:
+ explicit Uint32Analysis(Zone* zone) : zone_(zone), phis_(4, zone) { }
+
+ void Analyze(HInstruction* current);
+
+ void UnmarkUnsafePhis();
+
+ private:
+ bool IsSafeUint32Use(HValue* val, HValue* use);
+ bool Uint32UsesAreSafe(HValue* uint32val);
+ bool CheckPhiOperands(HPhi* phi);
+ void UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist);
+
+ Zone* zone_;
+ ZoneList<HPhi*> phis_;
+};
+
+
+bool Uint32Analysis::IsSafeUint32Use(HValue* val, HValue* use) {
+ // Operations that operate on bits are safe.
+ if (use->IsBitwise() ||
+ use->IsShl() ||
+ use->IsSar() ||
+ use->IsShr() ||
+ use->IsBitNot()) {
+ return true;
+ } else if (use->IsChange() || use->IsSimulate()) {
+ // Conversions and deoptimization have special support for uint32.
+ return true;
+ } else if (use->IsStoreKeyed()) {
+ HStoreKeyed* store = HStoreKeyed::cast(use);
+ if (store->is_external()) {
+ // Storing a value into an external integer array is a bit level
+ // operation.
+ if (store->value() == val) {
+ // Clamping or a conversion to double should have been inserted.
+ ASSERT(store->elements_kind() != EXTERNAL_PIXEL_ELEMENTS);
+ ASSERT(store->elements_kind() != EXTERNAL_FLOAT_ELEMENTS);
+ ASSERT(store->elements_kind() != EXTERNAL_DOUBLE_ELEMENTS);
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+
+// Iterate over all uses and verify that they are uint32 safe: either don't
+// distinguish between int32 and uint32 due to their bitwise nature or
+// have special support for uint32 values.
+// Encountered phis are optimistically treated as safe uint32 uses,
+// marked with kUint32 flag and collected in the phis_ list. A separate
+// pass will be performed later by UnmarkUnsafePhis to clear kUint32 from
+// phis that are not actually uint32-safe (it requires fixed-point iteration).
+bool Uint32Analysis::Uint32UsesAreSafe(HValue* uint32val) {
+ bool collect_phi_uses = false;
+ for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
+ HValue* use = it.value();
+
+ if (use->IsPhi()) {
+ if (!use->CheckFlag(HInstruction::kUint32)) {
+ // There is a phi use of this value from a phi that is not yet
+ // collected in the phis_ array. A separate pass is required.
+ collect_phi_uses = true;
+ }
+
+ // Optimistically treat phis as uint32 safe.
+ continue;
+ }
+
+ if (!IsSafeUint32Use(uint32val, use)) {
+ return false;
+ }
+ }
+
+ if (collect_phi_uses) {
+ for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
+ HValue* use = it.value();
+
+ // There is a phi use of this value from a phi that is not yet
+ // collected in the phis_ array. A separate pass is required.
+ if (use->IsPhi() && !use->CheckFlag(HInstruction::kUint32)) {
+ use->SetFlag(HInstruction::kUint32);
+ phis_.Add(HPhi::cast(use), zone_);
+ }
+ }
+ }
+
+ return true;
+}
+
+
+// Analyze instruction and mark it with kUint32 if all its uses are uint32
+// safe.
+void Uint32Analysis::Analyze(HInstruction* current) {
+ if (Uint32UsesAreSafe(current)) current->SetFlag(HInstruction::kUint32);
+}
+
+
+// Check if all operands to the given phi are marked with kUint32 flag.
+bool Uint32Analysis::CheckPhiOperands(HPhi* phi) {
+ if (!phi->CheckFlag(HInstruction::kUint32)) {
+ // This phi is not uint32 safe. No need to check operands.
+ return false;
+ }
+
+ for (int j = 0; j < phi->OperandCount(); j++) {
+ HValue* operand = phi->OperandAt(j);
+ if (!operand->CheckFlag(HInstruction::kUint32)) {
+ // Lazily mark constants that fit into uint32 range with kUint32 flag.
+ if (operand->IsConstant() &&
+ HConstant::cast(operand)->IsUint32()) {
+ operand->SetFlag(HInstruction::kUint32);
+ continue;
+ }
+
+ // This phi is not safe, some operands are not uint32 values.
+ return false;
+ }
+ }
+
+ return true;
+}
+
+
+// Remove kUint32 flag from the phi itself and its operands. If any operand
+// was a phi marked with kUint32 place it into a worklist for
+// transitive clearing of kUint32 flag.
+void Uint32Analysis::UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist) {
+ phi->ClearFlag(HInstruction::kUint32);
+ for (int j = 0; j < phi->OperandCount(); j++) {
+ HValue* operand = phi->OperandAt(j);
+ if (operand->CheckFlag(HInstruction::kUint32)) {
+ operand->ClearFlag(HInstruction::kUint32);
+ if (operand->IsPhi()) {
+ worklist->Add(HPhi::cast(operand), zone_);
+ }
+ }
+ }
+}
+
+
+void Uint32Analysis::UnmarkUnsafePhis() {
+ // No phis were collected. Nothing to do.
+ if (phis_.length() == 0) return;
+
+ // Worklist used to transitively clear kUint32 from phis that
+ // are used as arguments to other phis.
+ ZoneList<HPhi*> worklist(phis_.length(), zone_);
+
+ // Phi can be used as a uint32 value if and only if
+ // all its operands are uint32 values and all its
+ // uses are uint32 safe.
+
+ // Iterate over collected phis and unmark those that
+ // are unsafe. When unmarking phi unmark its operands
+ // and add it to the worklist if it is a phi as well.
+ // Phis that are still marked as safe are shifted down
+ // so that all safe phis form a prefix of the phis_ array.
+ int phi_count = 0;
+ for (int i = 0; i < phis_.length(); i++) {
+ HPhi* phi = phis_[i];
+
+ if (CheckPhiOperands(phi) && Uint32UsesAreSafe(phi)) {
+ phis_[phi_count++] = phi;
+ } else {
+ UnmarkPhi(phi, &worklist);
+ }
+ }
+
+ // Now the phis array contains only those phis that have safe
+ // non-phi uses. Start transitively clearing the kUint32 flag
+ // from phi operands of discovered non-safe phis until
+ // only safe phis are left.
+ while (!worklist.is_empty()) {
+ while (!worklist.is_empty()) {
+ HPhi* phi = worklist.RemoveLast();
+ UnmarkPhi(phi, &worklist);
+ }
+
+ // Check if any operands to safe phis were unmarked
+ // turning a safe phi into unsafe. The same value
+ // can flow into several phis.
+ int new_phi_count = 0;
+ for (int i = 0; i < phi_count; i++) {
+ HPhi* phi = phis_[i];
+
+ if (CheckPhiOperands(phi)) {
+ phis_[new_phi_count++] = phi;
+ } else {
+ UnmarkPhi(phi, &worklist);
+ }
+ }
+ phi_count = new_phi_count;
+ }
+}
+
+
+void HGraph::ComputeSafeUint32Operations() {
+ if (!FLAG_opt_safe_uint32_operations || uint32_instructions_ == NULL) {
+ return;
+ }
+
+ Uint32Analysis analysis(zone());
+ for (int i = 0; i < uint32_instructions_->length(); ++i) {
+ HInstruction* current = uint32_instructions_->at(i);
+ if (current->IsLinked() && current->representation().IsInteger32()) {
+ analysis.Analyze(current);
+ }
+ }
+
+ // Some phis might have been optimistically marked with kUint32 flag.
+ // Remove this flag from those phis that are unsafe and propagate
+ // this information transitively potentially clearing kUint32 flag
+ // from some non-phi operations that are used as operands to unsafe phis.
+ analysis.UnmarkUnsafePhis();
+}
+
+
void HGraph::ComputeMinusZeroChecks() {
BitVector visited(GetMaximumValueID(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
@@ -2360,12 +2935,12 @@ void HGraph::ComputeMinusZeroChecks() {
FunctionState::FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- ReturnHandlingFlag return_handling)
+ InliningKind inlining_kind)
: owner_(owner),
compilation_info_(info),
oracle_(oracle),
call_context_(NULL),
- return_handling_(return_handling),
+ inlining_kind_(inlining_kind),
function_return_(NULL),
test_context_(NULL),
entry_(NULL),
@@ -2378,10 +2953,13 @@ FunctionState::FunctionState(HGraphBuilder* owner,
HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
if_true->MarkAsInlineReturnTarget();
if_false->MarkAsInlineReturnTarget();
- Expression* cond = TestContext::cast(owner->ast_context())->condition();
+ TestContext* outer_test_context = TestContext::cast(owner->ast_context());
+ Expression* cond = outer_test_context->condition();
+ TypeFeedbackOracle* outer_oracle = outer_test_context->oracle();
// The AstContext constructor pushed on the context stack. This newed
// instance is the reason that AstContext can't be BASE_EMBEDDED.
- test_context_ = new TestContext(owner, cond, if_true, if_false);
+ test_context_ =
+ new TestContext(owner, cond, outer_oracle, if_true, if_false);
} else {
function_return_ = owner->graph()->CreateBasicBlock();
function_return()->MarkAsInlineReturnTarget();
@@ -2457,14 +3035,15 @@ void TestContext::ReturnValue(HValue* value) {
}
-void EffectContext::ReturnInstruction(HInstruction* instr, int ast_id) {
+void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->IsControlInstruction());
owner()->AddInstruction(instr);
if (instr->HasObservableSideEffects()) owner()->AddSimulate(ast_id);
}
-void EffectContext::ReturnControl(HControlInstruction* instr, int ast_id) {
+void EffectContext::ReturnControl(HControlInstruction* instr,
+ BailoutId ast_id) {
ASSERT(!instr->HasObservableSideEffects());
HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
@@ -2476,7 +3055,7 @@ void EffectContext::ReturnControl(HControlInstruction* instr, int ast_id) {
}
-void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) {
+void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->IsControlInstruction());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
return owner()->Bailout("bad value context for arguments object value");
@@ -2487,7 +3066,7 @@ void ValueContext::ReturnInstruction(HInstruction* instr, int ast_id) {
}
-void ValueContext::ReturnControl(HControlInstruction* instr, int ast_id) {
+void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->HasObservableSideEffects());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
return owner()->Bailout("bad value context for arguments object value");
@@ -2507,7 +3086,7 @@ void ValueContext::ReturnControl(HControlInstruction* instr, int ast_id) {
}
-void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) {
+void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->IsControlInstruction());
HGraphBuilder* builder = owner();
builder->AddInstruction(instr);
@@ -2522,7 +3101,7 @@ void TestContext::ReturnInstruction(HInstruction* instr, int ast_id) {
}
-void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) {
+void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->HasObservableSideEffects());
HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
@@ -2546,8 +3125,8 @@ void TestContext::BuildBranch(HValue* value) {
}
HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
- unsigned test_id = condition()->test_id();
- ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id));
+ TypeFeedbackId test_id = condition()->test_id();
+ ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
builder->current_block()->Finish(test);
@@ -2573,11 +3152,7 @@ void TestContext::BuildBranch(HValue* value) {
void HGraphBuilder::Bailout(const char* reason) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *name, reason);
- }
+ info()->set_bailout_reason(reason);
SetStackOverflow();
}
@@ -2605,17 +3180,14 @@ void HGraphBuilder::VisitForTypeOf(Expression* expr) {
void HGraphBuilder::VisitForControl(Expression* expr,
HBasicBlock* true_block,
HBasicBlock* false_block) {
- TestContext for_test(this, expr, true_block, false_block);
+ TestContext for_test(this, expr, oracle(), true_block, false_block);
Visit(expr);
}
-HValue* HGraphBuilder::VisitArgument(Expression* expr) {
- VisitForValue(expr);
- if (HasStackOverflow() || current_block() == NULL) return NULL;
- HValue* value = Pop();
- Push(AddInstruction(new(zone()) HPushArgument(value)));
- return value;
+void HGraphBuilder::VisitArgument(Expression* expr) {
+ CHECK_ALIVE(VisitForValue(expr));
+ Push(AddInstruction(new(zone()) HPushArgument(Pop())));
}
@@ -2670,7 +3242,7 @@ HGraph* HGraphBuilder::CreateGraph() {
HEnvironment* initial_env = environment()->CopyWithoutHistory();
HBasicBlock* body_entry = CreateBasicBlock(initial_env);
current_block()->Goto(body_entry);
- body_entry->SetJoinId(AstNode::kFunctionEntryId);
+ body_entry->SetJoinId(BailoutId::FunctionEntry());
set_current_block(body_entry);
// Handle implicit declaration of the function name in named function
@@ -2679,7 +3251,7 @@ HGraph* HGraphBuilder::CreateGraph() {
VisitVariableDeclaration(scope->function());
}
VisitDeclarations(scope->declarations());
- AddSimulate(AstNode::kDeclarationsId);
+ AddSimulate(BailoutId::Declarations());
HValue* context = environment()->LookupContext();
AddInstruction(
@@ -2693,50 +3265,78 @@ HGraph* HGraphBuilder::CreateGraph() {
current_block()->FinishExit(instr);
set_current_block(NULL);
}
+
+ // If the checksum of the number of type info changes is the same as the
+ // last time this function was compiled, then this recompile is likely not
+ // due to missing/inadequate type feedback, but rather too aggressive
+ // optimization. Disable optimistic LICM in that case.
+ Handle<Code> unoptimized_code(info()->shared_info()->code());
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+ Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
+ Handle<TypeFeedbackInfo> type_info(
+ Handle<TypeFeedbackInfo>::cast(maybe_type_info));
+ int checksum = type_info->own_type_change_checksum();
+ int composite_checksum = graph()->update_type_change_checksum(checksum);
+ graph()->set_use_optimistic_licm(
+ !type_info->matches_inlined_type_change_checksum(composite_checksum));
+ type_info->set_inlined_type_change_checksum(composite_checksum);
}
- graph()->OrderBlocks();
- graph()->AssignDominators();
+ return graph();
+}
+
+bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
+ *bailout_reason = SmartArrayPointer<char>();
+ OrderBlocks();
+ AssignDominators();
#ifdef DEBUG
// Do a full verify after building the graph and computing dominators.
- graph()->Verify(true);
+ Verify(true);
#endif
- graph()->PropagateDeoptimizingMark();
- if (!graph()->CheckConstPhiUses()) {
- Bailout("Unsupported phi use of const variable");
- return NULL;
+ PropagateDeoptimizingMark();
+ if (!CheckConstPhiUses()) {
+ *bailout_reason = SmartArrayPointer<char>(StrDup(
+ "Unsupported phi use of const variable"));
+ return false;
}
- graph()->EliminateRedundantPhis();
- if (!graph()->CheckArgumentsPhiUses()) {
- Bailout("Unsupported phi use of arguments");
- return NULL;
+ EliminateRedundantPhis();
+ if (!CheckArgumentsPhiUses()) {
+ *bailout_reason = SmartArrayPointer<char>(StrDup(
+ "Unsupported phi use of arguments"));
+ return false;
}
- if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis();
- graph()->CollectPhis();
+ if (FLAG_eliminate_dead_phis) EliminateUnreachablePhis();
+ CollectPhis();
- if (graph()->has_osr_loop_entry()) {
- const ZoneList<HPhi*>* phis = graph()->osr_loop_entry()->phis();
+ if (has_osr_loop_entry()) {
+ const ZoneList<HPhi*>* phis = osr_loop_entry()->phis();
for (int j = 0; j < phis->length(); j++) {
HPhi* phi = phis->at(j);
- graph()->osr_values()->at(phi->merged_index())->set_incoming_value(phi);
+ osr_values()->at(phi->merged_index())->set_incoming_value(phi);
}
}
- HInferRepresentation rep(graph());
+ HInferRepresentation rep(this);
rep.Analyze();
- graph()->MarkDeoptimizeOnUndefined();
- graph()->InsertRepresentationChanges();
+ MarkDeoptimizeOnUndefined();
+ InsertRepresentationChanges();
+
+ InitializeInferredTypes();
- graph()->InitializeInferredTypes();
- graph()->Canonicalize();
+ // Must be performed before canonicalization to ensure that Canonicalize
+ // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
+ // zero.
+ ComputeSafeUint32Operations();
+
+ Canonicalize();
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
- HPhase phase("H_Global value numbering", graph());
- HGlobalValueNumberer gvn(graph(), info());
+ HPhase phase("H_Global value numbering", this);
+ HGlobalValueNumberer gvn(this, info());
bool removed_side_effects = gvn.Analyze();
// Trigger a second analysis pass to further eliminate duplicate values that
// could only be discovered by removing side-effect-generating instructions
@@ -2748,19 +3348,20 @@ HGraph* HGraphBuilder::CreateGraph() {
}
if (FLAG_use_range) {
- HRangeAnalysis rangeAnalysis(graph());
+ HRangeAnalysis rangeAnalysis(this);
rangeAnalysis.Analyze();
}
- graph()->ComputeMinusZeroChecks();
+ ComputeMinusZeroChecks();
// Eliminate redundant stack checks on backwards branches.
- HStackCheckEliminator sce(graph());
+ HStackCheckEliminator sce(this);
sce.Process();
- graph()->EliminateRedundantBoundsChecks();
- graph()->DehoistSimpleArrayIndexComputations();
+ EliminateRedundantBoundsChecks();
+ DehoistSimpleArrayIndexComputations();
+ if (FLAG_dead_code_elimination) DeadCodeElimination();
- return graph();
+ return true;
}
@@ -2785,6 +3386,8 @@ class BoundsCheckKey : public ZoneObject {
static BoundsCheckKey* Create(Zone* zone,
HBoundsCheck* check,
int32_t* offset) {
+ if (!check->index()->representation().IsInteger32()) return NULL;
+
HValue* index_base = NULL;
HConstant* constant = NULL;
bool is_sub = false;
@@ -2851,7 +3454,8 @@ class BoundsCheckBbData: public ZoneObject {
int32_t LowerOffset() const { return lower_offset_; }
int32_t UpperOffset() const { return upper_offset_; }
HBasicBlock* BasicBlock() const { return basic_block_; }
- HBoundsCheck* Check() const { return check_; }
+ HBoundsCheck* LowerCheck() const { return lower_check_; }
+ HBoundsCheck* UpperCheck() const { return upper_check_; }
BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
@@ -2859,76 +3463,85 @@ class BoundsCheckBbData: public ZoneObject {
return offset >= LowerOffset() && offset <= UpperOffset();
}
- // This method removes new_check and modifies the current check so that it
- // also "covers" what new_check covered.
- // The obvious precondition is that new_check follows Check() in the
- // same basic block, and that new_offset is not covered (otherwise we
- // could simply remove new_check).
- // As a consequence LowerOffset() or UpperOffset() change (the covered
+ bool HasSingleCheck() { return lower_check_ == upper_check_; }
+
+ // The goal of this method is to modify either upper_offset_ or
+ // lower_offset_ so that also new_offset is covered (the covered
// range grows).
//
- // In the general case the check covering the current range should be like
- // these two checks:
- // 0 <= Key()->IndexBase() + LowerOffset()
- // Key()->IndexBase() + UpperOffset() < Key()->Length()
- //
- // We can transform the second check like this:
- // Key()->IndexBase() + LowerOffset() <
- // Key()->Length() + (LowerOffset() - UpperOffset())
- // so we can handle both checks with a single unsigned comparison.
+ // The precondition is that new_check follows UpperCheck() and
+ // LowerCheck() in the same basic block, and that new_offset is not
+ // covered (otherwise we could simply remove new_check).
//
- // The bulk of this method changes Check()->index() and Check()->length()
- // replacing them with new HAdd instructions to perform the transformation
- // described above.
+ // If HasSingleCheck() is true then new_check is added as "second check"
+ // (either upper or lower; note that HasSingleCheck() becomes false).
+ // Otherwise one of the current checks is modified so that it also covers
+ // new_offset, and new_check is removed.
void CoverCheck(HBoundsCheck* new_check,
int32_t new_offset) {
ASSERT(new_check->index()->representation().IsInteger32());
+ bool keep_new_check = false;
if (new_offset > upper_offset_) {
upper_offset_ = new_offset;
+ if (HasSingleCheck()) {
+ keep_new_check = true;
+ upper_check_ = new_check;
+ } else {
+ BuildOffsetAdd(upper_check_,
+ &added_upper_index_,
+ &added_upper_offset_,
+ Key()->IndexBase(),
+ new_check->index()->representation(),
+ new_offset);
+ upper_check_->SetOperandAt(0, added_upper_index_);
+ }
} else if (new_offset < lower_offset_) {
lower_offset_ = new_offset;
+ if (HasSingleCheck()) {
+ keep_new_check = true;
+ lower_check_ = new_check;
+ } else {
+ BuildOffsetAdd(lower_check_,
+ &added_lower_index_,
+ &added_lower_offset_,
+ Key()->IndexBase(),
+ new_check->index()->representation(),
+ new_offset);
+ lower_check_->SetOperandAt(0, added_lower_index_);
+ }
} else {
ASSERT(false);
}
- BuildOffsetAdd(&added_index_,
- &added_index_offset_,
- Key()->IndexBase(),
- new_check->index()->representation(),
- lower_offset_);
- Check()->SetOperandAt(0, added_index_);
- BuildOffsetAdd(&added_length_,
- &added_length_offset_,
- Key()->Length(),
- new_check->length()->representation(),
- lower_offset_ - upper_offset_);
- Check()->SetOperandAt(1, added_length_);
-
- new_check->DeleteAndReplaceWith(NULL);
+ if (!keep_new_check) {
+ new_check->DeleteAndReplaceWith(NULL);
+ }
}
void RemoveZeroOperations() {
- RemoveZeroAdd(&added_index_, &added_index_offset_);
- RemoveZeroAdd(&added_length_, &added_length_offset_);
+ RemoveZeroAdd(&added_lower_index_, &added_lower_offset_);
+ RemoveZeroAdd(&added_upper_index_, &added_upper_offset_);
}
BoundsCheckBbData(BoundsCheckKey* key,
int32_t lower_offset,
int32_t upper_offset,
HBasicBlock* bb,
- HBoundsCheck* check,
+ HBoundsCheck* lower_check,
+ HBoundsCheck* upper_check,
BoundsCheckBbData* next_in_bb,
BoundsCheckBbData* father_in_dt)
: key_(key),
lower_offset_(lower_offset),
upper_offset_(upper_offset),
basic_block_(bb),
- check_(check),
- added_index_offset_(NULL),
- added_index_(NULL),
- added_length_offset_(NULL),
- added_length_(NULL),
+ lower_check_(lower_check),
+ upper_check_(upper_check),
+ added_lower_index_(NULL),
+ added_lower_offset_(NULL),
+ added_upper_index_(NULL),
+ added_upper_offset_(NULL),
next_in_bb_(next_in_bb),
father_in_dt_(father_in_dt) { }
@@ -2937,29 +3550,33 @@ class BoundsCheckBbData: public ZoneObject {
int32_t lower_offset_;
int32_t upper_offset_;
HBasicBlock* basic_block_;
- HBoundsCheck* check_;
- HConstant* added_index_offset_;
- HAdd* added_index_;
- HConstant* added_length_offset_;
- HAdd* added_length_;
+ HBoundsCheck* lower_check_;
+ HBoundsCheck* upper_check_;
+ HAdd* added_lower_index_;
+ HConstant* added_lower_offset_;
+ HAdd* added_upper_index_;
+ HConstant* added_upper_offset_;
BoundsCheckBbData* next_in_bb_;
BoundsCheckBbData* father_in_dt_;
- void BuildOffsetAdd(HAdd** add,
+ void BuildOffsetAdd(HBoundsCheck* check,
+ HAdd** add,
HConstant** constant,
HValue* original_value,
Representation representation,
int32_t new_offset) {
HConstant* new_constant = new(BasicBlock()->zone())
- HConstant(Handle<Object>(Smi::FromInt(new_offset)),
- Representation::Integer32());
+ HConstant(new_offset, Representation::Integer32());
if (*add == NULL) {
- new_constant->InsertBefore(Check());
- *add = new(BasicBlock()->zone()) HAdd(NULL,
+ new_constant->InsertBefore(check);
+ // Because of the bounds checks elimination algorithm, the index is always
+ // an HAdd or an HSub here, so we can safely cast to an HBinaryOperation.
+ HValue* context = HBinaryOperation::cast(check->index())->context();
+ *add = new(BasicBlock()->zone()) HAdd(context,
original_value,
new_constant);
(*add)->AssumeRepresentation(representation);
- (*add)->InsertBefore(Check());
+ (*add)->InsertBefore(check);
} else {
new_constant->InsertBefore(*add);
(*constant)->DeleteAndReplaceWith(new_constant);
@@ -2985,20 +3602,22 @@ static bool BoundsCheckKeyMatch(void* key1, void* key2) {
class BoundsCheckTable : private ZoneHashMap {
public:
- BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key) {
+ BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key, Zone* zone) {
return reinterpret_cast<BoundsCheckBbData**>(
- &(Lookup(key, key->Hash(), true)->value));
+ &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
}
- void Insert(BoundsCheckKey* key, BoundsCheckBbData* data) {
- Lookup(key, key->Hash(), true)->value = data;
+ void Insert(BoundsCheckKey* key, BoundsCheckBbData* data, Zone* zone) {
+ Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
}
void Delete(BoundsCheckKey* key) {
Remove(key, key->Hash());
}
- BoundsCheckTable() : ZoneHashMap(BoundsCheckKeyMatch) { }
+ explicit BoundsCheckTable(Zone* zone)
+ : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
+ ZoneAllocationPolicy(zone)) { }
};
@@ -3021,8 +3640,9 @@ void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
int32_t offset;
BoundsCheckKey* key =
- BoundsCheckKey::Create(bb->zone(), check, &offset);
- BoundsCheckBbData** data_p = table->LookupOrInsert(key);
+ BoundsCheckKey::Create(zone(), check, &offset);
+ if (key == NULL) continue;
+ BoundsCheckBbData** data_p = table->LookupOrInsert(key, zone());
BoundsCheckBbData* data = *data_p;
if (data == NULL) {
bb_data_list = new(zone()) BoundsCheckBbData(key,
@@ -3030,6 +3650,7 @@ void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
offset,
bb,
check,
+ check,
bb_data_list,
NULL);
*data_p = bb_data_list;
@@ -3044,14 +3665,15 @@ void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
int32_t new_upper_offset = offset > data->UpperOffset()
? offset
: data->UpperOffset();
- bb_data_list = new(bb->zone()) BoundsCheckBbData(key,
- new_lower_offset,
- new_upper_offset,
- bb,
- check,
- bb_data_list,
- data);
- table->Insert(key, bb_data_list);
+ bb_data_list = new(zone()) BoundsCheckBbData(key,
+ new_lower_offset,
+ new_upper_offset,
+ bb,
+ data->LowerCheck(),
+ data->UpperCheck(),
+ bb_data_list,
+ data);
+ table->Insert(key, bb_data_list, zone());
}
}
@@ -3064,7 +3686,7 @@ void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
data = data->NextInBasicBlock()) {
data->RemoveZeroOperations();
if (data->FatherInDominatorTree()) {
- table->Insert(data->Key(), data->FatherInDominatorTree());
+ table->Insert(data->Key(), data->FatherInDominatorTree(), zone());
} else {
table->Delete(data->Key());
}
@@ -3074,14 +3696,14 @@ void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
void HGraph::EliminateRedundantBoundsChecks() {
HPhase phase("H_Eliminate bounds checks", this);
- AssertNoAllocation no_gc;
- BoundsCheckTable checks_table;
+ BoundsCheckTable checks_table(zone());
EliminateRedundantBoundsChecks(entry_block(), &checks_table);
}
static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
HValue* index = array_operation->GetKey();
+ if (!index->representation().IsInteger32()) return;
HConstant* constant;
HValue* subexpression;
@@ -3136,27 +3758,11 @@ void HGraph::DehoistSimpleArrayIndexComputations() {
instr != NULL;
instr = instr->next()) {
ArrayInstructionInterface* array_instruction = NULL;
- if (instr->IsLoadKeyedFastElement()) {
- HLoadKeyedFastElement* op = HLoadKeyedFastElement::cast(instr);
- array_instruction = static_cast<ArrayInstructionInterface*>(op);
- } else if (instr->IsLoadKeyedFastDoubleElement()) {
- HLoadKeyedFastDoubleElement* op =
- HLoadKeyedFastDoubleElement::cast(instr);
- array_instruction = static_cast<ArrayInstructionInterface*>(op);
- } else if (instr->IsLoadKeyedSpecializedArrayElement()) {
- HLoadKeyedSpecializedArrayElement* op =
- HLoadKeyedSpecializedArrayElement::cast(instr);
- array_instruction = static_cast<ArrayInstructionInterface*>(op);
- } else if (instr->IsStoreKeyedFastElement()) {
- HStoreKeyedFastElement* op = HStoreKeyedFastElement::cast(instr);
+ if (instr->IsLoadKeyed()) {
+ HLoadKeyed* op = HLoadKeyed::cast(instr);
array_instruction = static_cast<ArrayInstructionInterface*>(op);
- } else if (instr->IsStoreKeyedFastDoubleElement()) {
- HStoreKeyedFastDoubleElement* op =
- HStoreKeyedFastDoubleElement::cast(instr);
- array_instruction = static_cast<ArrayInstructionInterface*>(op);
- } else if (instr->IsStoreKeyedSpecializedArrayElement()) {
- HStoreKeyedSpecializedArrayElement* op =
- HStoreKeyedSpecializedArrayElement::cast(instr);
+ } else if (instr->IsStoreKeyed()) {
+ HStoreKeyed* op = HStoreKeyed::cast(instr);
array_instruction = static_cast<ArrayInstructionInterface*>(op);
} else {
continue;
@@ -3167,6 +3773,36 @@ void HGraph::DehoistSimpleArrayIndexComputations() {
}
+void HGraph::DeadCodeElimination() {
+ HPhase phase("H_Dead code elimination", this);
+ ZoneList<HInstruction*> worklist(blocks_.length(), zone());
+ for (int i = 0; i < blocks()->length(); ++i) {
+ for (HInstruction* instr = blocks()->at(i)->first();
+ instr != NULL;
+ instr = instr->next()) {
+ if (instr->IsDead()) worklist.Add(instr, zone());
+ }
+ }
+
+ while (!worklist.is_empty()) {
+ HInstruction* instr = worklist.RemoveLast();
+ if (FLAG_trace_dead_code_elimination) {
+ HeapStringAllocator allocator;
+ StringStream stream(&allocator);
+ instr->PrintNameTo(&stream);
+ stream.Add(" = ");
+ instr->PrintTo(&stream);
+ PrintF("[removing dead instruction %s]\n", *stream.ToCString());
+ }
+ instr->DeleteAndReplaceWith(NULL);
+ for (int i = 0; i < instr->OperandCount(); ++i) {
+ HValue* operand = instr->OperandAt(i);
+ if (operand->IsDead()) worklist.Add(HInstruction::cast(operand), zone());
+ }
+ }
+}
+
+
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
ASSERT(current_block() != NULL);
current_block()->AddInstruction(instr);
@@ -3174,7 +3810,7 @@ HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
}
-void HGraphBuilder::AddSimulate(int ast_id) {
+void HGraphBuilder::AddSimulate(BailoutId ast_id) {
ASSERT(current_block() != NULL);
current_block()->AddSimulate(ast_id);
}
@@ -3195,9 +3831,9 @@ void HGraphBuilder::PushAndAdd(HInstruction* instr) {
template <class Instruction>
HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
int count = call->argument_count();
- ZoneList<HValue*> arguments(count);
+ ZoneList<HValue*> arguments(count, zone());
for (int i = 0; i < count; ++i) {
- arguments.Add(Pop());
+ arguments.Add(Pop(), zone());
}
while (!arguments.is_empty()) {
@@ -3418,28 +4054,29 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
+ FunctionState* state = function_state();
AstContext* context = call_context();
if (context == NULL) {
// Not an inlined return, so an actual one.
CHECK_ALIVE(VisitForValue(stmt->expression()));
HValue* result = environment()->Pop();
current_block()->FinishExit(new(zone()) HReturn(result));
- } else if (function_state()->is_construct()) {
- // Return from an inlined construct call. In a test context the return
- // value will always evaluate to true, in a value context the return value
- // needs to be a JSObject.
+ } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
+ // Return from an inlined construct call. In a test context the return value
+ // will always evaluate to true, in a value context the return value needs
+ // to be a JSObject.
if (context->IsTest()) {
TestContext* test = TestContext::cast(context);
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(test->if_true(), function_state());
+ current_block()->Goto(test->if_true(), state);
} else if (context->IsEffect()) {
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(function_return(), function_state());
+ current_block()->Goto(function_return(), state);
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
HValue* return_value = Pop();
- HValue* receiver = environment()->Lookup(0);
+ HValue* receiver = environment()->arguments_environment()->Lookup(0);
HHasInstanceTypeAndBranch* typecheck =
new(zone()) HHasInstanceTypeAndBranch(return_value,
FIRST_SPEC_OBJECT_TYPE,
@@ -3449,31 +4086,36 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
typecheck->SetSuccessorAt(0, if_spec_object);
typecheck->SetSuccessorAt(1, not_spec_object);
current_block()->Finish(typecheck);
- if_spec_object->AddLeaveInlined(return_value,
- function_return(),
- function_state());
- not_spec_object->AddLeaveInlined(receiver,
- function_return(),
- function_state());
+ if_spec_object->AddLeaveInlined(return_value, state);
+ not_spec_object->AddLeaveInlined(receiver, state);
+ }
+ } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
+ // Return from an inlined setter call. The returned value is never used, the
+ // value of an assignment is always the value of the RHS of the assignment.
+ CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ if (context->IsTest()) {
+ HValue* rhs = environment()->arguments_environment()->Lookup(1);
+ context->ReturnValue(rhs);
+ } else if (context->IsEffect()) {
+ current_block()->Goto(function_return(), state);
+ } else {
+ ASSERT(context->IsValue());
+ HValue* rhs = environment()->arguments_environment()->Lookup(1);
+ current_block()->AddLeaveInlined(rhs, state);
}
} else {
- // Return from an inlined function, visit the subexpression in the
+ // Return from a normal inlined function. Visit the subexpression in the
// expression context of the call.
if (context->IsTest()) {
TestContext* test = TestContext::cast(context);
- VisitForControl(stmt->expression(),
- test->if_true(),
- test->if_false());
+ VisitForControl(stmt->expression(), test->if_true(), test->if_false());
} else if (context->IsEffect()) {
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(function_return(), function_state());
+ current_block()->Goto(function_return(), state);
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
- HValue* return_value = Pop();
- current_block()->AddLeaveInlined(return_value,
- function_return(),
- function_state());
+ current_block()->AddLeaveInlined(Pop(), state);
}
}
set_current_block(NULL);
@@ -3548,7 +4190,7 @@ void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
}
// 2. Build all the tests, with dangling true branches
- int default_id = AstNode::kNoNumber;
+ BailoutId default_id = BailoutId::None();
for (int i = 0; i < clause_count; ++i) {
CaseClause* clause = clauses->at(i);
if (clause->is_default()) {
@@ -3601,9 +4243,7 @@ void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
HBasicBlock* last_block = current_block();
if (not_string_block != NULL) {
- int join_id = (default_id != AstNode::kNoNumber)
- ? default_id
- : stmt->ExitId();
+ BailoutId join_id = !default_id.IsNone() ? default_id : stmt->ExitId();
last_block = CreateJoin(last_block, not_string_block, join_id);
}
@@ -3693,17 +4333,17 @@ bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
non_osr_entry->Goto(loop_predecessor);
set_current_block(osr_entry);
- int osr_entry_id = statement->OsrEntryId();
+ BailoutId osr_entry_id = statement->OsrEntryId();
int first_expression_index = environment()->first_expression_index();
int length = environment()->length();
ZoneList<HUnknownOSRValue*>* osr_values =
- new(zone()) ZoneList<HUnknownOSRValue*>(length);
+ new(zone()) ZoneList<HUnknownOSRValue*>(length, zone());
for (int i = 0; i < first_expression_index; ++i) {
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
AddInstruction(osr_value);
environment()->Bind(i, osr_value);
- osr_values->Add(osr_value);
+ osr_values->Add(osr_value, zone());
}
if (first_expression_index != length) {
@@ -3712,7 +4352,7 @@ bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
AddInstruction(osr_value);
environment()->Push(osr_value);
- osr_values->Add(osr_value);
+ osr_values->Add(osr_value, zone());
}
}
@@ -3917,15 +4557,14 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
map,
DescriptorArray::kEnumCacheBridgeCacheIndex));
- HInstruction* array_length = AddInstruction(
- new(zone()) HFixedArrayBaseLength(array));
+ HInstruction* enum_length = AddInstruction(new(zone()) HMapEnumLength(map));
HInstruction* start_index = AddInstruction(new(zone()) HConstant(
Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
Push(map);
Push(array);
- Push(array_length);
+ Push(enum_length);
Push(start_index);
HInstruction* index_cache = AddInstruction(
@@ -3963,10 +4602,11 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
set_current_block(loop_body);
HValue* key = AddInstruction(
- new(zone()) HLoadKeyedFastElement(
+ new(zone()) HLoadKeyed(
environment()->ExpressionStackAt(2), // Enum cache.
environment()->ExpressionStackAt(0), // Iteration index.
- HLoadKeyedFastElement::OMIT_HOLE_CHECK));
+ environment()->ExpressionStackAt(0),
+ FAST_ELEMENTS));
// Check if the expected map still matches that of the enumerable.
// If not just deoptimize.
@@ -4121,8 +4761,7 @@ HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty(
}
Handle<GlobalObject> global(info()->global_object());
global->Lookup(*var->name(), lookup);
- if (!lookup->IsFound() ||
- lookup->type() != NORMAL ||
+ if (!lookup->IsNormal() ||
(is_store && lookup->IsReadOnly()) ||
lookup->holder() != *global) {
return kUseGeneric;
@@ -4152,8 +4791,9 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
Variable* variable = expr->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- if (variable->mode() == LET || variable->mode() == CONST_HARMONY) {
- return Bailout("reference to global harmony declared variable");
+ if (IsLexicalVariableMode(variable->mode())) {
+ // TODO(rossberg): should this be an ASSERT?
+ return Bailout("reference to global lexical variable");
}
// Handle known global constants like 'undefined' specially to avoid a
// load from a global cell for them.
@@ -4199,9 +4839,8 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
case Variable::LOCAL: {
HValue* value = environment()->Lookup(variable);
if (value == graph()->GetConstantHole()) {
- ASSERT(variable->mode() == CONST ||
- variable->mode() == CONST_HARMONY ||
- variable->mode() == LET);
+ ASSERT(IsDeclaredVariableMode(variable->mode()) &&
+ variable->mode() != VAR);
return Bailout("reference to uninitialized variable");
}
return ast_context()->ReturnValue(value);
@@ -4233,9 +4872,12 @@ void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
+ Handle<JSFunction> closure = function_state()->compilation_info()->closure();
+ Handle<FixedArray> literals(closure->literals());
HValue* context = environment()->LookupContext();
HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
+ literals,
expr->pattern(),
expr->flags(),
expr->literal_index());
@@ -4243,6 +4885,86 @@ void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+static void LookupInPrototypes(Handle<Map> map,
+ Handle<String> name,
+ LookupResult* lookup) {
+ while (map->prototype()->IsJSObject()) {
+ Handle<JSObject> holder(JSObject::cast(map->prototype()));
+ if (!holder->HasFastProperties()) break;
+ map = Handle<Map>(holder->map());
+ map->LookupDescriptor(*holder, *name, lookup);
+ if (lookup->IsFound()) return;
+ }
+ lookup->NotFound();
+}
+
+
+// Tries to find a JavaScript accessor of the given name in the prototype chain
+// starting at the given map. Return true iff there is one, including the
+// corresponding AccessorPair plus its holder (which could be null when the
+// accessor is found directly in the given map).
+static bool LookupAccessorPair(Handle<Map> map,
+ Handle<String> name,
+ Handle<AccessorPair>* accessors,
+ Handle<JSObject>* holder) {
+ LookupResult lookup(map->GetIsolate());
+
+ // Check for a JavaScript accessor directly in the map.
+ map->LookupDescriptor(NULL, *name, &lookup);
+ if (lookup.IsPropertyCallbacks()) {
+ Handle<Object> callback(lookup.GetValueFromMap(*map));
+ if (!callback->IsAccessorPair()) return false;
+ *accessors = Handle<AccessorPair>::cast(callback);
+ *holder = Handle<JSObject>();
+ return true;
+ }
+
+ // Everything else, e.g. a field, can't be an accessor call.
+ if (lookup.IsFound()) return false;
+
+ // Check for a JavaScript accessor somewhere in the proto chain.
+ LookupInPrototypes(map, name, &lookup);
+ if (lookup.IsPropertyCallbacks()) {
+ Handle<Object> callback(lookup.GetValue());
+ if (!callback->IsAccessorPair()) return false;
+ *accessors = Handle<AccessorPair>::cast(callback);
+ *holder = Handle<JSObject>(lookup.holder());
+ return true;
+ }
+
+ // We haven't found a JavaScript accessor anywhere.
+ return false;
+}
+
+
+static bool LookupGetter(Handle<Map> map,
+ Handle<String> name,
+ Handle<JSFunction>* getter,
+ Handle<JSObject>* holder) {
+ Handle<AccessorPair> accessors;
+ if (LookupAccessorPair(map, name, &accessors, holder) &&
+ accessors->getter()->IsJSFunction()) {
+ *getter = Handle<JSFunction>(JSFunction::cast(accessors->getter()));
+ return true;
+ }
+ return false;
+}
+
+
+static bool LookupSetter(Handle<Map> map,
+ Handle<String> name,
+ Handle<JSFunction>* setter,
+ Handle<JSObject>* holder) {
+ Handle<AccessorPair> accessors;
+ if (LookupAccessorPair(map, name, &accessors, holder) &&
+ accessors->setter()->IsJSFunction()) {
+ *setter = Handle<JSFunction>(JSFunction::cast(accessors->setter()));
+ return true;
+ }
+ return false;
+}
+
+
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
@@ -4258,7 +4980,7 @@ static bool IsFastLiteral(Handle<JSObject> boilerplate,
elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
if (boilerplate->HasFastDoubleElements()) {
*total_size += FixedDoubleArray::SizeFor(elements->length());
- } else if (boilerplate->HasFastElements()) {
+ } else if (boilerplate->HasFastObjectElements()) {
Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
int length = elements->length();
for (int i = 0; i < length; i++) {
@@ -4341,7 +5063,7 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
// of the property values and is the value of the entire expression.
PushAndAdd(literal);
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
@@ -4360,7 +5082,23 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
property->RecordTypeFeedback(oracle());
CHECK_ALIVE(VisitForValue(value));
HValue* value = Pop();
- HInstruction* store = BuildStoreNamed(literal, value, property);
+ Handle<Map> map = property->GetReceiverType();
+ Handle<String> name = property->key()->AsPropertyName();
+ HInstruction* store;
+ if (map.is_null()) {
+ // If we don't know the monomorphic type, do a generic store.
+ CHECK_ALIVE(store = BuildStoreNamedGeneric(literal, name, value));
+ } else {
+#if DEBUG
+ Handle<JSFunction> setter;
+ Handle<JSObject> holder;
+ ASSERT(!LookupSetter(map, name, &setter, &holder));
+#endif
+ CHECK_ALIVE(store = BuildStoreNamedMonomorphic(literal,
+ name,
+ value,
+ map));
+ }
AddInstruction(store);
if (store->HasObservableSideEffects()) AddSimulate(key->id());
} else {
@@ -4457,7 +5195,9 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
HValue* value = Pop();
if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
- elements = new(zone()) HLoadElements(literal);
+ // Pass in literal as dummy depedency, since the receiver always has
+ // elements.
+ elements = new(zone()) HLoadElements(literal, literal);
AddInstruction(elements);
HValue* key = AddInstruction(
@@ -4465,22 +5205,21 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
Representation::Integer32()));
switch (boilerplate_elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
// Smi-only arrays need a smi check.
AddInstruction(new(zone()) HCheckSmi(value));
// Fall through.
case FAST_ELEMENTS:
- AddInstruction(new(zone()) HStoreKeyedFastElement(
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ AddInstruction(new(zone()) HStoreKeyed(
elements,
key,
value,
boilerplate_elements_kind));
break;
- case FAST_DOUBLE_ELEMENTS:
- AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
- key,
- value));
- break;
default:
UNREACHABLE();
break;
@@ -4497,10 +5236,17 @@ static bool ComputeLoadStoreField(Handle<Map> type,
Handle<String> name,
LookupResult* lookup,
bool is_store) {
- type->LookupInDescriptors(NULL, *name, lookup);
- if (!lookup->IsFound()) return false;
- if (lookup->type() == FIELD) return true;
- return is_store && (lookup->type() == MAP_TRANSITION) &&
+ // If we directly find a field, the access can be inlined.
+ type->LookupDescriptor(NULL, *name, lookup);
+ if (lookup->IsField()) return true;
+
+ // For a load, we are out of luck if there is no such field.
+ if (!is_store) return false;
+
+ // 2nd chance: A store into a non-existent field can still be inlined if we
+ // have a matching transition and some room left in the object.
+ type->LookupTransition(NULL, *name, lookup);
+ return lookup->IsTransitionToField(*type) &&
(type->unused_property_fields() > 0);
}
@@ -4508,8 +5254,8 @@ static bool ComputeLoadStoreField(Handle<Map> type,
static int ComputeLoadStoreFieldIndex(Handle<Map> type,
Handle<String> name,
LookupResult* lookup) {
- ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
- if (lookup->type() == FIELD) {
+ ASSERT(lookup->IsField() || lookup->IsTransitionToField(*type));
+ if (lookup->IsField()) {
return lookup->GetLocalFieldIndexFromMap(*type);
} else {
Map* transition = lookup->GetTransitionMapFromMap(*type);
@@ -4518,31 +5264,60 @@ static int ComputeLoadStoreFieldIndex(Handle<Map> type,
}
+void HGraphBuilder::AddCheckMapsWithTransitions(HValue* object,
+ Handle<Map> map) {
+ AddInstruction(new(zone()) HCheckNonSmi(object));
+ AddInstruction(HCheckMaps::NewWithTransitions(object, map, zone()));
+}
+
+
HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
Handle<String> name,
HValue* value,
- Handle<Map> type,
- LookupResult* lookup,
- bool smi_and_map_check) {
- if (smi_and_map_check) {
- AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(HCheckMaps::NewWithTransitions(object, type));
+ Handle<Map> map,
+ LookupResult* lookup) {
+ ASSERT(lookup->IsFound());
+ // If the property does not exist yet, we have to check that it wasn't made
+ // readonly or turned into a setter by some meanwhile modifications on the
+ // prototype chain.
+ if (!lookup->IsProperty() && map->prototype()->IsJSReceiver()) {
+ Object* proto = map->prototype();
+ // First check that the prototype chain isn't affected already.
+ LookupResult proto_result(isolate());
+ proto->Lookup(*name, &proto_result);
+ if (proto_result.IsProperty()) {
+ // If the inherited property could induce readonly-ness, bail out.
+ if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
+ Bailout("improper object on prototype chain for store");
+ return NULL;
+ }
+ // We only need to check up to the preexisting property.
+ proto = proto_result.holder();
+ } else {
+ // Otherwise, find the top prototype.
+ while (proto->GetPrototype()->IsJSObject()) proto = proto->GetPrototype();
+ ASSERT(proto->GetPrototype()->IsNull());
+ }
+ ASSERT(proto->IsJSObject());
+ AddInstruction(new(zone()) HCheckPrototypeMaps(
+ Handle<JSObject>(JSObject::cast(map->prototype())),
+ Handle<JSObject>(JSObject::cast(proto))));
}
- int index = ComputeLoadStoreFieldIndex(type, name, lookup);
+ int index = ComputeLoadStoreFieldIndex(map, name, lookup);
bool is_in_object = index < 0;
int offset = index * kPointerSize;
if (index < 0) {
// Negative property indices are in-object properties, indexed
// from the end of the fixed part of the object.
- offset += type->instance_size();
+ offset += map->instance_size();
} else {
offset += FixedArray::kHeaderSize;
}
HStoreNamedField* instr =
new(zone()) HStoreNamedField(object, name, value, is_in_object, offset);
- if (lookup->type() == MAP_TRANSITION) {
- Handle<Map> transition(lookup->GetTransitionMapFromMap(*type));
+ if (lookup->IsTransitionToField(*map)) {
+ Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
instr->set_transition(transition);
// TODO(fschneider): Record the new map type of the object in the IR to
// enable elimination of redundant checks after the transition store.
@@ -4565,44 +5340,31 @@ HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
}
-HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
+HInstruction* HGraphBuilder::BuildCallSetter(HValue* object,
HValue* value,
- ObjectLiteral::Property* prop) {
- Literal* key = prop->key()->AsLiteral();
- Handle<String> name = Handle<String>::cast(key->handle());
- ASSERT(!name.is_null());
-
- LookupResult lookup(isolate());
- Handle<Map> type = prop->GetReceiverType();
- bool is_monomorphic = prop->IsMonomorphic() &&
- ComputeLoadStoreField(type, name, &lookup, true);
-
- return is_monomorphic
- ? BuildStoreNamedField(object, name, value, type, &lookup,
- true) // Needs smi and map check.
- : BuildStoreNamedGeneric(object, name, value);
+ Handle<Map> map,
+ Handle<JSFunction> setter,
+ Handle<JSObject> holder) {
+ AddCheckConstantFunction(holder, object, map);
+ AddInstruction(new(zone()) HPushArgument(object));
+ AddInstruction(new(zone()) HPushArgument(value));
+ return new(zone()) HCallConstantFunction(setter, 2);
}
-HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
- HValue* value,
- Expression* expr) {
- Property* prop = (expr->AsProperty() != NULL)
- ? expr->AsProperty()
- : expr->AsAssignment()->target()->AsProperty();
- Literal* key = prop->key()->AsLiteral();
- Handle<String> name = Handle<String>::cast(key->handle());
- ASSERT(!name.is_null());
-
+HInstruction* HGraphBuilder::BuildStoreNamedMonomorphic(HValue* object,
+ Handle<String> name,
+ HValue* value,
+ Handle<Map> map) {
+ // Handle a store to a known field.
LookupResult lookup(isolate());
- SmallMapList* types = expr->GetReceiverTypes();
- bool is_monomorphic = expr->IsMonomorphic() &&
- ComputeLoadStoreField(types->first(), name, &lookup, true);
+ if (ComputeLoadStoreField(map, name, &lookup, true)) {
+ AddCheckMapsWithTransitions(object, map);
+ return BuildStoreNamedField(object, name, value, map, &lookup);
+ }
- return is_monomorphic
- ? BuildStoreNamedField(object, name, value, types->first(), &lookup,
- true) // Needs smi and map check.
- : BuildStoreNamedGeneric(object, name, value);
+ // No luck, do a generic store.
+ return BuildStoreNamedGeneric(object, name, value);
}
@@ -4642,16 +5404,18 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
// Use monomorphic load if property lookup results in the same field index
// for all maps. Requires special map check on the set of all handled maps.
+ AddInstruction(new(zone()) HCheckNonSmi(object));
HInstruction* instr;
if (count == types->length() && is_monomorphic_field) {
- AddInstruction(new(zone()) HCheckMaps(object, types));
- instr = BuildLoadNamedField(object, expr, map, &lookup, false);
+ AddInstruction(new(zone()) HCheckMaps(object, types, zone()));
+ instr = BuildLoadNamedField(object, map, &lookup);
} else {
HValue* context = environment()->LookupContext();
instr = new(zone()) HLoadNamedFieldPolymorphic(context,
object,
types,
- name);
+ name,
+ zone());
}
instr->set_position(expr->position());
@@ -4685,8 +5449,9 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
current_block()->Finish(compare);
set_current_block(if_true);
- HInstruction* instr =
- BuildStoreNamedField(object, name, value, map, &lookup, false);
+ HInstruction* instr;
+ CHECK_ALIVE(instr =
+ BuildStoreNamedField(object, name, value, map, &lookup));
instr->set_position(expr->position());
// Goto will add the HSimulate for the store.
AddInstruction(instr);
@@ -4737,41 +5502,66 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
CHECK_ALIVE(VisitForValue(prop->obj()));
- HValue* value = NULL;
- HInstruction* instr = NULL;
-
if (prop->key()->IsPropertyName()) {
// Named store.
CHECK_ALIVE(VisitForValue(expr->value()));
- value = Pop();
- HValue* object = Pop();
+ HValue* value = environment()->ExpressionStackAt(0);
+ HValue* object = environment()->ExpressionStackAt(1);
Literal* key = prop->key()->AsLiteral();
Handle<String> name = Handle<String>::cast(key->handle());
ASSERT(!name.is_null());
+ HInstruction* instr = NULL;
SmallMapList* types = expr->GetReceiverTypes();
- LookupResult lookup(isolate());
-
- if (expr->IsMonomorphic()) {
- instr = BuildStoreNamed(object, value, expr);
+ bool monomorphic = expr->IsMonomorphic();
+ Handle<Map> map;
+ if (monomorphic) {
+ map = types->first();
+ if (map->is_dictionary_map()) monomorphic = false;
+ }
+ if (monomorphic) {
+ Handle<JSFunction> setter;
+ Handle<JSObject> holder;
+ if (LookupSetter(map, name, &setter, &holder)) {
+ AddCheckConstantFunction(holder, object, map);
+ if (FLAG_inline_accessors && TryInlineSetter(setter, expr, value)) {
+ return;
+ }
+ Drop(2);
+ AddInstruction(new(zone()) HPushArgument(object));
+ AddInstruction(new(zone()) HPushArgument(value));
+ instr = new(zone()) HCallConstantFunction(setter, 2);
+ } else {
+ Drop(2);
+ CHECK_ALIVE(instr = BuildStoreNamedMonomorphic(object,
+ name,
+ value,
+ map));
+ }
} else if (types != NULL && types->length() > 1) {
- HandlePolymorphicStoreNamedField(expr, object, value, types, name);
- return;
-
+ Drop(2);
+ return HandlePolymorphicStoreNamedField(expr, object, value, types, name);
} else {
+ Drop(2);
instr = BuildStoreNamedGeneric(object, name, value);
}
+ Push(value);
+ instr->set_position(expr->position());
+ AddInstruction(instr);
+ if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
+ return ast_context()->ReturnValue(Pop());
+
} else {
// Keyed store.
CHECK_ALIVE(VisitForValue(prop->key()));
CHECK_ALIVE(VisitForValue(expr->value()));
- value = Pop();
+ HValue* value = Pop();
HValue* key = Pop();
HValue* object = Pop();
bool has_side_effects = false;
@@ -4784,11 +5574,6 @@ void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
AddSimulate(expr->AssignmentId());
return ast_context()->ReturnValue(Pop());
}
- Push(value);
- instr->set_position(expr->position());
- AddInstruction(instr);
- if (instr->HasObservableSideEffects()) AddSimulate(expr->AssignmentId());
- return ast_context()->ReturnValue(Pop());
}
@@ -4798,7 +5583,7 @@ void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
HValue* value,
int position,
- int ast_id) {
+ BailoutId ast_id) {
LookupResult lookup(isolate());
GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
if (type == kUseCell) {
@@ -4911,23 +5696,36 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
return ast_context()->ReturnValue(Pop());
} else if (prop != NULL) {
- prop->RecordTypeFeedback(oracle());
+ prop->RecordTypeFeedback(oracle(), zone());
if (prop->key()->IsPropertyName()) {
// Named property.
CHECK_ALIVE(VisitForValue(prop->obj()));
- HValue* obj = Top();
-
- HInstruction* load = NULL;
- if (prop->IsMonomorphic()) {
- Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
- Handle<Map> map = prop->GetReceiverTypes()->first();
- load = BuildLoadNamed(obj, prop, map, name);
+ HValue* object = Top();
+
+ Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
+ Handle<Map> map;
+ HInstruction* load;
+ bool monomorphic = prop->IsMonomorphic();
+ if (monomorphic) {
+ map = prop->GetReceiverTypes()->first();
+ // We can't generate code for a monomorphic dict mode load so
+ // just pretend it is not monomorphic.
+ if (map->is_dictionary_map()) monomorphic = false;
+ }
+ if (monomorphic) {
+ Handle<JSFunction> getter;
+ Handle<JSObject> holder;
+ if (LookupGetter(map, name, &getter, &holder)) {
+ load = BuildCallGetter(object, map, getter, holder);
+ } else {
+ load = BuildLoadNamedMonomorphic(object, name, prop, map);
+ }
} else {
- load = BuildLoadNamedGeneric(obj, prop);
+ load = BuildLoadNamedGeneric(object, name, prop);
}
PushAndAdd(load);
- if (load->HasObservableSideEffects()) AddSimulate(expr->CompoundLoadId());
+ if (load->HasObservableSideEffects()) AddSimulate(prop->LoadId());
CHECK_ALIVE(VisitForValue(expr->value()));
HValue* right = Pop();
@@ -4937,7 +5735,22 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
PushAndAdd(instr);
if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
- HInstruction* store = BuildStoreNamed(obj, instr, prop);
+ HInstruction* store;
+ if (!monomorphic) {
+ // If we don't know the monomorphic type, do a generic store.
+ CHECK_ALIVE(store = BuildStoreNamedGeneric(object, name, instr));
+ } else {
+ Handle<JSFunction> setter;
+ Handle<JSObject> holder;
+ if (LookupSetter(map, name, &setter, &holder)) {
+ store = BuildCallSetter(object, instr, map, setter, holder);
+ } else {
+ CHECK_ALIVE(store = BuildStoreNamedMonomorphic(object,
+ name,
+ instr,
+ map));
+ }
+ }
AddInstruction(store);
// Drop the simulated receiver and value. Return the value.
Drop(2);
@@ -4954,11 +5767,11 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
bool has_side_effects = false;
HValue* load = HandleKeyedElementAccess(
- obj, key, NULL, prop, expr->CompoundLoadId(), RelocInfo::kNoPosition,
+ obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition,
false, // is_store
&has_side_effects);
Push(load);
- if (has_side_effects) AddSimulate(expr->CompoundLoadId());
+ if (has_side_effects) AddSimulate(prop->LoadId());
CHECK_ALIVE(VisitForValue(expr->value()));
@@ -4969,7 +5782,7 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
PushAndAdd(instr);
if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
RelocInfo::kNoPosition,
true, // is_store
@@ -5017,7 +5830,7 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
// We insert a use of the old value to detect unsupported uses of const
// variables (e.g. initialization inside a loop).
HValue* old_value = environment()->Lookup(var);
- AddInstruction(new HUseConst(old_value));
+ AddInstruction(new(zone()) HUseConst(old_value));
}
} else if (var->mode() == CONST_HARMONY) {
if (expr->op() != Token::INIT_CONST_HARMONY) {
@@ -5138,20 +5951,13 @@ void HGraphBuilder::VisitThrow(Throw* expr) {
HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
- Property* expr,
- Handle<Map> type,
- LookupResult* lookup,
- bool smi_and_map_check) {
- if (smi_and_map_check) {
- AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(HCheckMaps::NewWithTransitions(object, type));
- }
-
- int index = lookup->GetLocalFieldIndexFromMap(*type);
+ Handle<Map> map,
+ LookupResult* lookup) {
+ int index = lookup->GetLocalFieldIndexFromMap(*map);
if (index < 0) {
// Negative property indices are in-object properties, indexed
// from the end of the fixed part of the object.
- int offset = (index * kPointerSize) + type->instance_size();
+ int offset = (index * kPointerSize) + map->instance_size();
return new(zone()) HLoadNamedField(object, true, offset);
} else {
// Non-negative property indices are in the properties array.
@@ -5161,39 +5967,62 @@ HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
}
-HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
+HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* object,
+ Handle<String> name,
Property* expr) {
if (expr->IsUninitialized() && !FLAG_always_opt) {
AddInstruction(new(zone()) HSoftDeoptimize);
current_block()->MarkAsDeoptimizing();
}
- ASSERT(expr->key()->IsPropertyName());
- Handle<Object> name = expr->key()->AsLiteral()->handle();
HValue* context = environment()->LookupContext();
- return new(zone()) HLoadNamedGeneric(context, obj, name);
+ return new(zone()) HLoadNamedGeneric(context, object, name);
}
-HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
- Property* expr,
- Handle<Map> map,
- Handle<String> name) {
+HInstruction* HGraphBuilder::BuildCallGetter(HValue* object,
+ Handle<Map> map,
+ Handle<JSFunction> getter,
+ Handle<JSObject> holder) {
+ AddCheckConstantFunction(holder, object, map);
+ AddInstruction(new(zone()) HPushArgument(object));
+ return new(zone()) HCallConstantFunction(getter, 1);
+}
+
+
+HInstruction* HGraphBuilder::BuildLoadNamedMonomorphic(HValue* object,
+ Handle<String> name,
+ Property* expr,
+ Handle<Map> map) {
+ // Handle a load from a known field.
+ ASSERT(!map->is_dictionary_map());
LookupResult lookup(isolate());
- map->LookupInDescriptors(NULL, *name, &lookup);
- if (lookup.IsFound() && lookup.type() == FIELD) {
- return BuildLoadNamedField(obj,
- expr,
- map,
- &lookup,
- true);
- } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
- AddInstruction(new(zone()) HCheckNonSmi(obj));
- AddInstruction(HCheckMaps::NewWithTransitions(obj, map));
+ map->LookupDescriptor(NULL, *name, &lookup);
+ if (lookup.IsField()) {
+ AddCheckMapsWithTransitions(object, map);
+ return BuildLoadNamedField(object, map, &lookup);
+ }
+
+ // Handle a load of a constant known function.
+ if (lookup.IsConstantFunction()) {
+ AddCheckMapsWithTransitions(object, map);
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
return new(zone()) HConstant(function, Representation::Tagged());
- } else {
- return BuildLoadNamedGeneric(obj, expr);
}
+
+ // Handle a load from a known field somewhere in the protoype chain.
+ LookupInPrototypes(map, name, &lookup);
+ if (lookup.IsField()) {
+ Handle<JSObject> prototype(JSObject::cast(map->prototype()));
+ Handle<JSObject> holder(lookup.holder());
+ Handle<Map> holder_map(holder->map());
+ AddCheckMapsWithTransitions(object, map);
+ HInstruction* holder_value =
+ AddInstruction(new(zone()) HCheckPrototypeMaps(prototype, holder));
+ return BuildLoadNamedField(holder_value, holder_map, &lookup);
+ }
+
+ // No luck, do a generic load.
+ return BuildLoadNamedGeneric(object, name, expr);
}
@@ -5208,6 +6037,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
HValue* external_elements,
HValue* checked_key,
HValue* val,
+ HValue* dependency,
ElementsKind elements_kind,
bool is_store) {
if (is_store) {
@@ -5235,20 +6065,31 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
break;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
- return new(zone()) HStoreKeyedSpecializedArrayElement(
- external_elements, checked_key, val, elements_kind);
+ return new(zone()) HStoreKeyed(external_elements,
+ checked_key,
+ val,
+ elements_kind);
} else {
ASSERT(val == NULL);
- return new(zone()) HLoadKeyedSpecializedArrayElement(
- external_elements, checked_key, elements_kind);
+ HLoadKeyed* load =
+ new(zone()) HLoadKeyed(
+ external_elements, checked_key, dependency, elements_kind);
+ if (FLAG_opt_safe_uint32_operations &&
+ elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
+ graph()->RecordUint32Instruction(load);
+ }
+ return load;
}
}
@@ -5256,20 +6097,22 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
HValue* checked_key,
HValue* val,
+ HValue* load_dependency,
ElementsKind elements_kind,
bool is_store) {
if (is_store) {
ASSERT(val != NULL);
switch (elements_kind) {
- case FAST_DOUBLE_ELEMENTS:
- return new(zone()) HStoreKeyedFastDoubleElement(
- elements, checked_key, val);
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
// Smi-only arrays need a smi check.
AddInstruction(new(zone()) HCheckSmi(val));
// Fall through.
case FAST_ELEMENTS:
- return new(zone()) HStoreKeyedFastElement(
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ return new(zone()) HStoreKeyed(
elements, checked_key, val, elements_kind);
default:
UNREACHABLE();
@@ -5277,57 +6120,144 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
}
}
// It's an element load (!is_store).
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key);
- } else { // FAST_ELEMENTS or FAST_SMI_ONLY_ELEMENTS.
- return new(zone()) HLoadKeyedFastElement(elements, checked_key);
- }
+ return new(zone()) HLoadKeyed(elements,
+ checked_key,
+ load_dependency,
+ elements_kind);
}
HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
+ HValue* dependency,
Handle<Map> map,
bool is_store) {
- HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMaps(object, map));
- bool fast_smi_only_elements = map->has_fast_smi_only_elements();
- bool fast_elements = map->has_fast_elements();
- HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
+ HCheckMaps* mapcheck = new(zone()) HCheckMaps(object, map,
+ zone(), dependency);
+ AddInstruction(mapcheck);
+ if (dependency) {
+ mapcheck->ClearGVNFlag(kDependsOnElementsKind);
+ }
+ return BuildUncheckedMonomorphicElementAccess(object, key, val,
+ mapcheck, map, is_store);
+}
+
+
+HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
+ HValue* object,
+ HValue* key,
+ HValue* val,
+ HCheckMaps* mapcheck,
+ Handle<Map> map,
+ bool is_store) {
+ // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
+ // on a HElementsTransition instruction. The flag can also be removed if the
+ // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
+ // ElementsKind transitions. Finally, the dependency can be removed for stores
+ // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
+ // generated store code.
+ if ((map->elements_kind() == FAST_HOLEY_ELEMENTS) ||
+ (map->elements_kind() == FAST_ELEMENTS && is_store)) {
+ mapcheck->ClearGVNFlag(kDependsOnElementsKind);
+ }
+ bool fast_smi_only_elements = map->has_fast_smi_elements();
+ bool fast_elements = map->has_fast_object_elements();
+ HInstruction* elements =
+ AddInstruction(new(zone()) HLoadElements(object, mapcheck));
if (is_store && (fast_elements || fast_smi_only_elements)) {
- AddInstruction(new(zone()) HCheckMaps(
- elements, isolate()->factory()->fixed_array_map()));
+ HCheckMaps* check_cow_map = new(zone()) HCheckMaps(
+ elements, isolate()->factory()->fixed_array_map(), zone());
+ check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
+ AddInstruction(check_cow_map);
}
HInstruction* length = NULL;
HInstruction* checked_key = NULL;
if (map->has_external_array_elements()) {
length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
+ checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
+ ALLOW_SMI_KEY));
HLoadExternalArrayPointer* external_elements =
new(zone()) HLoadExternalArrayPointer(elements);
AddInstruction(external_elements);
- return BuildExternalArrayElementAccess(external_elements, checked_key,
- val, map->elements_kind(), is_store);
+ return BuildExternalArrayElementAccess(
+ external_elements, checked_key, val, mapcheck,
+ map->elements_kind(), is_store);
}
ASSERT(fast_smi_only_elements ||
fast_elements ||
map->has_fast_double_elements());
if (map->instance_type() == JS_ARRAY_TYPE) {
- length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
+ length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck,
+ HType::Smi()));
} else {
length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
}
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
- return BuildFastElementAccess(elements, checked_key, val,
+ checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
+ ALLOW_SMI_KEY));
+ return BuildFastElementAccess(elements, checked_key, val, mapcheck,
map->elements_kind(), is_store);
}
+HInstruction* HGraphBuilder::TryBuildConsolidatedElementLoad(
+ HValue* object,
+ HValue* key,
+ HValue* val,
+ SmallMapList* maps) {
+ // For polymorphic loads of similar elements kinds (i.e. all tagged or all
+ // double), always use the "worst case" code without a transition. This is
+ // much faster than transitioning the elements to the worst case, trading a
+ // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
+ bool has_double_maps = false;
+ bool has_smi_or_object_maps = false;
+ bool has_js_array_access = false;
+ bool has_non_js_array_access = false;
+ Handle<Map> most_general_consolidated_map;
+ for (int i = 0; i < maps->length(); ++i) {
+ Handle<Map> map = maps->at(i);
+ // Don't allow mixing of JSArrays with JSObjects.
+ if (map->instance_type() == JS_ARRAY_TYPE) {
+ if (has_non_js_array_access) return NULL;
+ has_js_array_access = true;
+ } else if (has_js_array_access) {
+ return NULL;
+ } else {
+ has_non_js_array_access = true;
+ }
+ // Don't allow mixed, incompatible elements kinds.
+ if (map->has_fast_double_elements()) {
+ if (has_smi_or_object_maps) return NULL;
+ has_double_maps = true;
+ } else if (map->has_fast_smi_or_object_elements()) {
+ if (has_double_maps) return NULL;
+ has_smi_or_object_maps = true;
+ } else {
+ return NULL;
+ }
+ // Remember the most general elements kind, the code for its load will
+ // properly handle all of the more specific cases.
+ if ((i == 0) || IsMoreGeneralElementsKindTransition(
+ most_general_consolidated_map->elements_kind(),
+ map->elements_kind())) {
+ most_general_consolidated_map = map;
+ }
+ }
+ if (!has_double_maps && !has_smi_or_object_maps) return NULL;
+
+ HCheckMaps* check_maps = new(zone()) HCheckMaps(object, maps, zone());
+ AddInstruction(check_maps);
+ HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
+ object, key, val, check_maps, most_general_consolidated_map, false);
+ return instr;
+}
+
+
HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
Expression* prop,
- int ast_id,
+ BailoutId ast_id,
int position,
bool is_store,
bool* has_side_effects) {
@@ -5336,6 +6266,19 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
SmallMapList* maps = prop->GetReceiverTypes();
bool todo_external_array = false;
+ if (!is_store) {
+ HInstruction* consolidated_load =
+ TryBuildConsolidatedElementLoad(object, key, val, maps);
+ if (consolidated_load != NULL) {
+ AddInstruction(consolidated_load);
+ *has_side_effects |= consolidated_load->HasObservableSideEffects();
+ if (position != RelocInfo::kNoPosition) {
+ consolidated_load->set_position(position);
+ }
+ return consolidated_load;
+ }
+ }
+
static const int kNumElementTypes = kElementsKindCount;
bool type_todo[kNumElementTypes];
for (int i = 0; i < kNumElementTypes; ++i) {
@@ -5349,8 +6292,8 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
for (int i = 0; i < maps->length(); ++i) {
Handle<Map> map = maps->at(i);
ElementsKind elements_kind = map->elements_kind();
- if (elements_kind == FAST_DOUBLE_ELEMENTS ||
- elements_kind == FAST_ELEMENTS) {
+ if (IsFastElementsKind(elements_kind) &&
+ elements_kind != GetInitialFastElementsKind()) {
possible_transitioned_maps.Add(map);
}
}
@@ -5364,15 +6307,20 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
int num_untransitionable_maps = 0;
Handle<Map> untransitionable_map;
+ HTransitionElementsKind* transition = NULL;
for (int i = 0; i < maps->length(); ++i) {
Handle<Map> map = maps->at(i);
ASSERT(map->IsMap());
if (!transition_target.at(i).is_null()) {
- AddInstruction(new(zone()) HTransitionElementsKind(
- object, map, transition_target.at(i)));
+ ASSERT(Map::IsValidElementsTransition(
+ map->elements_kind(),
+ transition_target.at(i)->elements_kind()));
+ transition = new(zone()) HTransitionElementsKind(
+ object, map, transition_target.at(i));
+ AddInstruction(transition);
} else {
type_todo[map->elements_kind()] = true;
- if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
+ if (IsExternalArrayElementsKind(map->elements_kind())) {
todo_external_array = true;
}
num_untransitionable_maps++;
@@ -5389,37 +6337,36 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
: BuildLoadKeyedGeneric(object, key));
} else {
instr = AddInstruction(BuildMonomorphicElementAccess(
- object, key, val, untransitionable_map, is_store));
+ object, key, val, transition, untransitionable_map, is_store));
}
*has_side_effects |= instr->HasObservableSideEffects();
- instr->set_position(position);
+ if (position != RelocInfo::kNoPosition) instr->set_position(position);
return is_store ? NULL : instr;
}
- AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
+ HInstruction* checkspec =
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(object, zone()));
HBasicBlock* join = graph()->CreateBasicBlock();
HInstruction* elements_kind_instr =
AddInstruction(new(zone()) HElementsKind(object));
- HCompareConstantEqAndBranch* elements_kind_branch = NULL;
- HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
+ HInstruction* elements =
+ AddInstruction(new(zone()) HLoadElements(object, checkspec));
HLoadExternalArrayPointer* external_elements = NULL;
HInstruction* checked_key = NULL;
- // Generated code assumes that FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS,
- // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS are handled before external
- // arrays.
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
- STATIC_ASSERT(FAST_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ // Generated code assumes that FAST_* and DICTIONARY_ELEMENTS ElementsKinds
+ // are handled before external arrays.
+ STATIC_ASSERT(FAST_SMI_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
STATIC_ASSERT(DICTIONARY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
elements_kind <= LAST_ELEMENTS_KIND;
elements_kind = ElementsKind(elements_kind + 1)) {
- // After having handled FAST_ELEMENTS, FAST_SMI_ONLY_ELEMENTS,
- // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS, we need to add some code
- // that's executed for all external array cases.
+ // After having handled FAST_* and DICTIONARY_ELEMENTS, we need to add some
+ // code that's executed for all external array cases.
STATIC_ASSERT(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND ==
LAST_ELEMENTS_KIND);
if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
@@ -5433,21 +6380,20 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
if (type_todo[elements_kind]) {
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
- elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
- elements_kind_instr, elements_kind, Token::EQ_STRICT);
+ HCompareConstantEqAndBranch* elements_kind_branch =
+ new(zone()) HCompareConstantEqAndBranch(
+ elements_kind_instr, elements_kind, Token::EQ_STRICT);
elements_kind_branch->SetSuccessorAt(0, if_true);
elements_kind_branch->SetSuccessorAt(1, if_false);
current_block()->Finish(elements_kind_branch);
set_current_block(if_true);
HInstruction* access;
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS) {
- if (is_store && elements_kind != FAST_DOUBLE_ELEMENTS) {
+ if (IsFastElementsKind(elements_kind)) {
+ if (is_store && !IsFastDoubleElementsKind(elements_kind)) {
AddInstruction(new(zone()) HCheckMaps(
elements, isolate()->factory()->fixed_array_map(),
- elements_kind_branch));
+ zone(), elements_kind_branch));
}
// TODO(jkummerow): The need for these two blocks could be avoided
// in one of two ways:
@@ -5467,10 +6413,13 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
set_current_block(if_jsarray);
HInstruction* length;
- length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck));
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
+ length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck,
+ HType::Smi()));
+ checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
+ ALLOW_SMI_KEY));
access = AddInstruction(BuildFastElementAccess(
- elements, checked_key, val, elements_kind, is_store));
+ elements, checked_key, val, elements_kind_branch,
+ elements_kind, is_store));
if (!is_store) {
Push(access);
}
@@ -5483,9 +6432,11 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
set_current_block(if_fastobject);
length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
+ checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length,
+ ALLOW_SMI_KEY));
access = AddInstruction(BuildFastElementAccess(
- elements, checked_key, val, elements_kind, is_store));
+ elements, checked_key, val, elements_kind_branch,
+ elements_kind, is_store));
} else if (elements_kind == DICTIONARY_ELEMENTS) {
if (is_store) {
access = AddInstruction(BuildStoreKeyedGeneric(object, key, val));
@@ -5494,10 +6445,11 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
}
} else { // External array elements.
access = AddInstruction(BuildExternalArrayElementAccess(
- external_elements, checked_key, val, elements_kind, is_store));
+ external_elements, checked_key, val, elements_kind_branch,
+ elements_kind, is_store));
}
*has_side_effects |= access->HasObservableSideEffects();
- access->set_position(position);
+ if (position != RelocInfo::kNoPosition) access->set_position(position);
if (!is_store) {
Push(access);
}
@@ -5518,7 +6470,7 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
HValue* key,
HValue* val,
Expression* expr,
- int ast_id,
+ BailoutId ast_id,
int position,
bool is_store,
bool* has_side_effects) {
@@ -5531,7 +6483,7 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
: BuildLoadKeyedGeneric(obj, key);
} else {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
+ instr = BuildMonomorphicElementAccess(obj, key, val, NULL, map, is_store);
}
} else if (expr->GetReceiverTypes() != NULL &&
!expr->GetReceiverTypes()->is_empty()) {
@@ -5544,7 +6496,7 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
instr = BuildLoadKeyedGeneric(obj, key);
}
}
- instr->set_position(position);
+ if (position != RelocInfo::kNoPosition) instr->set_position(position);
AddInstruction(instr);
*has_side_effects = instr->HasObservableSideEffects();
return instr;
@@ -5572,6 +6524,7 @@ void HGraphBuilder::EnsureArgumentsArePushedForAccess() {
// Push arguments when entering inlined function.
HEnterInlined* entry = function_state()->entry();
+ entry->set_arguments_pushed();
ZoneList<HValue*>* arguments_values = entry->arguments_values();
@@ -5654,7 +6607,7 @@ void HGraphBuilder::VisitProperty(Property* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
if (TryArgumentsAccess(expr)) return;
@@ -5665,13 +6618,12 @@ void HGraphBuilder::VisitProperty(Property* expr) {
HValue* array = Pop();
AddInstruction(new(zone()) HCheckNonSmi(array));
HInstruction* mapcheck =
- AddInstruction(HCheckInstanceType::NewIsJSArray(array));
+ AddInstruction(HCheckInstanceType::NewIsJSArray(array, zone()));
instr = new(zone()) HJSArrayLength(array, mapcheck);
-
} else if (expr->IsStringLength()) {
HValue* string = Pop();
AddInstruction(new(zone()) HCheckNonSmi(string));
- AddInstruction(HCheckInstanceType::NewIsString(string));
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
instr = new(zone()) HStringLength(string);
} else if (expr->IsStringAccess()) {
CHECK_ALIVE(VisitForValue(expr->key()));
@@ -5692,15 +6644,27 @@ void HGraphBuilder::VisitProperty(Property* expr) {
Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
SmallMapList* types = expr->GetReceiverTypes();
- HValue* obj = Pop();
+ bool monomorphic = expr->IsMonomorphic();
+ Handle<Map> map;
if (expr->IsMonomorphic()) {
- instr = BuildLoadNamed(obj, expr, types->first(), name);
+ map = types->first();
+ if (map->is_dictionary_map()) monomorphic = false;
+ }
+ if (monomorphic) {
+ Handle<JSFunction> getter;
+ Handle<JSObject> holder;
+ if (LookupGetter(map, name, &getter, &holder)) {
+ AddCheckConstantFunction(holder, Top(), map);
+ if (FLAG_inline_accessors && TryInlineGetter(getter, expr)) return;
+ AddInstruction(new(zone()) HPushArgument(Pop()));
+ instr = new(zone()) HCallConstantFunction(getter, 1);
+ } else {
+ instr = BuildLoadNamedMonomorphic(Pop(), name, expr, map);
+ }
} else if (types != NULL && types->length() > 1) {
- AddInstruction(new(zone()) HCheckNonSmi(obj));
- HandlePolymorphicLoadNamedField(expr, obj, types, name);
- return;
+ return HandlePolymorphicLoadNamedField(expr, Pop(), types, name);
} else {
- instr = BuildLoadNamedGeneric(obj, expr);
+ instr = BuildLoadNamedGeneric(Pop(), name, expr);
}
} else {
@@ -5730,22 +6694,23 @@ void HGraphBuilder::VisitProperty(Property* expr) {
}
-void HGraphBuilder::AddCheckConstantFunction(Call* expr,
+void HGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
+ Handle<Map> receiver_map) {
+ if (!holder.is_null()) {
+ AddInstruction(new(zone()) HCheckPrototypeMaps(
+ Handle<JSObject>(JSObject::cast(receiver_map->prototype())), holder));
+ }
+}
+
+
+void HGraphBuilder::AddCheckConstantFunction(Handle<JSObject> holder,
HValue* receiver,
- Handle<Map> receiver_map,
- bool smi_and_map_check) {
+ Handle<Map> receiver_map) {
// Constant functions have the nice property that the map will change if they
// are overwritten. Therefore it is enough to check the map of the holder and
// its prototypes.
- if (smi_and_map_check) {
- AddInstruction(new(zone()) HCheckNonSmi(receiver));
- AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map));
- }
- if (!expr->holder().is_null()) {
- AddInstruction(new(zone()) HCheckPrototypeMaps(
- Handle<JSObject>(JSObject::cast(receiver_map->prototype())),
- expr->holder()));
- }
+ AddCheckMapsWithTransitions(receiver, receiver_map);
+ AddCheckPrototypeMaps(holder, receiver_map);
}
@@ -5827,7 +6792,7 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
set_current_block(if_true);
expr->ComputeTarget(map, name);
- AddCheckConstantFunction(expr, receiver, map, false);
+ AddCheckPrototypeMaps(expr->holder(), map);
if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
Handle<JSFunction> caller = info()->closure();
SmartArrayPointer<char> caller_name =
@@ -5941,11 +6906,11 @@ int HGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
bool HGraphBuilder::TryInline(CallKind call_kind,
Handle<JSFunction> target,
- ZoneList<Expression*>* arguments,
- HValue* receiver,
- int ast_id,
- int return_id,
- ReturnHandlingFlag return_handling) {
+ int arguments_count,
+ HValue* implicit_return_value,
+ BailoutId ast_id,
+ BailoutId return_id,
+ InliningKind inlining_kind) {
int nodes_added = InliningAstSize(target);
if (nodes_added == kNotInlinable) return false;
@@ -6002,13 +6967,13 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
}
// Parse and allocate variables.
- CompilationInfo target_info(target);
+ CompilationInfo target_info(target, zone());
if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
!Scope::Analyze(&target_info)) {
if (target_info.isolate()->has_pending_exception()) {
// Parse or scope error, never optimize this function.
SetStackOverflow();
- target_shared->DisableOptimization();
+ target_shared->DisableOptimization("parse/scope error");
}
TraceInline(target, caller, "parse failure");
return false;
@@ -6074,7 +7039,7 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// The scope info might not have been set if a lazily compiled
// function is inlined before being called for the first time.
Handle<ScopeInfo> target_scope_info =
- ScopeInfo::Create(target_info.scope());
+ ScopeInfo::Create(target_info.scope(), zone());
target_shared->set_scope_info(*target_scope_info);
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
@@ -6090,31 +7055,34 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// Save the pending call context and type feedback oracle. Set up new ones
// for the inlined function.
ASSERT(target_shared->has_deoptimization_support());
+ Handle<Code> unoptimized_code(target_shared->code());
TypeFeedbackOracle target_oracle(
- Handle<Code>(target_shared->code()),
- Handle<Context>(target->context()->global_context()),
- isolate());
+ unoptimized_code,
+ Handle<Context>(target->context()->native_context()),
+ isolate(),
+ zone());
// The function state is new-allocated because we need to delete it
// in two different places.
FunctionState* target_state = new FunctionState(
- this, &target_info, &target_oracle, return_handling);
+ this, &target_info, &target_oracle, inlining_kind);
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner_env =
environment()->CopyForInlining(target,
- arguments->length(),
+ arguments_count,
function,
undefined,
call_kind,
- function_state()->is_construct());
+ function_state()->inlining_kind());
#ifdef V8_TARGET_ARCH_IA32
// IA32 only, overwrite the caller's context in the deoptimization
// environment with the correct one.
//
// TODO(kmillikin): implement the same inlining on other platforms so we
// can remove the unsightly ifdefs in this function.
- HConstant* context = new HConstant(Handle<Context>(target->context()),
- Representation::Tagged());
+ HConstant* context =
+ new(zone()) HConstant(Handle<Context>(target->context()),
+ Representation::Tagged());
AddInstruction(context);
inner_env->BindContext(context);
#endif
@@ -6129,18 +7097,18 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
if (function->scope()->arguments() != NULL) {
HEnvironment* arguments_env = inner_env->arguments_environment();
int arguments_count = arguments_env->parameter_count();
- arguments_values = new(zone()) ZoneList<HValue*>(arguments_count);
+ arguments_values = new(zone()) ZoneList<HValue*>(arguments_count, zone());
for (int i = 0; i < arguments_count; i++) {
- arguments_values->Add(arguments_env->Lookup(i));
+ arguments_values->Add(arguments_env->Lookup(i), zone());
}
}
HEnterInlined* enter_inlined =
new(zone()) HEnterInlined(target,
- arguments->length(),
+ arguments_count,
function,
call_kind,
- function_state()->is_construct(),
+ function_state()->inlining_kind(),
function->scope()->arguments(),
arguments_values);
function_state()->set_entry(enter_inlined);
@@ -6160,7 +7128,7 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// Bail out if the inline function did, as we cannot residualize a call
// instead.
TraceInline(target, caller, "inline graph construction failed");
- target_shared->DisableOptimization();
+ target_shared->DisableOptimization("inlining bailed out");
inline_bailout_ = true;
delete target_state;
return true;
@@ -6169,30 +7137,51 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// Update inlined nodes count.
inlined_count_ += nodes_added;
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+ Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
+ Handle<TypeFeedbackInfo> type_info(
+ Handle<TypeFeedbackInfo>::cast(maybe_type_info));
+ graph()->update_type_change_checksum(type_info->own_type_change_checksum());
+
TraceInline(target, caller, NULL);
if (current_block() != NULL) {
- // Add default return value (i.e. undefined for normals calls or the newly
- // allocated receiver for construct calls) if control can fall off the
- // body. In a test context, undefined is false and any JSObject is true.
- if (call_context()->IsValue()) {
- ASSERT(function_return() != NULL);
- HValue* return_value = function_state()->is_construct()
- ? receiver
- : undefined;
- current_block()->AddLeaveInlined(return_value,
- function_return(),
- function_state());
- } else if (call_context()->IsEffect()) {
- ASSERT(function_return() != NULL);
- current_block()->Goto(function_return(), function_state());
+ FunctionState* state = function_state();
+ if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
+ // Falling off the end of an inlined construct call. In a test context the
+ // return value will always evaluate to true, in a value context the
+ // return value is the newly allocated receiver.
+ if (call_context()->IsTest()) {
+ current_block()->Goto(inlined_test_context()->if_true(), state);
+ } else if (call_context()->IsEffect()) {
+ current_block()->Goto(function_return(), state);
+ } else {
+ ASSERT(call_context()->IsValue());
+ current_block()->AddLeaveInlined(implicit_return_value, state);
+ }
+ } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
+ // Falling off the end of an inlined setter call. The returned value is
+ // never used, the value of an assignment is always the value of the RHS
+ // of the assignment.
+ if (call_context()->IsTest()) {
+ inlined_test_context()->ReturnValue(implicit_return_value);
+ } else if (call_context()->IsEffect()) {
+ current_block()->Goto(function_return(), state);
+ } else {
+ ASSERT(call_context()->IsValue());
+ current_block()->AddLeaveInlined(implicit_return_value, state);
+ }
} else {
- ASSERT(call_context()->IsTest());
- ASSERT(inlined_test_context() != NULL);
- HBasicBlock* target = function_state()->is_construct()
- ? inlined_test_context()->if_true()
- : inlined_test_context()->if_false();
- current_block()->Goto(target, function_state());
+ // Falling off the end of a normal inlined function. This basically means
+ // returning undefined.
+ if (call_context()->IsTest()) {
+ current_block()->Goto(inlined_test_context()->if_false(), state);
+ } else if (call_context()->IsEffect()) {
+ current_block()->Goto(function_return(), state);
+ } else {
+ ASSERT(call_context()->IsValue());
+ current_block()->AddLeaveInlined(undefined, state);
+ }
}
}
@@ -6240,7 +7229,7 @@ bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
return TryInline(call_kind,
expr->target(),
- expr->arguments(),
+ expr->arguments()->length(),
NULL,
expr->id(),
expr->ReturnId(),
@@ -6248,17 +7237,43 @@ bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
}
-bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
+bool HGraphBuilder::TryInlineConstruct(CallNew* expr,
+ HValue* implicit_return_value) {
return TryInline(CALL_AS_FUNCTION,
expr->target(),
- expr->arguments(),
- receiver,
+ expr->arguments()->length(),
+ implicit_return_value,
expr->id(),
expr->ReturnId(),
CONSTRUCT_CALL_RETURN);
}
+bool HGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
+ Property* prop) {
+ return TryInline(CALL_AS_METHOD,
+ getter,
+ 0,
+ NULL,
+ prop->id(),
+ prop->LoadId(),
+ GETTER_CALL_RETURN);
+}
+
+
+bool HGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
+ Assignment* assignment,
+ HValue* implicit_return_value) {
+ return TryInline(CALL_AS_METHOD,
+ setter,
+ 1,
+ implicit_return_value,
+ assignment->id(),
+ assignment->AssignmentId(),
+ SETTER_CALL_RETURN);
+}
+
+
bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
@@ -6332,7 +7347,7 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
case kMathCos:
case kMathTan:
if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
- AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
HValue* argument = Pop();
HValue* context = environment()->LookupContext();
Drop(1); // Receiver.
@@ -6345,7 +7360,7 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
break;
case kMathPow:
if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
- AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
HValue* right = Pop();
HValue* left = Pop();
Pop(); // Pop receiver.
@@ -6387,7 +7402,7 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
break;
case kMathRandom:
if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
- AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
Drop(1); // Receiver.
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
@@ -6400,82 +7415,15 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
case kMathMax:
case kMathMin:
if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
- AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
HValue* right = Pop();
HValue* left = Pop();
- Pop(); // Pop receiver.
-
- HValue* left_operand = left;
- HValue* right_operand = right;
-
- // If we do not have two integers, we convert to double for comparison.
- if (!left->representation().IsInteger32() ||
- !right->representation().IsInteger32()) {
- if (!left->representation().IsDouble()) {
- HChange* left_convert = new(zone()) HChange(
- left,
- Representation::Double(),
- false, // Do not truncate when converting to double.
- true); // Deoptimize for undefined.
- left_convert->SetFlag(HValue::kBailoutOnMinusZero);
- left_operand = AddInstruction(left_convert);
- }
- if (!right->representation().IsDouble()) {
- HChange* right_convert = new(zone()) HChange(
- right,
- Representation::Double(),
- false, // Do not truncate when converting to double.
- true); // Deoptimize for undefined.
- right_convert->SetFlag(HValue::kBailoutOnMinusZero);
- right_operand = AddInstruction(right_convert);
- }
- }
-
- ASSERT(left_operand->representation().Equals(
- right_operand->representation()));
- ASSERT(!left_operand->representation().IsTagged());
-
- Token::Value op = (id == kMathMin) ? Token::LT : Token::GT;
-
- HCompareIDAndBranch* compare =
- new(zone()) HCompareIDAndBranch(left_operand, right_operand, op);
- compare->SetInputRepresentation(left_operand->representation());
-
- HBasicBlock* return_left = graph()->CreateBasicBlock();
- HBasicBlock* return_right = graph()->CreateBasicBlock();
-
- compare->SetSuccessorAt(0, return_left);
- compare->SetSuccessorAt(1, return_right);
- current_block()->Finish(compare);
-
- set_current_block(return_left);
- Push(left);
- set_current_block(return_right);
- // The branch above always returns the right operand if either of
- // them is NaN, but the spec requires that max/min(NaN, X) = NaN.
- // We add another branch that checks if the left operand is NaN or not.
- if (left_operand->representation().IsDouble()) {
- // If left_operand != left_operand then it is NaN.
- HCompareIDAndBranch* compare_nan = new(zone()) HCompareIDAndBranch(
- left_operand, left_operand, Token::EQ);
- compare_nan->SetInputRepresentation(left_operand->representation());
- HBasicBlock* left_is_number = graph()->CreateBasicBlock();
- HBasicBlock* left_is_nan = graph()->CreateBasicBlock();
- compare_nan->SetSuccessorAt(0, left_is_number);
- compare_nan->SetSuccessorAt(1, left_is_nan);
- current_block()->Finish(compare_nan);
- set_current_block(left_is_nan);
- Push(left);
- set_current_block(left_is_number);
- Push(right);
- return_right = CreateJoin(left_is_number, left_is_nan, expr->id());
- } else {
- Push(right);
- }
-
- HBasicBlock* join = CreateJoin(return_left, return_right, expr->id());
- set_current_block(join);
- ast_context()->ReturnValue(Pop());
+ Drop(1); // Receiver.
+ HValue* context = environment()->LookupContext();
+ HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
+ : HMathMinMax::kMathMax;
+ HMathMinMax* result = new(zone()) HMathMinMax(context, left, right, op);
+ ast_context()->ReturnInstruction(result, expr->id());
return true;
}
break;
@@ -6516,7 +7464,7 @@ bool HGraphBuilder::TryCallApply(Call* expr) {
VisitForValue(prop->obj());
if (HasStackOverflow() || current_block() == NULL) return true;
HValue* function = Top();
- AddCheckConstantFunction(expr, function, function_map, true);
+ AddCheckConstantFunction(expr->holder(), function, function_map);
Drop(1);
VisitForValue(args->at(0));
@@ -6635,7 +7583,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
call = PreProcessCall(
new(zone()) HCallNamed(context, name, argument_count));
} else {
- AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
if (TryInlineCall(expr)) return;
call = PreProcessCall(
@@ -6706,6 +7654,11 @@ void HGraphBuilder::VisitCall(Call* expr) {
return;
}
if (TryInlineCall(expr)) return;
+
+ if (expr->target().is_identical_to(info()->closure())) {
+ graph()->MarkRecursive();
+ }
+
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
} else {
@@ -6728,8 +7681,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
HValue* function = Top();
HValue* context = environment()->LookupContext();
HGlobalObject* global = new(zone()) HGlobalObject(context);
- HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
AddInstruction(global);
+ HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
@@ -6759,8 +7712,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
HValue* function = Top();
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
- HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
AddInstruction(global_object);
+ HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
AddInstruction(receiver);
PushAndAdd(new(zone()) HPushArgument(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
@@ -6833,8 +7786,8 @@ void HGraphBuilder::VisitCallNew(CallNew* expr) {
} else {
// The constructor function is both an operand to the instruction and an
// argument to the construct call.
- HValue* constructor = NULL;
- CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
+ CHECK_ALIVE(VisitArgument(expr->expression()));
+ HValue* constructor = HPushArgument::cast(Top())->argument();
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
HInstruction* call =
new(zone()) HCallNew(context, constructor, argument_count);
@@ -6892,7 +7845,6 @@ void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
int argument_count = expr->arguments()->length();
HCallRuntime* call =
new(zone()) HCallRuntime(context, name, function, argument_count);
- call->set_position(RelocInfo::kNoPosition);
Drop(argument_count);
return ast_context()->ReturnInstruction(call, expr->id());
}
@@ -7147,8 +8099,7 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
}
HValue* context = BuildContextChainWalk(var);
- HStoreContextSlot::Mode mode =
- (var->mode() == LET || var->mode() == CONST_HARMONY)
+ HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
HStoreContextSlot* instr =
new(zone()) HStoreContextSlot(context, var->index(), mode, after);
@@ -7166,30 +8117,56 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
} else {
// Argument of the count operation is a property.
ASSERT(prop != NULL);
- prop->RecordTypeFeedback(oracle());
+ prop->RecordTypeFeedback(oracle(), zone());
if (prop->key()->IsPropertyName()) {
// Named property.
if (returns_original_input) Push(graph_->GetConstantUndefined());
CHECK_ALIVE(VisitForValue(prop->obj()));
- HValue* obj = Top();
-
- HInstruction* load = NULL;
- if (prop->IsMonomorphic()) {
- Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
- Handle<Map> map = prop->GetReceiverTypes()->first();
- load = BuildLoadNamed(obj, prop, map, name);
+ HValue* object = Top();
+
+ Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
+ Handle<Map> map;
+ HInstruction* load;
+ bool monomorphic = prop->IsMonomorphic();
+ if (monomorphic) {
+ map = prop->GetReceiverTypes()->first();
+ if (map->is_dictionary_map()) monomorphic = false;
+ }
+ if (monomorphic) {
+ Handle<JSFunction> getter;
+ Handle<JSObject> holder;
+ if (LookupGetter(map, name, &getter, &holder)) {
+ load = BuildCallGetter(object, map, getter, holder);
+ } else {
+ load = BuildLoadNamedMonomorphic(object, name, prop, map);
+ }
} else {
- load = BuildLoadNamedGeneric(obj, prop);
+ load = BuildLoadNamedGeneric(object, name, prop);
}
PushAndAdd(load);
- if (load->HasObservableSideEffects()) AddSimulate(expr->CountId());
+ if (load->HasObservableSideEffects()) AddSimulate(prop->LoadId());
after = BuildIncrement(returns_original_input, expr);
input = Pop();
- HInstruction* store = BuildStoreNamed(obj, after, prop);
+ HInstruction* store;
+ if (!monomorphic) {
+ // If we don't know the monomorphic type, do a generic store.
+ CHECK_ALIVE(store = BuildStoreNamedGeneric(object, name, after));
+ } else {
+ Handle<JSFunction> setter;
+ Handle<JSObject> holder;
+ if (LookupSetter(map, name, &setter, &holder)) {
+ store = BuildCallSetter(object, after, map, setter, holder);
+ } else {
+ CHECK_ALIVE(store = BuildStoreNamedMonomorphic(object,
+ name,
+ after,
+ map));
+ }
+ }
AddInstruction(store);
// Overwrite the receiver in the bailout environment with the result
@@ -7210,16 +8187,16 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
bool has_side_effects = false;
HValue* load = HandleKeyedElementAccess(
- obj, key, NULL, prop, expr->CountId(), RelocInfo::kNoPosition,
+ obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition,
false, // is_store
&has_side_effects);
Push(load);
- if (has_side_effects) AddSimulate(expr->CountId());
+ if (has_side_effects) AddSimulate(prop->LoadId());
after = BuildIncrement(returns_original_input, expr);
input = Pop();
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
RelocInfo::kNoPosition,
true, // is_store
@@ -7245,7 +8222,7 @@ HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
HValue* string,
HValue* index) {
AddInstruction(new(zone()) HCheckNonSmi(string));
- AddInstruction(HCheckInstanceType::NewIsString(string));
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
HStringLength* length = new(zone()) HStringLength(string);
AddInstruction(length);
HInstruction* checked_index =
@@ -7253,6 +8230,61 @@ HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
return new(zone()) HStringCharCodeAt(context, string, checked_index);
}
+// Checks if the given shift amounts have the form: (sa) and (32 - sa).
+static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
+ HValue* const32_minus_sa) {
+ if (!const32_minus_sa->IsSub()) return false;
+ HSub* sub = HSub::cast(const32_minus_sa);
+ HValue* const32 = sub->left();
+ if (!const32->IsConstant() ||
+ HConstant::cast(const32)->Integer32Value() != 32) {
+ return false;
+ }
+ return (sub->right() == sa);
+}
+
+
+// Checks if the left and the right are shift instructions with the opposite
+// directions that can be replaced by one rotate right instruction or not.
+// Returns the operand and the shift amount for the rotate instruction in the
+// former case.
+bool HGraphBuilder::MatchRotateRight(HValue* left,
+ HValue* right,
+ HValue** operand,
+ HValue** shift_amount) {
+ HShl* shl;
+ HShr* shr;
+ if (left->IsShl() && right->IsShr()) {
+ shl = HShl::cast(left);
+ shr = HShr::cast(right);
+ } else if (left->IsShr() && right->IsShl()) {
+ shl = HShl::cast(right);
+ shr = HShr::cast(left);
+ } else {
+ return false;
+ }
+
+ if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
+ !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
+ return false;
+ }
+ *operand= shr->left();
+ *shift_amount = shr->right();
+ return true;
+}
+
+
+bool CanBeZero(HValue *right) {
+ if (right->IsConstant()) {
+ HConstant* right_const = HConstant::cast(right);
+ if (right_const->HasInteger32Value() &&
+ (right_const->Integer32Value() & 0x1f) != 0) {
+ return false;
+ }
+ }
+ return true;
+}
+
HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
HValue* left,
@@ -7269,9 +8301,9 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
case Token::ADD:
if (info.IsString()) {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsString(left));
+ AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsString(right));
+ AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
instr = new(zone()) HStringAdd(context, left, right);
} else {
instr = HAdd::NewHAdd(zone(), context, left, right);
@@ -7291,14 +8323,27 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
break;
case Token::BIT_XOR:
case Token::BIT_AND:
- case Token::BIT_OR:
instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
break;
+ case Token::BIT_OR: {
+ HValue* operand, *shift_amount;
+ if (info.IsInteger32() &&
+ MatchRotateRight(left, right, &operand, &shift_amount)) {
+ instr = new(zone()) HRor(context, operand, shift_amount);
+ } else {
+ instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
+ }
+ break;
+ }
case Token::SAR:
instr = HSar::NewHSar(zone(), context, left, right);
break;
case Token::SHR:
instr = HShr::NewHShr(zone(), context, left, right);
+ if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
+ CanBeZero(right)) {
+ graph()->RecordUint32Instruction(instr);
+ }
break;
case Token::SHL:
instr = HShl::NewHShl(zone(), context, left, right);
@@ -7311,14 +8356,16 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
// for a smi operation. If one of the operands is a constant string
// do not generate code assuming it is a smi operation.
if (info.IsSmi() &&
- ((left->IsConstant() && HConstant::cast(left)->HasStringValue()) ||
- (right->IsConstant() && HConstant::cast(right)->HasStringValue()))) {
+ ((left->IsConstant() && HConstant::cast(left)->handle()->IsString()) ||
+ (right->IsConstant() && HConstant::cast(right)->handle()->IsString()))) {
return instr;
}
Representation rep = ToRepresentation(info);
// We only generate either int32 or generic tagged bitwise operations.
- if (instr->IsBitwiseBinaryOperation() && rep.IsDouble()) {
- rep = Representation::Integer32();
+ if (instr->IsBitwiseBinaryOperation()) {
+ HBitwiseBinaryOperation::cast(instr)->
+ InitializeObservedInputRepresentation(rep);
+ if (rep.IsDouble()) rep = Representation::Integer32();
}
TraceRepresentation(expr->op(), info, instr, rep);
instr->AssumeRepresentation(rep);
@@ -7395,7 +8442,7 @@ void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
// We need an extra block to maintain edge-split form.
HBasicBlock* empty_block = graph()->CreateBasicBlock();
HBasicBlock* eval_right = graph()->CreateBasicBlock();
- unsigned test_id = expr->left()->test_id();
+ TypeFeedbackId test_id = expr->left()->test_id();
ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
HBranch* test = is_logical_and
? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
@@ -7528,7 +8575,7 @@ static bool MatchLiteralCompareTypeof(HValue* left,
if (left->IsTypeof() &&
Token::IsEqualityOp(op) &&
right->IsConstant() &&
- HConstant::cast(right)->HasStringValue()) {
+ HConstant::cast(right)->handle()->IsString()) {
*typeof_expr = HTypeof::cast(left);
*check = Handle<String>::cast(HConstant::cast(right)->handle());
return true;
@@ -7634,9 +8681,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
Handle<GlobalObject> global(info()->global_object());
LookupResult lookup(isolate());
global->Lookup(*name, &lookup);
- if (lookup.IsFound() &&
- lookup.type() == NORMAL &&
- lookup.GetValue()->IsJSFunction()) {
+ if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
@@ -7670,19 +8715,17 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
// Can we get away with map check and not instance type check?
Handle<Map> map = oracle()->GetCompareMap(expr);
if (!map.is_null()) {
- AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckMaps::NewWithTransitions(left, map));
- AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckMaps::NewWithTransitions(right, map));
+ AddCheckMapsWithTransitions(left, map);
+ AddCheckMapsWithTransitions(right, map);
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
return ast_context()->ReturnControl(result, expr->id());
} else {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(left));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(right));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(right, zone()));
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
@@ -7695,9 +8738,9 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
} else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) &&
(op == Token::EQ || op == Token::EQ_STRICT)) {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsSymbol(left));
+ AddInstruction(HCheckInstanceType::NewIsSymbol(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsSymbol(right));
+ AddInstruction(HCheckInstanceType::NewIsSymbol(right, zone()));
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
@@ -7734,13 +8777,25 @@ void HGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
}
+HInstruction* HGraphBuilder::BuildThisFunction() {
+ // If we share optimized code between different closures, the
+ // this-function is not a constant, except inside an inlined body.
+ if (function_state()->outer() != NULL) {
+ return new(zone()) HConstant(
+ function_state()->compilation_info()->closure(),
+ Representation::Tagged());
+ } else {
+ return new(zone()) HThisFunction;
+ }
+}
+
+
void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- HThisFunction* self = new(zone()) HThisFunction(
- function_state()->compilation_info()->closure());
- return ast_context()->ReturnInstruction(self, expr->id());
+ HInstruction* instr = BuildThisFunction();
+ return ast_context()->ReturnInstruction(instr, expr->id());
}
@@ -7769,11 +8824,11 @@ void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_.Add(variable->name());
+ globals_.Add(variable->name(), zone());
globals_.Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(), zone());
+ globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
return;
case Variable::PARAMETER:
case Variable::LOCAL:
@@ -7786,7 +8841,7 @@ void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
if (hole_init) {
HValue* value = graph()->GetConstantHole();
HValue* context = environment()->LookupContext();
- HStoreContextSlot* store = new HStoreContextSlot(
+ HStoreContextSlot* store = new(zone()) HStoreContextSlot(
context, variable->index(), HStoreContextSlot::kNoCheck, value);
AddInstruction(store);
if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
@@ -7803,13 +8858,13 @@ void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_.Add(variable->name());
+ globals_.Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_.Add(function);
- globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_.Add(function, zone());
+ globals_.Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
return;
}
case Variable::PARAMETER:
@@ -7823,7 +8878,7 @@ void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
CHECK_ALIVE(VisitForValue(declaration->fun()));
HValue* value = Pop();
HValue* context = environment()->LookupContext();
- HStoreContextSlot* store = new HStoreContextSlot(
+ HStoreContextSlot* store = new(zone()) HStoreContextSlot(
context, variable->index(), HStoreContextSlot::kNoCheck, value);
AddInstruction(store);
if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
@@ -7969,7 +9024,7 @@ void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
ASSERT(call->arguments()->length() == 0);
if (function_state()->outer() != NULL) {
// We are generating graph for inlined function.
- HValue* value = function_state()->is_construct()
+ HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
? graph()->GetConstantTrue()
: graph()->GetConstantFalse();
return ast_context()->ReturnValue(value);
@@ -8005,8 +9060,10 @@ void HGraphBuilder::GenerateArguments(CallRuntime* call) {
HInstruction* elements = AddInstruction(
new(zone()) HArgumentsElements(false));
HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
+ HInstruction* checked_index =
+ AddInstruction(new(zone()) HBoundsCheck(index, length));
HAccessArgumentsAt* result =
- new(zone()) HAccessArgumentsAt(elements, length, index);
+ new(zone()) HAccessArgumentsAt(elements, length, checked_index);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -8069,11 +9126,11 @@ void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
// Create in-object property store to kValueOffset.
set_current_block(if_js_value);
Handle<String> name = isolate()->factory()->undefined_symbol();
- AddInstruction(new HStoreNamedField(object,
- name,
- value,
- true, // in-object store.
- JSValue::kValueOffset));
+ AddInstruction(new(zone()) HStoreNamedField(object,
+ name,
+ value,
+ true, // in-object store.
+ JSValue::kValueOffset));
if_js_value->Goto(join);
join->SetJoinId(call->id());
set_current_block(join);
@@ -8361,33 +9418,38 @@ void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
HEnvironment::HEnvironment(HEnvironment* outer,
Scope* scope,
- Handle<JSFunction> closure)
+ Handle<JSFunction> closure,
+ Zone* zone)
: closure_(closure),
- values_(0),
- assigned_variables_(4),
+ values_(0, zone),
+ assigned_variables_(4, zone),
frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
outer_(outer),
+ entry_(NULL),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber) {
+ ast_id_(BailoutId::None()),
+ zone_(zone) {
Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
}
-HEnvironment::HEnvironment(const HEnvironment* other)
- : values_(0),
- assigned_variables_(0),
+HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
+ : values_(0, zone),
+ assigned_variables_(0, zone),
frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
outer_(NULL),
+ entry_(NULL),
pop_count_(0),
push_count_(0),
- ast_id_(other->ast_id()) {
+ ast_id_(other->ast_id()),
+ zone_(zone) {
Initialize(other);
}
@@ -8395,17 +9457,20 @@ HEnvironment::HEnvironment(const HEnvironment* other)
HEnvironment::HEnvironment(HEnvironment* outer,
Handle<JSFunction> closure,
FrameType frame_type,
- int arguments)
+ int arguments,
+ Zone* zone)
: closure_(closure),
- values_(arguments),
- assigned_variables_(0),
+ values_(arguments, zone),
+ assigned_variables_(0, zone),
frame_type_(frame_type),
parameter_count_(arguments),
local_count_(0),
outer_(outer),
+ entry_(NULL),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber) {
+ ast_id_(BailoutId::None()),
+ zone_(zone) {
}
@@ -8417,19 +9482,20 @@ void HEnvironment::Initialize(int parameter_count,
// Avoid reallocating the temporaries' backing store on the first Push.
int total = parameter_count + specials_count_ + local_count + stack_height;
- values_.Initialize(total + 4);
- for (int i = 0; i < total; ++i) values_.Add(NULL);
+ values_.Initialize(total + 4, zone());
+ for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
}
void HEnvironment::Initialize(const HEnvironment* other) {
closure_ = other->closure();
- values_.AddAll(other->values_);
- assigned_variables_.AddAll(other->assigned_variables_);
+ values_.AddAll(other->values_, zone());
+ assigned_variables_.AddAll(other->assigned_variables_, zone());
frame_type_ = other->frame_type_;
parameter_count_ = other->parameter_count_;
local_count_ = other->local_count_;
if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
+ entry_ = other->entry_;
pop_count_ = other->pop_count_;
push_count_ = other->push_count_;
ast_id_ = other->ast_id_;
@@ -8453,7 +9519,7 @@ void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
} else if (values_[i] != other->values_[i]) {
// There is a fresh value on the incoming edge, a phi is needed.
ASSERT(values_[i] != NULL && other->values_[i] != NULL);
- HPhi* phi = new(block->zone()) HPhi(i);
+ HPhi* phi = new(zone()) HPhi(i, zone());
HValue* old_value = values_[i];
for (int j = 0; j < block->predecessors()->length(); j++) {
phi->AddInput(old_value);
@@ -8469,7 +9535,7 @@ void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
void HEnvironment::Bind(int index, HValue* value) {
ASSERT(value != NULL);
if (!assigned_variables_.Contains(index)) {
- assigned_variables_.Add(index);
+ assigned_variables_.Add(index, zone());
}
values_[index] = value;
}
@@ -8509,7 +9575,7 @@ void HEnvironment::Drop(int count) {
HEnvironment* HEnvironment::Copy() const {
- return new(closure()->GetIsolate()->zone()) HEnvironment(this);
+ return new(zone()) HEnvironment(this, zone());
}
@@ -8523,7 +9589,7 @@ HEnvironment* HEnvironment::CopyWithoutHistory() const {
HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
HEnvironment* new_env = Copy();
for (int i = 0; i < values_.length(); ++i) {
- HPhi* phi = new(loop_header->zone()) HPhi(i);
+ HPhi* phi = new(zone()) HPhi(i, zone());
phi->AddInput(values_[i]);
new_env->values_[i] = phi;
loop_header->AddPhi(phi);
@@ -8537,8 +9603,9 @@ HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
Handle<JSFunction> target,
FrameType frame_type,
int arguments) const {
- HEnvironment* new_env = new(closure()->GetIsolate()->zone())
- HEnvironment(outer, target, frame_type, arguments + 1);
+ HEnvironment* new_env =
+ new(zone()) HEnvironment(outer, target, frame_type,
+ arguments + 1, zone());
for (int i = 0; i <= arguments; ++i) { // Include receiver.
new_env->Push(ExpressionStackAt(arguments - i));
}
@@ -8553,11 +9620,9 @@ HEnvironment* HEnvironment::CopyForInlining(
FunctionLiteral* function,
HConstant* undefined,
CallKind call_kind,
- bool is_construct) const {
+ InliningKind inlining_kind) const {
ASSERT(frame_type() == JS_FUNCTION);
- Zone* zone = closure()->GetIsolate()->zone();
-
// Outer environment is a copy of this one without the arguments.
int arity = function->scope()->num_parameters();
@@ -8565,11 +9630,19 @@ HEnvironment* HEnvironment::CopyForInlining(
outer->Drop(arguments + 1); // Including receiver.
outer->ClearHistory();
- if (is_construct) {
+ if (inlining_kind == CONSTRUCT_CALL_RETURN) {
// Create artificial constructor stub environment. The receiver should
// actually be the constructor function, but we pass the newly allocated
// object instead, DoComputeConstructStubFrame() relies on that.
outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
+ } else if (inlining_kind == GETTER_CALL_RETURN) {
+ // We need an additional StackFrame::INTERNAL frame for restoring the
+ // correct context.
+ outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
+ } else if (inlining_kind == SETTER_CALL_RETURN) {
+ // We need an additional StackFrame::INTERNAL frame for temporarily saving
+ // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
+ outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
}
if (arity != arguments) {
@@ -8578,7 +9651,7 @@ HEnvironment* HEnvironment::CopyForInlining(
}
HEnvironment* inner =
- new(zone) HEnvironment(outer, function->scope(), target);
+ new(zone()) HEnvironment(outer, function->scope(), target, zone());
// Get the argument values from the original environment.
for (int i = 0; i <= arity; ++i) { // Include receiver.
HValue* push = (i <= arguments) ?
@@ -8589,7 +9662,7 @@ HEnvironment* HEnvironment::CopyForInlining(
// builtin function, pass undefined as the receiver for function
// calls (instead of the global receiver).
if ((target->shared()->native() || !function->is_classic_mode()) &&
- call_kind == CALL_AS_FUNCTION && !is_construct) {
+ call_kind == CALL_AS_FUNCTION && inlining_kind != CONSTRUCT_CALL_RETURN) {
inner->SetValueAt(0, undefined);
}
inner->SetValueAt(arity + 1, LookupContext());
@@ -8597,7 +9670,7 @@ HEnvironment* HEnvironment::CopyForInlining(
inner->SetValueAt(i, undefined);
}
- inner->set_ast_id(AstNode::kFunctionEntryId);
+ inner->set_ast_id(BailoutId::FunctionEntry());
return inner;
}
@@ -8768,27 +9841,28 @@ void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
for (int i = 0; i < fixed_d->length(); ++i) {
- TraceLiveRange(fixed_d->at(i), "fixed");
+ TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
}
const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
for (int i = 0; i < fixed->length(); ++i) {
- TraceLiveRange(fixed->at(i), "fixed");
+ TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
}
const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
for (int i = 0; i < live_ranges->length(); ++i) {
- TraceLiveRange(live_ranges->at(i), "object");
+ TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
}
}
-void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
+void HTracer::TraceLiveRange(LiveRange* range, const char* type,
+ Zone* zone) {
if (range != NULL && !range->IsEmpty()) {
PrintIndent();
trace_.Add("%d %s", range->id(), type);
if (range->HasRegisterAssigned()) {
- LOperand* op = range->CreateAssignedOperand(ZONE);
+ LOperand* op = range->CreateAssignedOperand(zone);
int assigned_reg = op->index();
if (op->IsDoubleRegister()) {
trace_.Add(" \"%s\"",
diff --git a/src/3rdparty/v8/src/hydrogen.h b/src/3rdparty/v8/src/hydrogen.h
index b56a5af..3748970 100644
--- a/src/3rdparty/v8/src/hydrogen.h
+++ b/src/3rdparty/v8/src/hydrogen.h
@@ -77,7 +77,7 @@ class HBasicBlock: public ZoneObject {
return &deleted_phis_;
}
void RecordDeletedPhi(int merge_index) {
- deleted_phis_.Add(merge_index);
+ deleted_phis_.Add(merge_index, zone());
}
HBasicBlock* dominator() const { return dominator_; }
HEnvironment* last_environment() const { return last_environment_; }
@@ -118,14 +118,14 @@ class HBasicBlock: public ZoneObject {
bool HasParentLoopHeader() const { return parent_loop_header_ != NULL; }
- void SetJoinId(int ast_id);
+ void SetJoinId(BailoutId ast_id);
void Finish(HControlInstruction* last);
void FinishExit(HControlInstruction* instruction);
void Goto(HBasicBlock* block, FunctionState* state = NULL);
int PredecessorIndexOf(HBasicBlock* predecessor) const;
- void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
+ void AddSimulate(BailoutId ast_id) { AddInstruction(CreateSimulate(ast_id)); }
void AssignCommonDominator(HBasicBlock* other);
void AssignLoopSuccessorDominators();
@@ -135,9 +135,7 @@ class HBasicBlock: public ZoneObject {
// Add the inlined function exit sequence, adding an HLeaveInlined
// instruction and updating the bailout environment.
- void AddLeaveInlined(HValue* return_value,
- HBasicBlock* target,
- FunctionState* state = NULL);
+ void AddLeaveInlined(HValue* return_value, FunctionState* state);
// If a target block is tagged as an inline function return, all
// predecessors should contain the inlined exit sequence:
@@ -158,7 +156,7 @@ class HBasicBlock: public ZoneObject {
dominates_loop_successors_ = true;
}
- inline Zone* zone();
+ inline Zone* zone() const;
#ifdef DEBUG
void Verify();
@@ -168,7 +166,7 @@ class HBasicBlock: public ZoneObject {
void RegisterPredecessor(HBasicBlock* pred);
void AddDominatedBlock(HBasicBlock* block);
- HSimulate* CreateSimulate(int ast_id);
+ HSimulate* CreateSimulate(BailoutId ast_id);
HDeoptimize* CreateDeoptimize(HDeoptimize::UseEnvironment has_uses);
int block_id_;
@@ -212,12 +210,12 @@ class HPredecessorIterator BASE_EMBEDDED {
class HLoopInformation: public ZoneObject {
public:
- explicit HLoopInformation(HBasicBlock* loop_header)
- : back_edges_(4),
+ HLoopInformation(HBasicBlock* loop_header, Zone* zone)
+ : back_edges_(4, zone),
loop_header_(loop_header),
- blocks_(8),
+ blocks_(8, zone),
stack_check_(NULL) {
- blocks_.Add(loop_header);
+ blocks_.Add(loop_header, zone);
}
virtual ~HLoopInformation() {}
@@ -247,7 +245,8 @@ class HGraph: public ZoneObject {
explicit HGraph(CompilationInfo* info);
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return zone_; }
+ CompilationInfo* info() const { return info_; }
const ZoneList<HBasicBlock*>* blocks() const { return &blocks_; }
const ZoneList<HPhi*>* phi_list() const { return phi_list_; }
@@ -259,6 +258,7 @@ class HGraph: public ZoneObject {
void InsertRepresentationChanges();
void MarkDeoptimizeOnUndefined();
void ComputeMinusZeroChecks();
+ void ComputeSafeUint32Operations();
bool ProcessArgumentsObject();
void EliminateRedundantPhis();
void EliminateUnreachablePhis();
@@ -268,7 +268,9 @@ class HGraph: public ZoneObject {
void ReplaceCheckedValues();
void EliminateRedundantBoundsChecks();
void DehoistSimpleArrayIndexComputations();
+ void DeadCodeElimination();
void PropagateDeoptimizingMark();
+ void EliminateUnusedInstructions();
// Returns false if there are phi-uses of the arguments-object
// which are not supported by the optimizing compiler.
@@ -280,8 +282,6 @@ class HGraph: public ZoneObject {
void CollectPhis();
- Handle<Code> Compile(CompilationInfo* info);
-
void set_undefined_constant(HConstant* constant) {
undefined_constant_.set(constant);
}
@@ -304,7 +304,7 @@ class HGraph: public ZoneObject {
int GetMaximumValueID() const { return values_.length(); }
int GetNextBlockID() { return next_block_id_++; }
int GetNextValueID(HValue* value) {
- values_.Add(value);
+ values_.Add(value, zone());
return values_.length() - 1;
}
HValue* LookupValue(int id) const {
@@ -312,6 +312,8 @@ class HGraph: public ZoneObject {
return NULL;
}
+ bool Optimize(SmartArrayPointer<char>* bailout_reason);
+
#ifdef DEBUG
void Verify(bool do_full_verify) const;
#endif
@@ -336,17 +338,39 @@ class HGraph: public ZoneObject {
osr_values_.set(values);
}
+ int update_type_change_checksum(int delta) {
+ type_change_checksum_ += delta;
+ return type_change_checksum_;
+ }
+
+ bool use_optimistic_licm() {
+ return use_optimistic_licm_;
+ }
+
+ void set_use_optimistic_licm(bool value) {
+ use_optimistic_licm_ = value;
+ }
+
+ void MarkRecursive() {
+ is_recursive_ = true;
+ }
+
+ bool is_recursive() const {
+ return is_recursive_;
+ }
+
+ void RecordUint32Instruction(HInstruction* instr) {
+ if (uint32_instructions_ == NULL) {
+ uint32_instructions_ = new(zone()) ZoneList<HInstruction*>(4, zone());
+ }
+ uint32_instructions_->Add(instr, zone());
+ }
+
private:
- void Postorder(HBasicBlock* block,
- BitVector* visited,
- ZoneList<HBasicBlock*>* order,
- HBasicBlock* loop_header);
- void PostorderLoopBlocks(HLoopInformation* loop,
- BitVector* visited,
- ZoneList<HBasicBlock*>* order,
- HBasicBlock* loop_header);
HConstant* GetConstant(SetOncePointer<HConstant>* pointer,
- Object* value);
+ Handle<Object> value);
+ HConstant* GetConstantInt32(SetOncePointer<HConstant>* pointer,
+ int32_t integer_value);
void MarkAsDeoptimizingRecursively(HBasicBlock* block);
void InsertTypeConversions(HInstruction* instr);
@@ -369,6 +393,7 @@ class HGraph: public ZoneObject {
ZoneList<HBasicBlock*> blocks_;
ZoneList<HValue*> values_;
ZoneList<HPhi*>* phi_list_;
+ ZoneList<HInstruction*>* uint32_instructions_;
SetOncePointer<HConstant> undefined_constant_;
SetOncePointer<HConstant> constant_1_;
SetOncePointer<HConstant> constant_minus1_;
@@ -380,29 +405,36 @@ class HGraph: public ZoneObject {
SetOncePointer<HBasicBlock> osr_loop_entry_;
SetOncePointer<ZoneList<HUnknownOSRValue*> > osr_values_;
+ CompilationInfo* info_;
+ Zone* zone_;
+
+ bool is_recursive_;
+ bool use_optimistic_licm_;
+ int type_change_checksum_;
+
DISALLOW_COPY_AND_ASSIGN(HGraph);
};
-Zone* HBasicBlock::zone() { return graph_->zone(); }
+Zone* HBasicBlock::zone() const { return graph_->zone(); }
// Type of stack frame an environment might refer to.
-enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };
+enum FrameType {
+ JS_FUNCTION,
+ JS_CONSTRUCT,
+ JS_GETTER,
+ JS_SETTER,
+ ARGUMENTS_ADAPTOR
+};
class HEnvironment: public ZoneObject {
public:
HEnvironment(HEnvironment* outer,
Scope* scope,
- Handle<JSFunction> closure);
-
- HEnvironment* DiscardInlined(bool drop_extra) {
- HEnvironment* outer = outer_;
- while (outer->frame_type() != JS_FUNCTION) outer = outer->outer_;
- if (drop_extra) outer->Drop(1);
- return outer;
- }
+ Handle<JSFunction> closure,
+ Zone* zone);
HEnvironment* arguments_environment() {
return outer()->frame_type() == ARGUMENTS_ADAPTOR ? outer() : this;
@@ -422,8 +454,11 @@ class HEnvironment: public ZoneObject {
int pop_count() const { return pop_count_; }
int push_count() const { return push_count_; }
- int ast_id() const { return ast_id_; }
- void set_ast_id(int id) { ast_id_ = id; }
+ BailoutId ast_id() const { return ast_id_; }
+ void set_ast_id(BailoutId id) { ast_id_ = id; }
+
+ HEnterInlined* entry() const { return entry_; }
+ void set_entry(HEnterInlined* entry) { entry_ = entry; }
int length() const { return values_.length(); }
bool is_special_index(int i) const {
@@ -462,7 +497,7 @@ class HEnvironment: public ZoneObject {
void Push(HValue* value) {
ASSERT(value != NULL);
++push_count_;
- values_.Add(value);
+ values_.Add(value, zone());
}
HValue* Pop() {
@@ -501,7 +536,14 @@ class HEnvironment: public ZoneObject {
FunctionLiteral* function,
HConstant* undefined,
CallKind call_kind,
- bool is_construct) const;
+ InliningKind inlining_kind) const;
+
+ HEnvironment* DiscardInlined(bool drop_extra) {
+ HEnvironment* outer = outer_;
+ while (outer->frame_type() != JS_FUNCTION) outer = outer->outer_;
+ if (drop_extra) outer->Drop(1);
+ return outer;
+ }
void AddIncomingEdge(HBasicBlock* block, HEnvironment* other);
@@ -519,13 +561,16 @@ class HEnvironment: public ZoneObject {
void PrintTo(StringStream* stream);
void PrintToStd();
+ Zone* zone() const { return zone_; }
+
private:
- explicit HEnvironment(const HEnvironment* other);
+ HEnvironment(const HEnvironment* other, Zone* zone);
HEnvironment(HEnvironment* outer,
Handle<JSFunction> closure,
FrameType frame_type,
- int arguments);
+ int arguments,
+ Zone* zone);
// Create an artificial stub environment (e.g. for argument adaptor or
// constructor stub).
@@ -560,9 +605,11 @@ class HEnvironment: public ZoneObject {
int specials_count_;
int local_count_;
HEnvironment* outer_;
+ HEnterInlined* entry_;
int pop_count_;
int push_count_;
- int ast_id_;
+ BailoutId ast_id_;
+ Zone* zone_;
};
@@ -590,13 +637,13 @@ class AstContext {
// Add a hydrogen instruction to the instruction stream (recording an
// environment simulation if necessary) and then fill this context with
// the instruction as value.
- virtual void ReturnInstruction(HInstruction* instr, int ast_id) = 0;
+ virtual void ReturnInstruction(HInstruction* instr, BailoutId ast_id) = 0;
// Finishes the current basic block and materialize a boolean for
// value context, nothing for effect, generate a branch for test context.
// Call this function in tail position in the Visit functions for
// expressions.
- virtual void ReturnControl(HControlInstruction* instr, int ast_id) = 0;
+ virtual void ReturnControl(HControlInstruction* instr, BailoutId ast_id) = 0;
void set_for_typeof(bool for_typeof) { for_typeof_ = for_typeof; }
bool is_for_typeof() { return for_typeof_; }
@@ -607,7 +654,7 @@ class AstContext {
HGraphBuilder* owner() const { return owner_; }
- inline Zone* zone();
+ inline Zone* zone() const;
// We want to be able to assert, in a context-specific way, that the stack
// height makes sense when the context is filled.
@@ -631,8 +678,8 @@ class EffectContext: public AstContext {
virtual ~EffectContext();
virtual void ReturnValue(HValue* value);
- virtual void ReturnInstruction(HInstruction* instr, int ast_id);
- virtual void ReturnControl(HControlInstruction* instr, int ast_id);
+ virtual void ReturnInstruction(HInstruction* instr, BailoutId ast_id);
+ virtual void ReturnControl(HControlInstruction* instr, BailoutId ast_id);
};
@@ -644,8 +691,8 @@ class ValueContext: public AstContext {
virtual ~ValueContext();
virtual void ReturnValue(HValue* value);
- virtual void ReturnInstruction(HInstruction* instr, int ast_id);
- virtual void ReturnControl(HControlInstruction* instr, int ast_id);
+ virtual void ReturnInstruction(HInstruction* instr, BailoutId ast_id);
+ virtual void ReturnControl(HControlInstruction* instr, BailoutId ast_id);
bool arguments_allowed() { return flag_ == ARGUMENTS_ALLOWED; }
@@ -658,17 +705,19 @@ class TestContext: public AstContext {
public:
TestContext(HGraphBuilder* owner,
Expression* condition,
+ TypeFeedbackOracle* oracle,
HBasicBlock* if_true,
HBasicBlock* if_false)
: AstContext(owner, Expression::kTest),
condition_(condition),
+ oracle_(oracle),
if_true_(if_true),
if_false_(if_false) {
}
virtual void ReturnValue(HValue* value);
- virtual void ReturnInstruction(HInstruction* instr, int ast_id);
- virtual void ReturnControl(HControlInstruction* instr, int ast_id);
+ virtual void ReturnInstruction(HInstruction* instr, BailoutId ast_id);
+ virtual void ReturnControl(HControlInstruction* instr, BailoutId ast_id);
static TestContext* cast(AstContext* context) {
ASSERT(context->IsTest());
@@ -676,6 +725,7 @@ class TestContext: public AstContext {
}
Expression* condition() const { return condition_; }
+ TypeFeedbackOracle* oracle() const { return oracle_; }
HBasicBlock* if_true() const { return if_true_; }
HBasicBlock* if_false() const { return if_false_; }
@@ -685,31 +735,24 @@ class TestContext: public AstContext {
void BuildBranch(HValue* value);
Expression* condition_;
+ TypeFeedbackOracle* oracle_;
HBasicBlock* if_true_;
HBasicBlock* if_false_;
};
-enum ReturnHandlingFlag {
- NORMAL_RETURN,
- DROP_EXTRA_ON_RETURN,
- CONSTRUCT_CALL_RETURN
-};
-
-
class FunctionState {
public:
FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- ReturnHandlingFlag return_handling);
+ InliningKind inlining_kind);
~FunctionState();
CompilationInfo* compilation_info() { return compilation_info_; }
TypeFeedbackOracle* oracle() { return oracle_; }
AstContext* call_context() { return call_context_; }
- bool drop_extra() { return return_handling_ == DROP_EXTRA_ON_RETURN; }
- bool is_construct() { return return_handling_ == CONSTRUCT_CALL_RETURN; }
+ InliningKind inlining_kind() const { return inlining_kind_; }
HBasicBlock* function_return() { return function_return_; }
TestContext* test_context() { return test_context_; }
void ClearInlinedTestContext() {
@@ -739,11 +782,8 @@ class FunctionState {
// inlined. NULL when not inlining.
AstContext* call_context_;
- // Indicate whether we have to perform special handling on return from
- // inlined functions.
- // - DROP_EXTRA_ON_RETURN: Drop an extra value from the environment.
- // - CONSTRUCT_CALL_RETURN: Either use allocated receiver or return value.
- ReturnHandlingFlag return_handling_;
+ // The kind of call which is currently being inlined.
+ InliningKind inlining_kind_;
// When inlining in an effect or value context, this is the return block.
// It is NULL otherwise. When inlining in a test context, there are a
@@ -840,7 +880,7 @@ class HGraphBuilder: public AstVisitor {
// Adding instructions.
HInstruction* AddInstruction(HInstruction* instr);
- void AddSimulate(int ast_id);
+ void AddSimulate(BailoutId ast_id);
// Bailout environment manipulation.
void Push(HValue* value) { environment()->Push(value); }
@@ -850,7 +890,7 @@ class HGraphBuilder: public AstVisitor {
HBasicBlock* CreateJoin(HBasicBlock* first,
HBasicBlock* second,
- int join_id);
+ BailoutId join_id);
TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
@@ -858,6 +898,12 @@ class HGraphBuilder: public AstVisitor {
void VisitDeclarations(ZoneList<Declaration*>* declarations);
+ void* operator new(size_t size, Zone* zone) {
+ return zone->New(static_cast<int>(size));
+ }
+ void operator delete(void* pointer, Zone* zone) { }
+ void operator delete(void* pointer) { }
+
private:
// Type of a member function that generates inline code for a native function.
typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call);
@@ -963,9 +1009,8 @@ class HGraphBuilder: public AstVisitor {
HBasicBlock* true_block,
HBasicBlock* false_block);
- // Visit an argument subexpression and emit a push to the outgoing
- // arguments. Returns the hydrogen value that was pushed.
- HValue* VisitArgument(Expression* expr);
+ // Visit an argument subexpression and emit a push to the outgoing arguments.
+ void VisitArgument(Expression* expr);
void VisitArgumentList(ZoneList<Expression*>* arguments);
@@ -1014,14 +1059,18 @@ class HGraphBuilder: public AstVisitor {
int InliningAstSize(Handle<JSFunction> target);
bool TryInline(CallKind call_kind,
Handle<JSFunction> target,
- ZoneList<Expression*>* arguments,
- HValue* receiver,
- int ast_id,
- int return_id,
- ReturnHandlingFlag return_handling);
+ int arguments_count,
+ HValue* implicit_return_value,
+ BailoutId ast_id,
+ BailoutId return_id,
+ InliningKind inlining_kind);
bool TryInlineCall(Call* expr, bool drop_extra = false);
- bool TryInlineConstruct(CallNew* expr, HValue* receiver);
+ bool TryInlineConstruct(CallNew* expr, HValue* implicit_return_value);
+ bool TryInlineGetter(Handle<JSFunction> getter, Property* prop);
+ bool TryInlineSetter(Handle<JSFunction> setter,
+ Assignment* assignment,
+ HValue* implicit_return_value);
bool TryInlineBuiltinMethodCall(Call* expr,
HValue* receiver,
Handle<Map> receiver_map,
@@ -1038,7 +1087,7 @@ class HGraphBuilder: public AstVisitor {
void HandleGlobalVariableAssignment(Variable* var,
HValue* value,
int position,
- int ast_id);
+ BailoutId ast_id);
void HandlePropertyAssignment(Assignment* expr);
void HandleCompoundAssignment(Assignment* expr);
@@ -1070,36 +1119,37 @@ class HGraphBuilder: public AstVisitor {
HValue* right);
HInstruction* BuildIncrement(bool returns_original_input,
CountOperation* expr);
- HLoadNamedField* BuildLoadNamedField(HValue* object,
- Property* expr,
- Handle<Map> type,
- LookupResult* result,
- bool smi_and_map_check);
- HInstruction* BuildLoadNamedGeneric(HValue* object, Property* expr);
- HInstruction* BuildLoadKeyedGeneric(HValue* object,
- HValue* key);
- HInstruction* BuildExternalArrayElementAccess(
- HValue* external_elements,
- HValue* checked_key,
- HValue* val,
- ElementsKind elements_kind,
- bool is_store);
HInstruction* BuildFastElementAccess(HValue* elements,
HValue* checked_key,
HValue* val,
+ HValue* dependency,
ElementsKind elements_kind,
bool is_store);
+ HInstruction* TryBuildConsolidatedElementLoad(HValue* object,
+ HValue* key,
+ HValue* val,
+ SmallMapList* maps);
+
+ HInstruction* BuildUncheckedMonomorphicElementAccess(HValue* object,
+ HValue* key,
+ HValue* val,
+ HCheckMaps* mapcheck,
+ Handle<Map> map,
+ bool is_store);
+
HInstruction* BuildMonomorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
+ HValue* dependency,
Handle<Map> map,
bool is_store);
+
HValue* HandlePolymorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
Expression* prop,
- int ast_id,
+ BailoutId ast_id,
int position,
bool is_store,
bool* has_side_effects);
@@ -1108,42 +1158,75 @@ class HGraphBuilder: public AstVisitor {
HValue* key,
HValue* val,
Expression* expr,
- int ast_id,
+ BailoutId ast_id,
int position,
bool is_store,
bool* has_side_effects);
- HInstruction* BuildLoadNamed(HValue* object,
- Property* prop,
- Handle<Map> map,
- Handle<String> name);
- HInstruction* BuildStoreNamed(HValue* object,
- HValue* value,
- Expression* expr);
- HInstruction* BuildStoreNamed(HValue* object,
- HValue* value,
- ObjectLiteral::Property* prop);
+ HLoadNamedField* BuildLoadNamedField(HValue* object,
+ Handle<Map> map,
+ LookupResult* result);
+ HInstruction* BuildLoadNamedGeneric(HValue* object,
+ Handle<String> name,
+ Property* expr);
+ HInstruction* BuildCallGetter(HValue* object,
+ Handle<Map> map,
+ Handle<JSFunction> getter,
+ Handle<JSObject> holder);
+ HInstruction* BuildLoadNamedMonomorphic(HValue* object,
+ Handle<String> name,
+ Property* expr,
+ Handle<Map> map);
+ HInstruction* BuildLoadKeyedGeneric(HValue* object, HValue* key);
+ HInstruction* BuildExternalArrayElementAccess(
+ HValue* external_elements,
+ HValue* checked_key,
+ HValue* val,
+ HValue* dependency,
+ ElementsKind elements_kind,
+ bool is_store);
+
+ void AddCheckMapsWithTransitions(HValue* object,
+ Handle<Map> map);
+
HInstruction* BuildStoreNamedField(HValue* object,
Handle<String> name,
HValue* value,
- Handle<Map> type,
- LookupResult* lookup,
- bool smi_and_map_check);
+ Handle<Map> map,
+ LookupResult* lookup);
HInstruction* BuildStoreNamedGeneric(HValue* object,
Handle<String> name,
HValue* value);
+ HInstruction* BuildCallSetter(HValue* object,
+ HValue* value,
+ Handle<Map> map,
+ Handle<JSFunction> setter,
+ Handle<JSObject> holder);
+ HInstruction* BuildStoreNamedMonomorphic(HValue* object,
+ Handle<String> name,
+ HValue* value,
+ Handle<Map> map);
HInstruction* BuildStoreKeyedGeneric(HValue* object,
HValue* key,
HValue* value);
HValue* BuildContextChainWalk(Variable* var);
- void AddCheckConstantFunction(Call* expr,
+ HInstruction* BuildThisFunction();
+
+ void AddCheckPrototypeMaps(Handle<JSObject> holder,
+ Handle<Map> receiver_map);
+
+ void AddCheckConstantFunction(Handle<JSObject> holder,
HValue* receiver,
- Handle<Map> receiver_map,
- bool smi_and_map_check);
+ Handle<Map> receiver_map);
+
+ bool MatchRotateRight(HValue* left,
+ HValue* right,
+ HValue** operand,
+ HValue** shift_amount);
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
// The translation state of the currently-being-translated function.
FunctionState* function_state_;
@@ -1175,12 +1258,12 @@ class HGraphBuilder: public AstVisitor {
};
-Zone* AstContext::zone() { return owner_->zone(); }
+Zone* AstContext::zone() const { return owner_->zone(); }
class HValueMap: public ZoneObject {
public:
- HValueMap()
+ explicit HValueMap(Zone* zone)
: array_size_(0),
lists_size_(0),
count_(0),
@@ -1188,15 +1271,15 @@ class HValueMap: public ZoneObject {
array_(NULL),
lists_(NULL),
free_list_head_(kNil) {
- ResizeLists(kInitialSize);
- Resize(kInitialSize);
+ ResizeLists(kInitialSize, zone);
+ Resize(kInitialSize, zone);
}
void Kill(GVNFlagSet flags);
- void Add(HValue* value) {
+ void Add(HValue* value, Zone* zone) {
present_flags_.Add(value->gvn_flags());
- Insert(value);
+ Insert(value, zone);
}
HValue* Lookup(HValue* value) const;
@@ -1220,9 +1303,9 @@ class HValueMap: public ZoneObject {
HValueMap(Zone* zone, const HValueMap* other);
- void Resize(int new_size);
- void ResizeLists(int new_size);
- void Insert(HValue* value);
+ void Resize(int new_size, Zone* zone);
+ void ResizeLists(int new_size, Zone* zone);
+ void Insert(HValue* value, Zone* zone);
uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
int array_size_;
@@ -1241,6 +1324,7 @@ class HSideEffectMap BASE_EMBEDDED {
public:
HSideEffectMap();
explicit HSideEffectMap(HSideEffectMap* other);
+ HSideEffectMap& operator= (const HSideEffectMap& other);
void Kill(GVNFlagSet flags);
@@ -1374,7 +1458,7 @@ class HTracer: public Malloced {
WriteChars(filename, "", 0, false);
}
- void TraceLiveRange(LiveRange* range, const char* type);
+ void TraceLiveRange(LiveRange* range, const char* type, Zone* zone);
void Trace(const char* name, HGraph* graph, LChunk* chunk);
void FlushToFile();
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h b/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
index 3cf0d00..114f878 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
@@ -46,12 +46,21 @@ namespace v8 {
namespace internal {
+static const byte kCallOpcode = 0xE8;
+
+
// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
int32_t* p = reinterpret_cast<int32_t*>(pc_);
*p -= delta; // Relocate entry.
CPU::FlushICache(p, sizeof(uint32_t));
+ } else if (rmode_ == CODE_AGE_SEQUENCE) {
+ if (*pc_ == kCallOpcode) {
+ int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+ *p -= delta; // Relocate entry.
+ CPU::FlushICache(p, sizeof(uint32_t));
+ }
} else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
// Special handling of js_return when a break point is set (call
// instruction has been inserted).
@@ -150,10 +159,7 @@ Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
JSGlobalPropertyCell* RelocInfo::target_cell() {
ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
- Address address = Memory::Address_at(pc_);
- Object* object = HeapObject::FromAddress(
- address - JSGlobalPropertyCell::kValueOffset);
- return reinterpret_cast<JSGlobalPropertyCell*>(object);
+ return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}
@@ -172,6 +178,21 @@ void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
}
+Code* RelocInfo::code_age_stub() {
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ ASSERT(*pc_ == kCallOpcode);
+ return Code::GetCodeFromTargetAddress(
+ Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+ ASSERT(*pc_ == kCallOpcode);
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}
+
+
Address RelocInfo::call_address() {
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
@@ -209,7 +230,7 @@ Object** RelocInfo::call_object_address() {
bool RelocInfo::IsPatchedReturnSequence() {
- return *pc_ == 0xE8;
+ return *pc_ == kCallOpcode;
}
@@ -230,7 +251,9 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
-#ifdef ENABLE_DEBUGGER_SUPPORT
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ visitor->VisitCodeAgeSequence(this);
+ #ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
@@ -258,6 +281,8 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
@@ -338,9 +363,9 @@ void Assembler::emit(Handle<Object> handle) {
}
-void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, unsigned id) {
- if (rmode == RelocInfo::CODE_TARGET && id != kNoASTId) {
- RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast<intptr_t>(id));
+void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, TypeFeedbackId id) {
+ if (rmode == RelocInfo::CODE_TARGET && !id.IsNone()) {
+ RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, id.ToInt());
} else if (rmode != RelocInfo::NONE) {
RecordRelocInfo(rmode);
}
@@ -390,6 +415,11 @@ void Assembler::set_target_address_at(Address pc, Address target) {
}
+Address Assembler::target_address_from_return_address(Address pc) {
+ return pc - kCallTargetAddressOffset;
+}
+
+
Displacement Assembler::disp_at(Label* L) {
return Displacement(long_at(L->pos()));
}
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32.cc b/src/3rdparty/v8/src/ia32/assembler-ia32.cc
index a42f632..06fc411 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32.cc
@@ -169,7 +169,7 @@ void Displacement::init(Label* L, Type type) {
const int RelocInfo::kApplyMask =
RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
- 1 << RelocInfo::DEBUG_BREAK_SLOT;
+ 1 << RelocInfo::DEBUG_BREAK_SLOT | 1 << RelocInfo::CODE_AGE_SEQUENCE;
bool RelocInfo::IsCodedSpecially() {
@@ -314,8 +314,7 @@ static void InitCoverageLog();
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
- positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code) {
+ positions_recorder_(this) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -1064,6 +1063,25 @@ void Assembler::rcr(Register dst, uint8_t imm8) {
}
}
+void Assembler::ror(Register dst, uint8_t imm8) {
+ EnsureSpace ensure_space(this);
+ ASSERT(is_uint5(imm8)); // illegal shift count
+ if (imm8 == 1) {
+ EMIT(0xD1);
+ EMIT(0xC8 | dst.code());
+ } else {
+ EMIT(0xC1);
+ EMIT(0xC8 | dst.code());
+ EMIT(imm8);
+ }
+}
+
+void Assembler::ror_cl(Register dst) {
+ EnsureSpace ensure_space(this);
+ EMIT(0xD3);
+ EMIT(0xC8 | dst.code());
+}
+
void Assembler::sar(Register dst, uint8_t imm8) {
EnsureSpace ensure_space(this);
@@ -1373,7 +1391,7 @@ void Assembler::bind_to(Label* L, int pos) {
ASSERT(offset_to_next <= 0);
// Relative address, relative to point after address.
int disp = pos - fixup_pos - sizeof(int8_t);
- ASSERT(0 <= disp && disp <= 127);
+ CHECK(0 <= disp && disp <= 127);
set_byte_at(fixup_pos, disp);
if (offset_to_next < 0) {
L->link_to(fixup_pos + offset_to_next, Label::kNear);
@@ -1440,7 +1458,7 @@ int Assembler::CallSize(Handle<Code> code, RelocInfo::Mode rmode) {
void Assembler::call(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
ASSERT(RelocInfo::IsCodeTarget(rmode));
@@ -1501,7 +1519,7 @@ void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
EnsureSpace ensure_space(this);
- ASSERT(0 <= cc && cc < 16);
+ ASSERT(0 <= cc && static_cast<int>(cc) < 16);
if (L->is_bound()) {
const int short_size = 2;
const int long_size = 6;
@@ -1533,7 +1551,7 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
- ASSERT((0 <= cc) && (cc < 16));
+ ASSERT((0 <= cc) && (static_cast<int>(cc) < 16));
// 0000 1111 1000 tttn #32-bit disp.
EMIT(0x0F);
EMIT(0x80 | cc);
@@ -1938,6 +1956,16 @@ void Assembler::cvttsd2si(Register dst, const Operand& src) {
}
+void Assembler::cvtsd2si(Register dst, XMMRegister src) {
+ ASSERT(CpuFeatures::IsEnabled(SSE2));
+ EnsureSpace ensure_space(this);
+ EMIT(0xF2);
+ EMIT(0x0F);
+ EMIT(0x2D);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
@@ -2044,6 +2072,15 @@ void Assembler::andpd(XMMRegister dst, XMMRegister src) {
}
+void Assembler::orpd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x56);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
@@ -2086,6 +2123,16 @@ void Assembler::movmskpd(Register dst, XMMRegister src) {
}
+void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
+ ASSERT(CpuFeatures::IsEnabled(SSE2));
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x76);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32.h b/src/3rdparty/v8/src/ia32/assembler-ia32.h
index 4ead80b..9fb7baa 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32.h
@@ -584,9 +584,6 @@ class Assembler : public AssemblerBase {
Assembler(Isolate* isolate, void* buffer, int buffer_size);
~Assembler();
- // Overrides the default provided by FLAG_debug_code.
- void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -596,6 +593,10 @@ class Assembler : public AssemblerBase {
inline static Address target_address_at(Address pc);
inline static void set_target_address_at(Address pc, Address target);
+ // Return the code target address at a call site from the return address
+ // of that call in the instruction stream.
+ inline static Address target_address_from_return_address(Address pc);
+
// This sets the branch destination (which is in the instruction on x86).
// This is for calls and branches within generated code.
inline static void deserialization_set_special_target_at(
@@ -624,6 +625,7 @@ class Assembler : public AssemblerBase {
static const int kPatchDebugBreakSlotAddressOffset = 1; // JMP imm32.
static const int kCallInstructionLength = 5;
+ static const int kPatchDebugBreakSlotReturnOffset = kPointerSize;
static const int kJSReturnSequenceLength = 6;
// The debug break slot must be able to contain a call instruction.
@@ -807,6 +809,8 @@ class Assembler : public AssemblerBase {
void rcl(Register dst, uint8_t imm8);
void rcr(Register dst, uint8_t imm8);
+ void ror(Register dst, uint8_t imm8);
+ void ror_cl(Register dst);
void sar(Register dst, uint8_t imm8);
void sar_cl(Register dst);
@@ -883,8 +887,8 @@ class Assembler : public AssemblerBase {
void call(const Operand& adr);
int CallSize(Handle<Code> code, RelocInfo::Mode mode);
void call(Handle<Code> code,
- RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId);
+ RelocInfo::Mode rmode,
+ TypeFeedbackId id = TypeFeedbackId::None());
// Jumps
// unconditional jump to L
@@ -978,6 +982,7 @@ class Assembler : public AssemblerBase {
// SSE2 instructions
void cvttss2si(Register dst, const Operand& src);
void cvttsd2si(Register dst, const Operand& src);
+ void cvtsd2si(Register dst, XMMRegister src);
void cvtsi2sd(XMMRegister dst, Register src) { cvtsi2sd(dst, Operand(src)); }
void cvtsi2sd(XMMRegister dst, const Operand& src);
@@ -993,6 +998,7 @@ class Assembler : public AssemblerBase {
void sqrtsd(XMMRegister dst, XMMRegister src);
void andpd(XMMRegister dst, XMMRegister src);
+ void orpd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, const Operand& src);
@@ -1009,6 +1015,7 @@ class Assembler : public AssemblerBase {
void movmskpd(Register dst, XMMRegister src);
void cmpltsd(XMMRegister dst, XMMRegister src);
+ void pcmpeqd(XMMRegister dst, XMMRegister src);
void movaps(XMMRegister dst, XMMRegister src);
@@ -1110,8 +1117,6 @@ class Assembler : public AssemblerBase {
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
protected:
- bool emit_debug_code() const { return emit_debug_code_; }
-
void movsd(XMMRegister dst, const Operand& src);
void movsd(const Operand& dst, XMMRegister src);
@@ -1136,7 +1141,7 @@ class Assembler : public AssemblerBase {
inline void emit(Handle<Object> handle);
inline void emit(uint32_t x,
RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ TypeFeedbackId id = TypeFeedbackId::None());
inline void emit(const Immediate& x);
inline void emit_w(const Immediate& x);
@@ -1184,9 +1189,6 @@ class Assembler : public AssemblerBase {
RelocInfoWriter reloc_info_writer;
PositionsRecorder positions_recorder_;
-
- bool emit_debug_code_;
-
friend class PositionsRecorder;
};
diff --git a/src/3rdparty/v8/src/ia32/builtins-ia32.cc b/src/3rdparty/v8/src/ia32/builtins-ia32.cc
index a36763d..01785bb 100644
--- a/src/3rdparty/v8/src/ia32/builtins-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/builtins-ia32.cc
@@ -74,6 +74,43 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+ __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
+ __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
+ __ jmp(eax);
+}
+
+
+void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ GenerateTailCallToSharedCode(masm);
+}
+
+
+void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Push a copy of the function onto the stack.
+ __ push(edi);
+ // Push call kind information.
+ __ push(ecx);
+
+ __ push(edi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kParallelRecompile, 1);
+
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
+ __ pop(edi);
+
+ // Tear down internal frame.
+ }
+
+ GenerateTailCallToSharedCode(masm);
+}
+
+
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -501,6 +538,42 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
}
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+ // For now, we are relying on the fact that make_code_young doesn't do any
+ // garbage collection which allows us to save/restore the registers without
+ // worrying about which of them contain pointers. We also don't build an
+ // internal frame to make the code faster, since we shouldn't have to do stack
+ // crawls in MakeCodeYoung. This seems a bit fragile.
+
+ // Re-execute the code that was patched back to the young age when
+ // the stub returns.
+ __ sub(Operand(esp, 0), Immediate(5));
+ __ pushad();
+ __ mov(eax, Operand(esp, 8 * kPointerSize));
+ {
+ FrameScope scope(masm, StackFrame::MANUAL);
+ __ PrepareCallCFunction(1, ebx);
+ __ mov(Operand(esp, 0), eax);
+ __ CallCFunction(
+ ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+ }
+ __ popad();
+ __ ret(0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+} \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
@@ -641,9 +714,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// receiver.
__ bind(&use_global_receiver);
const int kGlobalIndex =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ mov(ebx, FieldOperand(esi, kGlobalIndex));
- __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
+ __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
__ mov(ebx, FieldOperand(ebx, kGlobalIndex));
__ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
@@ -819,9 +892,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Use the current global receiver object as the receiver.
__ bind(&use_global_receiver);
const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ mov(ebx, FieldOperand(esi, kGlobalOffset));
- __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
+ __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
__ mov(ebx, FieldOperand(ebx, kGlobalOffset));
__ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
@@ -900,7 +973,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1003,7 +1076,8 @@ static void AllocateJSArray(MacroAssembler* masm,
ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
- __ LoadInitialArrayMap(array_function, scratch, elements_array);
+ __ LoadInitialArrayMap(array_function, scratch,
+ elements_array, fill_with_hole);
// Allocate the JSArray object together with space for a FixedArray with the
// requested elements.
@@ -1274,11 +1348,11 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ jmp(&prepare_generic_code_call);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
__ mov(ebx, Operand(esp, 0));
__ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
__ LoadTransitionedArrayMapConditional(
- FAST_SMI_ONLY_ELEMENTS,
+ FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
edi,
eax,
diff --git a/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc b/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
index fe9db7b..80954b8 100644
--- a/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
@@ -66,9 +66,13 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure from the given function info in new
// space. Set the context to the current context in esi.
+ Counters* counters = masm->isolate()->counters();
+
Label gc;
__ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
+ __ IncrementCounter(counters->fast_new_closure_total(), 1);
+
// Get the function info from the stack.
__ mov(edx, Operand(esp, 1 * kPointerSize));
@@ -76,12 +80,12 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
- // Compute the function map in the current global context and set that
+ // Compute the function map in the current native context and set that
// as the map of the allocated object.
- __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
- __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
- __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
+ __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
+ __ mov(ebx, Operand(ecx, Context::SlotOffset(map_index)));
+ __ mov(FieldOperand(eax, JSObject::kMapOffset), ebx);
// Initialize the rest of the function. We don't have to update the
// write barrier because the allocated object is in new space.
@@ -94,11 +98,20 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
__ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
__ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
- __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
- Immediate(factory->undefined_value()));
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ mov(ebx, FieldOperand(edx, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ test(ebx, ebx);
+ __ j(not_zero, &check_optimized, Label::kNear);
+ }
+ __ bind(&install_unoptimized);
+ __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
+ Immediate(factory->undefined_value()));
__ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
__ lea(edx, FieldOperand(edx, Code::kHeaderSize));
__ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
@@ -106,6 +119,68 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Return and remove the on-stack parameter.
__ ret(1 * kPointerSize);
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
+
+ // ecx holds native context, ebx points to fixed array of 3-element entries
+ // (native context, optimized code, literals).
+ // Map must never be empty, so check the first elements.
+ Label install_optimized;
+ // Speculatively move code object into edx.
+ __ mov(edx, FieldOperand(ebx, FixedArray::kHeaderSize + kPointerSize));
+ __ cmp(ecx, FieldOperand(ebx, FixedArray::kHeaderSize));
+ __ j(equal, &install_optimized);
+
+ // Iterate through the rest of map backwards. edx holds an index as a Smi.
+ Label loop;
+ Label restore;
+ __ mov(edx, FieldOperand(ebx, FixedArray::kLengthOffset));
+ __ bind(&loop);
+ // Do not double check first entry.
+ __ cmp(edx, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
+ __ j(equal, &restore);
+ __ sub(edx, Immediate(Smi::FromInt(
+ SharedFunctionInfo::kEntryLength))); // Skip an entry.
+ __ cmp(ecx, CodeGenerator::FixedArrayElementOperand(ebx, edx, 0));
+ __ j(not_equal, &loop, Label::kNear);
+ // Hit: fetch the optimized code.
+ __ mov(edx, CodeGenerator::FixedArrayElementOperand(ebx, edx, 1));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
+
+ // TODO(fschneider): Idea: store proper code pointers in the optimized code
+ // map and either unmangle them on marking or do nothing as the whole map is
+ // discarded on major GC anyway.
+ __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
+ __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
+
+ // Now link a function into a list of optimized functions.
+ __ mov(edx, ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), edx);
+ // No need for write barrier as JSFunction (eax) is in the new space.
+
+ __ mov(ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST), eax);
+ // Store JSFunction (eax) into edx before issuing write barrier as
+ // it clobbers all the registers passed.
+ __ mov(edx, eax);
+ __ RecordWriteContextSlot(
+ ecx,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ edx,
+ ebx,
+ kDontSaveFPRegs);
+
+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ __ bind(&restore);
+ // Restore SharedFunctionInfo into edx.
+ __ mov(edx, Operand(esp, 1 * kPointerSize));
+ __ jmp(&install_unoptimized);
+
// Create a new closure through the slower runtime call.
__ bind(&gc);
__ pop(ecx); // Temporarily remove return address.
@@ -142,12 +217,12 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
// Copy the global object from the previous context.
- __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
+ __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), ebx);
// Copy the qml global object from the previous context.
- __ mov(ebx, Operand(esi, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ mov(Operand(eax, Context::SlotOffset(Context::QML_GLOBAL_INDEX)), ebx);
+ __ mov(ebx, Operand(esi, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
+ __ mov(Operand(eax, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)), ebx);
// Initialize the rest of the slots to undefined.
@@ -191,9 +266,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(eax, Context::kLengthOffset),
Immediate(Smi::FromInt(length)));
- // If this block context is nested in the global context we get a smi
+ // If this block context is nested in the native context we get a smi
// sentinel instead of a function. The block context should get the
- // canonical empty function of the global context as its closure which
+ // canonical empty function of the native context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
@@ -203,7 +278,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ Assert(equal, message);
}
__ mov(ecx, GlobalObjectOperand());
- __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
__ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
@@ -213,12 +288,12 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
// Copy the global object from the previous context.
- __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX));
- __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx);
+ __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
+ __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
// Copy the qml global object from the previous context.
- __ mov(ebx, ContextOperand(esi, Context::QML_GLOBAL_INDEX));
- __ mov(ContextOperand(eax, Context::QML_GLOBAL_INDEX), ebx);
+ __ mov(ebx, ContextOperand(esi, Context::QML_GLOBAL_OBJECT_INDEX));
+ __ mov(ContextOperand(eax, Context::QML_GLOBAL_OBJECT_INDEX), ebx);
// Initialize the rest of the slots to the hole value.
if (slots_ == 1) {
@@ -1727,9 +1802,10 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
if (result_type_ <= BinaryOpIC::INT32) {
__ cvttsd2si(ecx, Operand(xmm0));
__ cvtsi2sd(xmm2, ecx);
- __ ucomisd(xmm0, xmm2);
- __ j(not_zero, &not_int32);
- __ j(carry, &not_int32);
+ __ pcmpeqd(xmm2, xmm0);
+ __ movmskpd(ecx, xmm2);
+ __ test(ecx, Immediate(1));
+ __ j(zero, &not_int32);
}
GenerateHeapResultAllocation(masm, &call_runtime);
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
@@ -3147,21 +3223,28 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ movsd(double_scratch2, double_result); // Load double_exponent with 1.
// Get absolute value of exponent.
- Label no_neg, while_true, no_multiply;
+ Label no_neg, while_true, while_false;
__ test(scratch, scratch);
__ j(positive, &no_neg, Label::kNear);
__ neg(scratch);
__ bind(&no_neg);
- __ bind(&while_true);
+ __ j(zero, &while_false, Label::kNear);
__ shr(scratch, 1);
- __ j(not_carry, &no_multiply, Label::kNear);
- __ mulsd(double_result, double_scratch);
- __ bind(&no_multiply);
+ // Above condition means CF==0 && ZF==0. This means that the
+ // bit that has been shifted out is 0 and the result is not 0.
+ __ j(above, &while_true, Label::kNear);
+ __ movsd(double_result, double_scratch);
+ __ j(zero, &while_false, Label::kNear);
+ __ bind(&while_true);
+ __ shr(scratch, 1);
__ mulsd(double_scratch, double_scratch);
+ __ j(above, &while_true, Label::kNear);
+ __ mulsd(double_result, double_scratch);
__ j(not_zero, &while_true);
+ __ bind(&while_false);
// scratch has the original value of the exponent - if the exponent is
// negative, return 1/result.
__ test(exponent, exponent);
@@ -3368,10 +3451,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// esp[0] = mapped parameter count (tagged)
// esp[8] = parameter count (tagged)
// esp[12] = address of receiver argument
- // Get the arguments boilerplate from the current (global) context into edi.
+ // Get the arguments boilerplate from the current native context into edi.
Label has_mapped_parameters, copy;
- __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
__ mov(ebx, Operand(esp, 0 * kPointerSize));
__ test(ebx, ebx);
__ j(not_zero, &has_mapped_parameters, Label::kNear);
@@ -3519,7 +3602,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ bind(&runtime);
__ pop(eax); // Remove saved parameter count.
__ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
- __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
@@ -3561,9 +3644,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
- // Get the arguments boilerplate from the current (global) context.
- __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
+ // Get the arguments boilerplate from the current native context.
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
const int offset =
Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
__ mov(edi, Operand(edi, offset));
@@ -3682,7 +3765,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
__ add(edx, Immediate(2)); // edx was a smi.
// Check that the static offsets vector buffer is large enough.
- __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
+ __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize);
__ j(above, &runtime);
// ecx: RegExp data (FixedArray)
@@ -3831,20 +3914,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
+ static const int kRegExpExecuteArguments = 9;
__ EnterApiExitFrame(kRegExpExecuteArguments);
- // Argument 8: Pass current isolate address.
- __ mov(Operand(esp, 7 * kPointerSize),
+ // Argument 9: Pass current isolate address.
+ __ mov(Operand(esp, 8 * kPointerSize),
Immediate(ExternalReference::isolate_address()));
- // Argument 7: Indicate that this is a direct call from JavaScript.
- __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
+ // Argument 8: Indicate that this is a direct call from JavaScript.
+ __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
__ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
- __ mov(Operand(esp, 5 * kPointerSize), esi);
+ __ mov(Operand(esp, 6 * kPointerSize), esi);
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
// Argument 5: static offsets vector buffer.
__ mov(Operand(esp, 4 * kPointerSize),
@@ -3907,7 +3994,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
+ __ cmp(eax, 1);
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ j(equal, &success);
Label failure;
__ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
@@ -4070,11 +4159,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set empty properties FixedArray.
// Set elements to point to FixedArray allocated right after the JSArray.
// Interleave operations for better latency.
- __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
+ __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
Factory* factory = masm->isolate()->factory();
__ mov(ecx, Immediate(factory->empty_fixed_array()));
__ lea(ebx, Operand(eax, JSRegExpResult::kSize));
- __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
+ __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
__ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
@@ -4098,15 +4187,15 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
Immediate(factory->fixed_array_map()));
// Set length.
__ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
- // Fill contents of fixed-array with the-hole.
+ // Fill contents of fixed-array with undefined.
__ SmiUntag(ecx);
- __ mov(edx, Immediate(factory->the_hole_value()));
+ __ mov(edx, Immediate(factory->undefined_value()));
__ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
- // Fill fixed array elements with hole.
+ // Fill fixed array elements with undefined.
// eax: JSArray.
// ecx: Number of elements to fill.
// ebx: Start of elements in FixedArray.
- // edx: the hole.
+ // edx: undefined.
Label loop;
__ test(ecx, ecx);
__ bind(&loop);
@@ -4292,13 +4381,13 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ jmp(&not_user_equal);
__ bind(&user_equal);
-
+
__ pop(ebx); // Return address.
__ push(eax);
__ push(edx);
__ push(ebx);
__ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
+
__ bind(&not_user_equal);
}
@@ -5676,7 +5765,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
__ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
__ and_(ecx, edi);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ test(ecx, Immediate(kStringEncodingMask));
__ j(zero, &non_ascii);
@@ -5685,7 +5774,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
- if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
+ __ AssertSmi(ebx);
__ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
__ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
@@ -5704,9 +5793,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ xor_(edi, ecx);
- STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
- __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
- __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
+ STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+ __ and_(edi, kOneByteStringTag | kAsciiDataHintTag);
+ __ cmp(edi, kOneByteStringTag | kAsciiDataHintTag);
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
__ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
@@ -6277,7 +6366,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ test(ebx, Immediate(kStringEncodingMask));
__ j(zero, &two_byte_slice, Label::kNear);
@@ -6324,7 +6413,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ push(edx);
__ push(edi);
__ SmiUntag(ecx);
- STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ test_b(ebx, kStringEncodingMask);
__ j(zero, &two_byte_sequential);
@@ -6946,8 +7035,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
ASSERT(!name.is(r0));
ASSERT(!name.is(r1));
- // Assert that name contains a string.
- if (FLAG_debug_code) __ AbortIfNotString(name);
+ __ AssertString(name);
__ mov(r1, FieldOperand(elements, kCapacityOffset));
__ shr(r1, kSmiTagSize); // convert smi to int
@@ -7111,8 +7199,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET},
{ REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET},
{ REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET},
@@ -7121,6 +7209,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
{ REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
+ // FastNewClosureStub
+ { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
@@ -7169,6 +7259,11 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
}
+bool CodeStub::CanUseFPRegisters() {
+ return CpuFeatures::IsSupported(SSE2);
+}
+
+
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
@@ -7289,6 +7384,17 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Mode mode) {
Label object_is_black, need_incremental, need_incremental_pop_object;
+ __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
+ __ and_(regs_.scratch0(), regs_.object());
+ __ mov(regs_.scratch1(),
+ Operand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ sub(regs_.scratch1(), Immediate(1));
+ __ mov(Operand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset),
+ regs_.scratch1());
+ __ j(negative, &need_incremental);
+
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(),
@@ -7384,9 +7490,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ CheckFastElements(edi, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
__ JumpIfSmi(eax, &smi_element);
- __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear);
+ __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
// Store into the array literal requires a elements transition. Call into
// the runtime.
@@ -7408,7 +7514,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ pop(edx);
__ jmp(&slow_elements);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
__ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
@@ -7421,15 +7527,15 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
OMIT_SMI_CHECK);
__ ret(0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
__ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
FixedArrayBase::kHeaderSize), eax);
__ ret(0);
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ push(edx);
@@ -7445,6 +7551,38 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ ret(0);
}
+
+void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
+ if (entry_hook_ != NULL) {
+ ProfileEntryHookStub stub;
+ masm->CallStub(&stub);
+ }
+}
+
+
+void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
+ // Ecx is the only volatile register we must save.
+ __ push(ecx);
+
+ // Calculate and push the original stack pointer.
+ __ lea(eax, Operand(esp, kPointerSize));
+ __ push(eax);
+
+ // Calculate and push the function address.
+ __ mov(eax, Operand(eax, 0));
+ __ sub(eax, Immediate(Assembler::kCallInstructionLength));
+ __ push(eax);
+
+ // Call the entry hook.
+ int32_t hook_location = reinterpret_cast<int32_t>(&entry_hook_);
+ __ call(Operand(hook_location, RelocInfo::NONE));
+ __ add(esp, Immediate(2 * kPointerSize));
+
+ // Restore ecx.
+ __ pop(ecx);
+ __ ret(0);
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/ia32/codegen-ia32.cc b/src/3rdparty/v8/src/ia32/codegen-ia32.cc
index cff6454..4c79519 100644
--- a/src/3rdparty/v8/src/ia32/codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/codegen-ia32.cc
@@ -351,7 +351,7 @@ OS::MemCopyFunction CreateMemCopyFunction() {
#define __ ACCESS_MASM(masm)
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : value
@@ -372,7 +372,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- eax : value
@@ -732,7 +732,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Dispatch on the encoding: ASCII or two-byte.
Label ascii;
__ bind(&seq_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ test(result, Immediate(kStringEncodingMask));
__ j(not_zero, &ascii, Label::kNear);
@@ -757,6 +757,103 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
#undef __
+static const int kNoCodeAgeSequenceLength = 5;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+ static bool initialized = false;
+ static byte sequence[kNoCodeAgeSequenceLength];
+ *length = kNoCodeAgeSequenceLength;
+ if (!initialized) {
+ // The sequence of instructions that is patched out for aging code is the
+ // following boilerplate stack-building prologue that is found both in
+ // FUNCTION and OPTIMIZED_FUNCTION code:
+ CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+ patcher.masm()->push(ebp);
+ patcher.masm()->mov(ebp, esp);
+ patcher.masm()->push(esi);
+ patcher.masm()->push(edi);
+ initialized = true;
+ }
+ return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+ byte* start = instruction_start();
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (!memcmp(start, young_sequence, young_length) ||
+ *start == kCallOpcode) {
+ return start;
+ } else {
+ if (kind() == FUNCTION) {
+ byte* start_after_strict =
+ start + kSizeOfFullCodegenStrictModePrologue;
+ ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+ start[kSizeOfFullCodegenStrictModePrologue] == kCallOpcode);
+ return start_after_strict;
+ } else {
+ ASSERT(kind() == OPTIMIZED_FUNCTION);
+ start = instruction_start() + kSizeOfOptimizedStrictModePrologue;
+ if (!memcmp(start, young_sequence, young_length) ||
+ *start == kCallOpcode) {
+ return start;
+ }
+ start = instruction_start() + kSizeOfOptimizedAlignStackPrologue;
+ if (!memcmp(start, young_sequence, young_length) ||
+ *start == kCallOpcode) {
+ return start;
+ }
+ start = instruction_start() + kSizeOfOptimizedAlignStackPrologue +
+ kSizeOfOptimizedStrictModePrologue;
+ ASSERT(!memcmp(start, young_sequence, young_length) ||
+ *start == kCallOpcode);
+ return start;
+ }
+ }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ bool result = (!memcmp(sequence, young_sequence, young_length));
+ ASSERT(result || *sequence == kCallOpcode);
+ return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+ MarkingParity* parity) {
+ if (IsYoungSequence(sequence)) {
+ *age = kNoAge;
+ *parity = NO_MARKING_PARITY;
+ } else {
+ sequence++; // Skip the kCallOpcode byte
+ Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+ Assembler::kCallTargetAddressOffset;
+ Code* stub = GetCodeFromTargetAddress(target_address);
+ GetCodeAgeAndParity(stub, age, parity);
+ }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+ Code::Age age,
+ MarkingParity parity) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (age == kNoAge) {
+ memcpy(sequence, young_sequence, young_length);
+ CPU::FlushICache(sequence, young_length);
+ } else {
+ Code* stub = GetCodeAgeStub(age, parity);
+ CodePatcher patcher(sequence, young_length);
+ patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE);
+ }
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32
diff --git a/src/3rdparty/v8/src/ia32/codegen-ia32.h b/src/3rdparty/v8/src/ia32/codegen-ia32.h
index f4ab0b5..a783e9a 100644
--- a/src/3rdparty/v8/src/ia32/codegen-ia32.h
+++ b/src/3rdparty/v8/src/ia32/codegen-ia32.h
@@ -37,6 +37,10 @@ namespace internal {
// Forward declarations
class CompilationInfo;
+static const int kSizeOfFullCodegenStrictModePrologue = 34;
+static const int kSizeOfOptimizedStrictModePrologue = 12;
+static const int kSizeOfOptimizedAlignStackPrologue = 44;
+
// -------------------------------------------------------------------------
// CodeGenerator
diff --git a/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc b/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
index 73961e1..99ad522 100644
--- a/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
@@ -117,6 +117,10 @@ void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
+ // The optimized code is going to be patched, so we cannot use it
+ // any more. Play safe and reset the whole cache.
+ function->shared()->ClearOptimizedCodeMap();
+
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
@@ -194,8 +198,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
- // Set the code for the function to non-optimized version.
- function->ReplaceCode(function->shared()->code());
+ ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
@@ -284,11 +287,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
}
-static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
+static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
ByteArray* translations = data->TranslationByteArray();
int length = data->DeoptCount();
for (int i = 0; i < length; i++) {
- if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
+ if (data->AstId(i) == ast_id) {
TranslationIterator it(translations, data->TranslationIndex(i)->value());
int value = it.Next();
ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
@@ -310,7 +313,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
// the ast id. Confusing.
ASSERT(bailout_id_ == ast_id);
- int bailout_id = LookupBailoutId(data, ast_id);
+ int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
unsigned translation_index = data->TranslationIndex(bailout_id)->value();
ByteArray* translations = data->TranslationByteArray();
@@ -330,9 +333,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
- USE(function);
- ASSERT(function == function_);
+ int closure_id = iterator.Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
unsigned height = iterator.Next();
unsigned height_in_bytes = height * kPointerSize;
USE(height_in_bytes);
@@ -351,10 +354,12 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
reinterpret_cast<intptr_t>(function_));
function_->PrintName();
- PrintF(" => node=%u, frame=%d->%d]\n",
+ PrintF(" => node=%u, frame=%d->%d, ebp:esp=0x%08x:0x%08x]\n",
ast_id,
input_frame_size,
- output_frame_size);
+ output_frame_size,
+ input_->GetRegister(ebp.code()),
+ input_->GetRegister(esp.code()));
}
// There's only one output frame in the OSR case.
@@ -404,7 +409,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
name = "function";
break;
}
- PrintF(" [esp + %d] <- 0x%08x ; [esp + %d] (fixed part - %s)\n",
+ PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
output_offset,
input_value,
input_offset,
@@ -415,6 +420,24 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_offset -= kPointerSize;
}
+ // All OSR stack frames are dynamically aligned to an 8-byte boundary.
+ int frame_pointer = input_->GetRegister(ebp.code());
+ if ((frame_pointer & kPointerSize) != 0) {
+ frame_pointer -= kPointerSize;
+ has_alignment_padding_ = 1;
+ }
+
+ int32_t alignment_state = (has_alignment_padding_ == 1) ?
+ kAlignmentPaddingPushed :
+ kNoAlignmentPadding;
+ if (FLAG_trace_osr) {
+ PrintF(" [sp + %d] <- 0x%08x ; (alignment state)\n",
+ output_offset,
+ alignment_state);
+ }
+ output_[0]->SetFrameSlot(output_offset, alignment_state);
+ output_offset -= kPointerSize;
+
// Translate the rest of the frame.
while (ok && input_offset >= 0) {
ok = DoOsrTranslateCommand(&iterator, &input_offset);
@@ -427,7 +450,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
} else {
// Set up the frame pointer and the context pointer.
- output_[0]->SetRegister(ebp.code(), input_->GetRegister(ebp.code()));
+ output_[0]->SetRegister(ebp.code(), frame_pointer);
output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code()));
unsigned pc_offset = data->OsrPcOffset()->value();
@@ -436,15 +459,15 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0]->SetPc(pc);
}
Code* continuation =
- function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
+ function_->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
output_[0]->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
if (FLAG_trace_osr) {
PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
ok ? "finished" : "aborted",
- reinterpret_cast<intptr_t>(function));
- function->PrintName();
+ reinterpret_cast<intptr_t>(function_));
+ function_->PrintName();
PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
}
}
@@ -659,16 +682,143 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
+ int frame_index,
+ bool is_setter_stub_frame) {
+ JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ // The receiver (and the implicit return value, if any) are expected in
+ // registers by the LoadIC/StoreIC, so they don't belong to the output stack
+ // frame. This means that we have to use a height of 0.
+ unsigned height = 0;
+ unsigned height_in_bytes = height * kPointerSize;
+ const char* kind = is_setter_stub_frame ? "setter" : "getter";
+ if (FLAG_trace_deopt) {
+ PrintF(" translating %s stub => height=%u\n", kind, height_in_bytes);
+ }
+
+ // We need 1 stack entry for the return address + 4 stack entries from
+ // StackFrame::INTERNAL (FP, context, frame type, code object, see
+ // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
+ // entry for the implicit return value, see
+ // StoreStubCompiler::CompileStoreViaSetter.
+ unsigned fixed_frame_entries = 1 + 4 + (is_setter_stub_frame ? 1 : 0);
+ unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, accessor);
+ output_frame->SetFrameType(StackFrame::INTERNAL);
+
+ // A frame for an accessor stub can not be the topmost or bottommost one.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous frame's top and
+ // this frame's size.
+ intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ unsigned output_offset = output_frame_size;
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; function (%s sentinel)\n",
+ top_address + output_offset, output_offset, value, kind);
+ }
+
+ // Get Code object from accessor stub.
+ output_offset -= kPointerSize;
+ Builtins::Name name = is_setter_stub_frame ?
+ Builtins::kStoreIC_Setter_ForDeopt :
+ Builtins::kLoadIC_Getter_ForDeopt;
+ Code* accessor_stub = isolate_->builtins()->builtin(name);
+ value = reinterpret_cast<intptr_t>(accessor_stub);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; code object\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Skip receiver.
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator->Next());
+ iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+
+ if (is_setter_stub_frame) {
+ // The implicit return value was part of the artificial setter stub
+ // environment.
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Smi* offset = is_setter_stub_frame ?
+ isolate_->heap()->setter_stub_deopt_pc_offset() :
+ isolate_->heap()->getter_stub_deopt_pc_offset();
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ accessor_stub->instruction_start() + offset->value());
+ output_frame->SetPc(pc);
+}
+
+
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
- int node_id = iterator->Next();
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ BailoutId node_id = BailoutId(iterator->Next());
+ JSFunction* function;
+ if (frame_index != 0) {
+ function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ } else {
+ int closure_id = iterator->Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
+ function = function_;
+ }
unsigned height = iterator->Next();
unsigned height_in_bytes = height * kPointerSize;
if (FLAG_trace_deopt) {
PrintF(" translating ");
function->PrintName();
- PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
+ PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
}
// The 'fixed' part of the frame consists of the incoming parameters and
@@ -688,24 +838,38 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
ASSERT(output_[frame_index] == NULL);
output_[frame_index] = output_frame;
+ // Compute the incoming parameter translation.
+ int parameter_count = function->shared()->formal_parameter_count() + 1;
+ unsigned output_offset = output_frame_size;
+ unsigned input_offset = input_frame_size;
+
+ unsigned alignment_state_offset =
+ input_offset - parameter_count * kPointerSize -
+ StandardFrameConstants::kFixedFrameSize -
+ kPointerSize;
+ ASSERT(JavaScriptFrameConstants::kDynamicAlignmentStateOffset ==
+ JavaScriptFrameConstants::kLocal0Offset);
+
// The top address for the bottommost output frame can be computed from
// the input frame pointer and the output frame's height. For all
// subsequent output frames, it can be computed from the previous one's
// top address and the current frame's size.
uint32_t top_address;
if (is_bottommost) {
+ int32_t alignment_state = input_->GetFrameSlot(alignment_state_offset);
+ has_alignment_padding_ =
+ (alignment_state == kAlignmentPaddingPushed) ? 1 : 0;
// 2 = context and function in the frame.
- top_address =
- input_->GetRegister(ebp.code()) - (2 * kPointerSize) - height_in_bytes;
+ // If the optimized frame had alignment padding, adjust the frame pointer
+ // to point to the new position of the old frame pointer after padding
+ // is removed. Subtract 2 * kPointerSize for the context and function slots.
+ top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
+ height_in_bytes + has_alignment_padding_ * kPointerSize;
} else {
top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
}
output_frame->SetTop(top_address);
- // Compute the incoming parameter translation.
- int parameter_count = function->shared()->formal_parameter_count() + 1;
- unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
@@ -747,13 +911,17 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
}
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
- ASSERT(!is_bottommost || input_->GetRegister(ebp.code()) == fp_value);
+ ASSERT(!is_bottommost ||
+ (input_->GetRegister(ebp.code()) + has_alignment_padding_ * kPointerSize) ==
+ fp_value);
output_frame->SetFp(fp_value);
if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
fp_value, output_offset, value);
}
+ ASSERT(!is_bottommost || !has_alignment_padding_ ||
+ (fp_value & kPointerSize) != 0);
// For the bottommost output frame the context can be gotten from the input
// frame. For all subsequent output frames it can be gotten from the function
@@ -948,6 +1116,28 @@ void Deoptimizer::EntryGenerator::Generate() {
}
__ pop(eax);
+ if (type() != OSR) {
+ // If frame was dynamically aligned, pop padding.
+ Label no_padding;
+ __ cmp(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
+ Immediate(0));
+ __ j(equal, &no_padding);
+ __ pop(ecx);
+ if (FLAG_debug_code) {
+ __ cmp(ecx, Immediate(kAlignmentZapValue));
+ __ Assert(equal, "alignment marker expected");
+ }
+ __ bind(&no_padding);
+ } else {
+ // If frame needs dynamic alignment push padding.
+ Label no_padding;
+ __ cmp(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
+ Immediate(0));
+ __ j(equal, &no_padding);
+ __ push(Immediate(kAlignmentZapValue));
+ __ bind(&no_padding);
+ }
+
// Replace the current frame with the output frames.
Label outer_push_loop, inner_push_loop;
// Outer loop state: eax = current FrameDescription**, edx = one past the
diff --git a/src/3rdparty/v8/src/ia32/disasm-ia32.cc b/src/3rdparty/v8/src/ia32/disasm-ia32.cc
index b5ddcca..dd07deb 100644
--- a/src/3rdparty/v8/src/ia32/disasm-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/disasm-ia32.cc
@@ -31,6 +31,8 @@
#include "v8.h"
+#undef CONST
+
#if defined(V8_TARGET_ARCH_IA32)
#include "disasm.h"
@@ -553,6 +555,7 @@ int DisassemblerIA32::F7Instruction(byte* data) {
case 2: mnem = "not"; break;
case 3: mnem = "neg"; break;
case 4: mnem = "mul"; break;
+ case 5: mnem = "imul"; break;
case 7: mnem = "idiv"; break;
default: UnimplementedInstruction();
}
@@ -1266,6 +1269,14 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
NameOfXMMRegister(regop),
NameOfXMMRegister(rm));
data++;
+ } else if (*data == 0x56) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ AppendToBuffer("orpd %s,%s",
+ NameOfXMMRegister(regop),
+ NameOfXMMRegister(rm));
+ data++;
} else if (*data == 0x57) {
data++;
int mod, regop, rm;
@@ -1296,6 +1307,14 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
NameOfXMMRegister(rm),
static_cast<int>(imm8));
data += 2;
+ } else if (*data == 0x76) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ AppendToBuffer("pcmpeqd %s,%s",
+ NameOfXMMRegister(regop),
+ NameOfXMMRegister(rm));
+ data++;
} else if (*data == 0x90) {
data++;
AppendToBuffer("nop"); // 2 byte nop.
@@ -1463,6 +1482,7 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
switch (b2) {
case 0x2A: mnem = "cvtsi2sd"; break;
case 0x2C: mnem = "cvttsd2si"; break;
+ case 0x2D: mnem = "cvtsd2si"; break;
case 0x51: mnem = "sqrtsd"; break;
case 0x58: mnem = "addsd"; break;
case 0x59: mnem = "mulsd"; break;
@@ -1475,7 +1495,7 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
if (b2 == 0x2A) {
AppendToBuffer("%s %s,", mnem, NameOfXMMRegister(regop));
data += PrintRightOperand(data);
- } else if (b2 == 0x2C) {
+ } else if (b2 == 0x2C || b2 == 0x2D) {
AppendToBuffer("%s %s,", mnem, NameOfCPURegister(regop));
data += PrintRightXMMOperand(data);
} else if (b2 == 0xC2) {
diff --git a/src/3rdparty/v8/src/ia32/frames-ia32.h b/src/3rdparty/v8/src/ia32/frames-ia32.h
index 9e51857..18915e2 100644
--- a/src/3rdparty/v8/src/ia32/frames-ia32.h
+++ b/src/3rdparty/v8/src/ia32/frames-ia32.h
@@ -53,6 +53,10 @@ typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
// Number of registers for which space is reserved in safepoints.
const int kNumSafepointRegisters = 8;
+const int kNoAlignmentPadding = 0;
+const int kAlignmentPaddingPushed = 2;
+const int kAlignmentZapValue = 0x12345678; // Not heap object tagged.
+
// ----------------------------------------------------
@@ -119,6 +123,8 @@ class JavaScriptFrameConstants : public AllStatic {
// Caller SP-relative.
static const int kParam0Offset = -2 * kPointerSize;
static const int kReceiverOffset = -1 * kPointerSize;
+
+ static const int kDynamicAlignmentStateOffset = kLocal0Offset;
};
diff --git a/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc b/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
index 66cf497..c58f242 100644
--- a/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
@@ -123,6 +123,8 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -136,6 +138,8 @@ void FullCodeGenerator::Generate() {
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
Label ok;
+ Label start;
+ __ bind(&start);
__ test(ecx, ecx);
__ j(zero, &ok, Label::kNear);
// +1 for return address.
@@ -147,6 +151,8 @@ void FullCodeGenerator::Generate() {
__ mov(Operand(esp, receiver_offset),
Immediate(isolate()->factory()->undefined_value()));
__ bind(&ok);
+ ASSERT(!FLAG_age_code ||
+ (kSizeOfFullCodegenStrictModePrologue == ok.pos() - start.pos()));
}
// Open a frame scope to indicate that there is a frame on the stack. The
@@ -177,11 +183,14 @@ void FullCodeGenerator::Generate() {
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0 ||
(scope()->is_qml_mode() && scope()->is_global_scope())) {
- Comment cmnt(masm_, "[ Allocate local context");
+ Comment cmnt(masm_, "[ Allocate context");
// Argument to NewContext is the function, which is still in edi.
__ push(edi);
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ Push(info->scope()->GetScopeInfo());
+ __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -228,7 +237,7 @@ void FullCodeGenerator::Generate() {
__ lea(edx,
Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
__ push(edx);
- __ SafePush(Immediate(Smi::FromInt(num_parameters)));
+ __ push(Immediate(Smi::FromInt(num_parameters)));
// Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
@@ -258,7 +267,7 @@ void FullCodeGenerator::Generate() {
scope()->VisitIllegalRedeclaration(this);
} else {
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
@@ -273,7 +282,7 @@ void FullCodeGenerator::Generate() {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
@@ -318,20 +327,12 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
// Self-optimization is a one-off thing: if it fails, don't try again.
reset_value = Smi::kMaxValue;
}
- if (isolate()->IsDebuggerActive()) {
- // Detect debug break requests as soon as possible.
- reset_value = 10;
- }
__ mov(ebx, Immediate(profiling_counter_));
__ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
Immediate(Smi::FromInt(reset_value)));
}
-static const int kMaxBackEdgeWeight = 127;
-static const int kBackEdgeDistanceDivisor = 100;
-
-
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
@@ -343,7 +344,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(back_edge_target->is_bound());
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
__ j(positive, &ok, Label::kNear);
@@ -405,7 +406,7 @@ void FullCodeGenerator::EmitReturnSequence() {
} else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
Label ok;
@@ -761,7 +762,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
// Check that we're not inside a with or catch context.
__ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
__ cmp(ebx, isolate()->factory()->with_context_map());
@@ -783,11 +784,11 @@ void FullCodeGenerator::VisitVariableDeclaration(
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
globals_->Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(), zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
case Variable::PARAMETER:
@@ -815,10 +816,9 @@ void FullCodeGenerator::VisitVariableDeclaration(
__ push(esi);
__ push(Immediate(variable->name()));
// VariableDeclaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR || mode == LET ||
- mode == CONST || mode == CONST_HARMONY);
- PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
- ? READ_ONLY : NONE;
+ ASSERT(IsDeclaredVariableMode(mode));
+ PropertyAttributes attr =
+ IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
__ push(Immediate(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -842,13 +842,13 @@ void FullCodeGenerator::VisitFunctionDeclaration(
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_->Add(function);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(function, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
}
@@ -899,9 +899,9 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
switch (variable->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name());
- globals_->Add(instance);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
Visit(declaration->module());
break;
}
@@ -1098,19 +1098,28 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a map in register eax. Get the enumeration cache from it.
+ Label no_descriptors;
__ bind(&use_cache);
+
+ __ EnumLength(edx, eax);
+ __ cmp(edx, Immediate(Smi::FromInt(0)));
+ __ j(equal, &no_descriptors);
+
__ LoadInstanceDescriptors(eax, ecx);
- __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
- __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
+ __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(eax); // Map.
- __ push(edx); // Enumeration cache.
- __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
- __ push(eax); // Enumeration cache length (as smi).
+ __ push(ecx); // Enumeration cache.
+ __ push(edx); // Number of valid entries for the map in the enum cache.
__ push(Immediate(Smi::FromInt(0))); // Initial index.
__ jmp(&loop);
+ __ bind(&no_descriptors);
+ __ add(esp, Immediate(kPointerSize));
+ __ jmp(&exit);
+
// We got a fixed array in register eax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
@@ -1119,7 +1128,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
isolate()->factory()->NewJSGlobalPropertyCell(
Handle<Object>(
Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
- RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
__ LoadHeapObject(ebx, cell);
__ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
@@ -1275,9 +1284,9 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ mov(temp, context);
}
__ bind(&next);
- // Terminate at global context.
+ // Terminate at native context.
__ cmp(FieldOperand(temp, HeapObject::kMapOffset),
- Immediate(isolate()->factory()->global_context_map()));
+ Immediate(isolate()->factory()->native_context_map()));
__ j(equal, &fast, Label::kNear);
// Check that extension is NULL.
__ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
@@ -1561,9 +1570,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
- AccessorTable accessor_table(isolate()->zone());
+ AccessorTable accessor_table(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1589,7 +1598,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1653,7 +1662,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_constant_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1664,7 +1674,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
@@ -1676,10 +1686,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1707,9 +1716,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
- // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
- // transition and don't need to call the runtime stub.
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
+ // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
+ // cannot transition and don't need to call the runtime stub.
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ mov(ebx, Operand(esp, 0)); // Copy of array literal.
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
@@ -1801,11 +1810,11 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
}
}
@@ -1861,14 +1870,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
ASSERT(!key->handle()->IsSmi());
__ mov(ecx, Immediate(key->handle()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1889,7 +1898,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ mov(eax, ecx);
BinaryOpStub stub(op, mode);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@@ -1974,7 +1984,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(edx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(eax);
}
@@ -2101,7 +2112,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, ecx);
- if (FLAG_debug_code && op == Token::INIT_LET) {
+ if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ mov(edx, location);
__ cmp(edx, isolate()->factory()->the_hole_value());
@@ -2136,37 +2147,15 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ mov(ecx, prop->key()->AsLiteral()->handle());
- if (expr->ends_initialization_block()) {
- __ mov(edx, Operand(esp, 0));
- } else {
- __ pop(edx);
- }
+ __ pop(edx);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(eax); // Result of assignment, saved even if not needed.
- __ push(Operand(esp, kPointerSize)); // Receiver is under value.
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(eax);
- __ Drop(1);
- }
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(eax);
}
@@ -2178,38 +2167,14 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// esp[0] : key
// esp[kPointerSize] : receiver
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ push(Operand(esp, 2 * kPointerSize));
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
__ pop(ecx); // Key.
- if (expr->ends_initialization_block()) {
- __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
- } else {
- __ pop(edx);
- }
+ __ pop(edx);
// Record source code position before IC call.
SetSourcePosition(expr->position());
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ pop(edx);
- __ push(eax); // Result of assignment, saved even if not needed.
- __ push(edx);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(eax);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(eax);
@@ -2224,6 +2189,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
VisitForAccumulatorValue(expr->obj());
__ mov(edx, result_register());
EmitNamedPropertyLoad(expr);
+ PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(eax);
} else {
VisitForStackValue(expr->obj());
@@ -2238,7 +2204,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
ic_total_count_++;
__ call(code, rmode, ast_id);
}
@@ -2262,7 +2228,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
SetSourcePosition(expr->position());
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2294,7 +2260,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2314,20 +2280,18 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
- // Record call targets in unoptimized code, but not in the snapshot.
- if (!Serializer::enabled()) {
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(ebx, cell);
- }
+ // Record call targets in unoptimized code.
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ mov(ebx, cell);
CallFunctionStub stub(arg_count, flags);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
- __ CallStub(&stub, expr->id());
+ __ CallStub(&stub, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
@@ -2502,24 +2466,18 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
SetSourcePosition(expr->position());
// Load function and argument count into edi and eax.
- __ SafeSet(eax, Immediate(arg_count));
+ __ Set(eax, Immediate(arg_count));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
- // Record call targets in unoptimized code, but not in the snapshot.
- CallFunctionFlags flags;
- if (!Serializer::enabled()) {
- flags = RECORD_CALL_TARGET;
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ mov(ebx, cell);
- } else {
- flags = NO_CALL_FUNCTION_FLAGS;
- }
+ // Record call targets in unoptimized code.
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ mov(ebx, cell);
- CallConstructStub stub(flags);
+ CallConstructStub stub(RECORD_CALL_TARGET);
__ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(eax);
@@ -2660,7 +2618,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (FLAG_debug_code) __ AbortIfSmi(eax);
+ __ AssertNotSmi(eax);
// Check whether this map has already been checked to be safe for default
// valueOf.
@@ -2676,45 +2634,51 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ j(equal, if_false);
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(ecx, ebx);
+ __ cmp(ecx, 0);
+ __ j(equal, &done);
+
__ LoadInstanceDescriptors(ebx, ebx);
- __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
- // ebx: descriptor array
- // ecx: length of descriptor array
+ // ebx: descriptor array.
+ // ecx: valid entries in the descriptor array.
// Calculate the end of the descriptor array.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kPointerSize == 4);
- __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
+ __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
+ __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
// Calculate location of the first key name.
- __ add(ebx,
- Immediate(FixedArray::kHeaderSize +
- DescriptorArray::kFirstIndex * kPointerSize));
+ __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
__ jmp(&entry);
__ bind(&loop);
__ mov(edx, FieldOperand(ebx, 0));
__ cmp(edx, FACTORY->value_of_symbol());
__ j(equal, if_false);
- __ add(ebx, Immediate(kPointerSize));
+ __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
__ bind(&entry);
__ cmp(ebx, ecx);
__ j(not_equal, &loop);
+ __ bind(&done);
+
// Reload map as register ebx was used as temporary above.
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
- // If a valueOf property is not found on the object check that it's
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
__ JumpIfSmi(ecx, if_false);
__ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
- __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ mov(edx,
- FieldOperand(edx, GlobalObject::kGlobalContextOffset));
+ FieldOperand(edx, GlobalObject::kNativeContextOffset));
__ cmp(ecx,
ContextOperand(edx,
Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
@@ -2860,7 +2824,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
// parameter count in eax.
VisitForAccumulatorValue(args->at(0));
__ mov(edx, eax);
- __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
+ __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(eax);
@@ -2872,7 +2836,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
Label exit;
// Get the number of formal parameters.
- __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
+ __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
// Check if the calling frame is an arguments adaptor frame.
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
@@ -2885,7 +2849,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
__ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ bind(&exit);
- if (FLAG_debug_code) __ AbortIfNotSmi(eax);
+ __ AssertSmi(eax);
context()->Plug(eax);
}
@@ -2989,8 +2953,8 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
__ bind(&heapnumber_allocated);
__ PrepareCallCFunction(1, ebx);
- __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
- __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
+ __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
+ __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
__ mov(Operand(esp, 0), eax);
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
@@ -3077,19 +3041,18 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); // Load the object.
- Label runtime, done;
+ Label runtime, done, not_date_object;
Register object = eax;
Register result = eax;
Register scratch = ecx;
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ JumpIfSmi(object, &not_date_object);
__ CmpObjectType(object, JS_DATE_TYPE, scratch);
- __ Assert(equal, "Trying to get date field from non-date.");
-#endif
+ __ j(not_equal, &not_date_object);
if (index->value() == 0) {
__ mov(result, FieldOperand(object, JSDate::kValueOffset));
+ __ jmp(&done);
} else {
if (index->value() < JSDate::kFirstUncachedField) {
ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
@@ -3105,8 +3068,12 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
__ mov(Operand(esp, 0), object);
__ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
- __ bind(&done);
+ __ jmp(&done);
}
+
+ __ bind(&not_date_object);
+ __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ bind(&done);
context()->Plug(result);
}
@@ -3377,10 +3344,11 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
- // Check for proxy.
- Label proxy, done;
- __ CmpObjectType(eax, JS_FUNCTION_PROXY_TYPE, ebx);
- __ j(equal, &proxy);
+ Label runtime, done;
+ // Check for non-function argument (including proxy).
+ __ JumpIfSmi(eax, &runtime);
+ __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
+ __ j(not_equal, &runtime);
// InvokeFunction requires the function in edi. Move it in there.
__ mov(edi, result_register());
@@ -3390,7 +3358,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
- __ bind(&proxy);
+ __ bind(&runtime);
__ push(eax);
__ CallRuntime(Runtime::kCall, args->length());
__ bind(&done);
@@ -3420,7 +3388,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- isolate()->global_context()->jsfunction_result_caches());
+ isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ mov(eax, isolate()->factory()->undefined_value());
@@ -3433,9 +3401,9 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Register key = eax;
Register cache = ebx;
Register tmp = ecx;
- __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
+ __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
__ mov(cache,
- FieldOperand(cache, GlobalObject::kGlobalContextOffset));
+ FieldOperand(cache, GlobalObject::kNativeContextOffset));
__ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ mov(cache,
FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3505,9 +3473,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(eax);
- }
+ __ AssertString(eax);
Label materialize_true, materialize_false;
Label* if_true = NULL;
@@ -3530,9 +3496,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(eax);
- }
+ __ AssertString(eax);
__ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
__ IndexFromHash(eax, eax);
@@ -3606,7 +3570,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Loop condition: while (index < length).
// Live loop registers: index, array_length, string,
// scratch, string_length, elements.
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
__ cmp(index, array_length);
__ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
}
@@ -3620,7 +3584,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ and_(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
- __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
+ __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
__ j(not_equal, &bailout);
__ add(string_length,
FieldOperand(string, SeqAsciiString::kLengthOffset));
@@ -3834,7 +3798,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
} else {
@@ -3992,7 +3956,8 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register eax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
context()->Plug(eax);
}
@@ -4050,7 +4015,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
- PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
@@ -4113,7 +4078,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4147,7 +4112,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4164,7 +4129,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
// Result is on the stack
@@ -4372,7 +4337,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
@@ -4454,7 +4419,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
if (declaration_scope->is_global_scope() ||
declaration_scope->is_module_scope()) {
- // Contexts nested in the global context have a canonical empty function
+ // Contexts nested in the native context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
// function.
@@ -4483,14 +4448,51 @@ void FullCodeGenerator::EnterFinallyBlock() {
STATIC_ASSERT(kSmiTag == 0);
__ SmiTag(edx);
__ push(edx);
+
// Store result register while executing finally block.
__ push(result_register());
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(edx, Operand::StaticVariable(pending_message_obj));
+ __ push(edx);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(edx, Operand::StaticVariable(has_pending_message));
+ __ SmiTag(edx);
+ __ push(edx);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(edx, Operand::StaticVariable(pending_message_script));
+ __ push(edx);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(edx));
+ // Restore pending message from stack.
+ __ pop(edx);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(Operand::StaticVariable(pending_message_script), edx);
+
+ __ pop(edx);
+ __ SmiUntag(edx);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(Operand::StaticVariable(has_pending_message), edx);
+
+ __ pop(edx);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(Operand::StaticVariable(pending_message_obj), edx);
+
+ // Restore result register from stack.
__ pop(result_register());
+
// Uncook return address.
__ pop(edx);
__ SmiUntag(edx);
@@ -4526,7 +4528,6 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
return previous_;
}
-
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/ia32/ic-ia32.cc b/src/3rdparty/v8/src/ia32/ic-ia32.cc
index dc64a09..dae3bbd 100644
--- a/src/3rdparty/v8/src/ia32/ic-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/ic-ia32.cc
@@ -747,6 +747,125 @@ void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
}
+static void KeyedStoreGenerateGenericHelper(
+ MacroAssembler* masm,
+ Label* fast_object,
+ Label* fast_double,
+ Label* slow,
+ KeyedStoreCheckMap check_map,
+ KeyedStoreIncrementLength increment_length) {
+ Label transition_smi_elements;
+ Label finish_object_store, non_double_value, transition_double_elements;
+ Label fast_double_without_map_check;
+ // eax: value
+ // ecx: key (a smi)
+ // edx: receiver
+ // ebx: FixedArray receiver->elements
+ // edi: receiver map
+ // Fast case: Do the store, could either Object or double.
+ __ bind(fast_object);
+ if (check_map == kCheckMap) {
+ __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
+ __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
+ __ j(not_equal, fast_double);
+ }
+ // Smi stores don't require further checks.
+ Label non_smi_value;
+ __ JumpIfNotSmi(eax, &non_smi_value);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ }
+ // It's irrelevant whether array is smi-only or not when writing a smi.
+ __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
+ __ ret(0);
+
+ __ bind(&non_smi_value);
+ // Escape to elements kind transition case.
+ __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
+ __ CheckFastObjectElements(edi, &transition_smi_elements);
+
+ // Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ }
+ __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
+ // Update write barrier for the elements array address.
+ __ mov(edx, eax); // Preserve the value which is returned.
+ __ RecordWriteArray(
+ ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ __ ret(0);
+
+ __ bind(fast_double);
+ if (check_map == kCheckMap) {
+ // Check for fast double array case. If this fails, call through to the
+ // runtime.
+ __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
+ __ j(not_equal, slow);
+ // If the value is a number, store it as a double in the FastDoubleElements
+ // array.
+ }
+ __ bind(&fast_double_without_map_check);
+ __ StoreNumberToDoubleElements(eax, ebx, ecx, edi, xmm0,
+ &transition_double_elements, false);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ }
+ __ ret(0);
+
+ __ bind(&transition_smi_elements);
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+
+ // Transition the array appropriately depending on the value type.
+ __ CheckMap(eax,
+ masm->isolate()->factory()->heap_number_map(),
+ &non_double_value,
+ DONT_DO_SMI_CHECK);
+
+ // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
+ // and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ ebx,
+ edi,
+ slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ slow);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ slow);
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+}
+
+
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
@@ -755,10 +874,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
- Label slow, fast_object_with_map_check, fast_object_without_map_check;
- Label fast_double_with_map_check, fast_double_without_map_check;
- Label check_if_double_array, array, extra, transition_smi_elements;
- Label finish_object_store, non_double_value, transition_double_elements;
+ Label slow, fast_object, fast_object_grow;
+ Label fast_double, fast_double_grow;
+ Label array, extra, check_if_double_array;
// Check that the object isn't a smi.
__ JumpIfSmi(edx, &slow);
@@ -785,7 +903,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
// Check array bounds. Both the key and the length of FixedArray are smis.
__ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
- __ j(below, &fast_object_with_map_check);
+ __ j(below, &fast_object);
// Slow case: call runtime.
__ bind(&slow);
@@ -808,18 +926,12 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
__ cmp(edi, masm->isolate()->factory()->fixed_array_map());
__ j(not_equal, &check_if_double_array);
- // Add 1 to receiver->length, and go to common element store code for Objects.
- __ add(FieldOperand(edx, JSArray::kLengthOffset),
- Immediate(Smi::FromInt(1)));
- __ jmp(&fast_object_without_map_check);
+ __ jmp(&fast_object_grow);
__ bind(&check_if_double_array);
__ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
__ j(not_equal, &slow);
- // Add 1 to receiver->length, and go to common element store code for doubles.
- __ add(FieldOperand(edx, JSArray::kLengthOffset),
- Immediate(Smi::FromInt(1)));
- __ jmp(&fast_double_without_map_check);
+ __ jmp(&fast_double_grow);
// Array case: Get the length and the elements array from the JS
// array. Check that the array is in fast mode (and writable); if it
@@ -836,94 +948,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
__ j(above_equal, &extra);
- // Fast case: Do the store, could either Object or double.
- __ bind(&fast_object_with_map_check);
- // eax: value
- // ecx: key (a smi)
- // edx: receiver
- // ebx: FixedArray receiver->elements
- // edi: receiver map
- __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
- __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
- __ j(not_equal, &fast_double_with_map_check);
- __ bind(&fast_object_without_map_check);
- // Smi stores don't require further checks.
- Label non_smi_value;
- __ JumpIfNotSmi(eax, &non_smi_value);
- // It's irrelevant whether array is smi-only or not when writing a smi.
- __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
- __ ret(0);
-
- __ bind(&non_smi_value);
- // Escape to elements kind transition case.
- __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(edi, &transition_smi_elements);
-
- // Fast elements array, store the value to the elements backing store.
- __ bind(&finish_object_store);
- __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
- // Update write barrier for the elements array address.
- __ mov(edx, eax); // Preserve the value which is returned.
- __ RecordWriteArray(
- ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ ret(0);
-
- __ bind(&fast_double_with_map_check);
- // Check for fast double array case. If this fails, call through to the
- // runtime.
- __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
- __ j(not_equal, &slow);
- __ bind(&fast_double_without_map_check);
- // If the value is a number, store it as a double in the FastDoubleElements
- // array.
- __ StoreNumberToDoubleElements(eax, ebx, ecx, edx, xmm0,
- &transition_double_elements, false);
- __ ret(0);
-
- __ bind(&transition_smi_elements);
- __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
-
- // Transition the array appropriately depending on the value type.
- __ CheckMap(eax,
- masm->isolate()->factory()->heap_number_map(),
- &non_double_value,
- DONT_DO_SMI_CHECK);
-
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_DOUBLE_ELEMENTS,
- ebx,
- edi,
- &slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
- __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
- __ jmp(&fast_double_without_map_check);
-
- __ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
- ebx,
- edi,
- &slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
-
- __ bind(&transition_double_elements);
- // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
- // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
- // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
- __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
- FAST_ELEMENTS,
- ebx,
- edi,
- &slow);
- ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
- __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
+ &slow, kCheckMap, kDontIncrementLength);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
+ &slow, kDontCheckMap, kIncrementLength);
}
@@ -943,7 +971,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
Code::Flags flags = Code::ComputeFlags(kind,
MONOMORPHIC,
extra_state,
- NORMAL,
+ Code::NORMAL,
argc);
Isolate* isolate = masm->isolate();
isolate->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx, eax);
@@ -1622,7 +1650,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ mov(eax, edx);
__ Ret();
__ bind(&fail);
diff --git a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
index cf30156..34ce1cd 100644
--- a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
@@ -79,6 +79,10 @@ bool LCodeGen::GenerateCode() {
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::MANUAL);
+ dynamic_frame_alignment_ = (chunk()->num_double_slots() > 2 &&
+ !chunk()->graph()->is_recursive()) ||
+ !info()->osr_ast_id().IsNone();
+
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
@@ -95,17 +99,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LCodeGen::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -131,6 +126,8 @@ void LCodeGen::Comment(const char* format, ...) {
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -143,6 +140,8 @@ bool LCodeGen::GeneratePrologue() {
// receiver object). ecx is zero for method calls and non-zero for
// function calls.
if (!info_->is_classic_mode() || info_->is_native()) {
+ Label begin;
+ __ bind(&begin);
Label ok;
__ test(ecx, Operand(ecx));
__ j(zero, &ok, Label::kNear);
@@ -151,6 +150,39 @@ bool LCodeGen::GeneratePrologue() {
__ mov(Operand(esp, receiver_offset),
Immediate(isolate()->factory()->undefined_value()));
__ bind(&ok);
+ ASSERT(!FLAG_age_code ||
+ (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
+ }
+
+
+ if (dynamic_frame_alignment_) {
+ Label begin;
+ __ bind(&begin);
+ // Move state of dynamic frame alignment into edx.
+ __ mov(edx, Immediate(kNoAlignmentPadding));
+
+ Label do_not_pad, align_loop;
+ STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
+ // Align esp + 4 to a multiple of 2 * kPointerSize.
+ __ test(esp, Immediate(kPointerSize));
+ __ j(not_zero, &do_not_pad, Label::kNear);
+ __ push(Immediate(0));
+ __ mov(ebx, esp);
+ __ mov(edx, Immediate(kAlignmentPaddingPushed));
+ // Copy arguments, receiver, and return address.
+ __ mov(ecx, Immediate(scope()->num_parameters() + 2));
+
+ __ bind(&align_loop);
+ __ mov(eax, Operand(ebx, 1 * kPointerSize));
+ __ mov(Operand(ebx, 0), eax);
+ __ add(Operand(ebx), Immediate(kPointerSize));
+ __ dec(ecx);
+ __ j(not_zero, &align_loop, Label::kNear);
+ __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
+ __ bind(&do_not_pad);
+ ASSERT(!FLAG_age_code ||
+ (kSizeOfOptimizedAlignStackPrologue ==
+ do_not_pad.pos() - begin.pos()));
}
__ push(ebp); // Caller's frame pointer.
@@ -158,9 +190,21 @@ bool LCodeGen::GeneratePrologue() {
__ push(esi); // Callee's context.
__ push(edi); // Callee's JS function.
+ if (dynamic_frame_alignment_ && FLAG_debug_code) {
+ __ test(esp, Immediate(kPointerSize));
+ __ Assert(zero, "frame is expected to be aligned");
+ }
+
// Reserve space for the stack slots needed by the code.
int slots = GetStackSlotCount();
- if (slots > 0) {
+ ASSERT_GE(slots, 1);
+ if (slots == 1) {
+ if (dynamic_frame_alignment_) {
+ __ push(edx);
+ } else {
+ __ push(Immediate(kNoAlignmentPadding));
+ }
+ } else {
if (FLAG_debug_code) {
__ mov(Operand(eax), Immediate(slots));
Label loop;
@@ -170,7 +214,7 @@ bool LCodeGen::GeneratePrologue() {
__ j(not_zero, &loop);
} else {
__ sub(Operand(esp), Immediate(slots * kPointerSize));
-#ifdef _MSC_VER
+ #ifdef _MSC_VER
// On windows, you may not access the stack more than one page below
// the most recently mapped page. To make the allocated area randomly
// accessible, we write to each page in turn (the value is irrelevant).
@@ -180,7 +224,18 @@ bool LCodeGen::GeneratePrologue() {
offset -= kPageSize) {
__ mov(Operand(esp, offset), eax);
}
-#endif
+ #endif
+ }
+
+ // Store dynamic frame alignment state in the first local.
+ if (dynamic_frame_alignment_) {
+ __ mov(Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset),
+ edx);
+ } else {
+ __ mov(Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset),
+ Immediate(kNoAlignmentPadding));
}
}
@@ -308,24 +363,24 @@ XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
int LCodeGen::ToInteger32(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
- value->Number());
- return static_cast<int32_t>(value->Number());
+ ASSERT(constant->HasInteger32Value());
+ return constant->Integer32Value();
}
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
- Handle<Object> literal = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
- return literal;
+ return constant->handle();
}
double LCodeGen::ToDouble(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
- return value->Number();
+ HConstant* constant = chunk_->LookupConstant(op);
+ ASSERT(constant->HasDoubleValue());
+ return constant->DoubleValue();
}
@@ -359,7 +414,9 @@ Operand LCodeGen::HighOperand(LOperand* op) {
void LCodeGen::WriteTranslation(LEnvironment* environment,
- Translation* translation) {
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count) {
if (environment == NULL) return;
// The translation includes one command per value in the environment.
@@ -367,8 +424,20 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
// The output frame height does not include the parameters.
int height = translation_size - environment->parameter_count();
- WriteTranslation(environment->outer(), translation);
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
+ // Function parameters are arguments to the outermost environment. The
+ // arguments index points to the first element of a sequence of tagged
+ // values on the stack that represent the arguments. This needs to be
+ // kept in sync with the LArgumentsElements implementation.
+ *arguments_index = -environment->parameter_count();
+ *arguments_count = environment->parameter_count();
+
+ WriteTranslation(environment->outer(),
+ translation,
+ arguments_index,
+ arguments_count);
+ int closure_id = *info()->closure() != *environment->closure()
+ ? DefineDeoptimizationLiteral(environment->closure())
+ : Translation::kSelfLiteralId;
switch (environment->frame_type()) {
case JS_FUNCTION:
translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -376,12 +445,31 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
case JS_CONSTRUCT:
translation->BeginConstructStubFrame(closure_id, translation_size);
break;
+ case JS_GETTER:
+ ASSERT(translation_size == 1);
+ ASSERT(height == 0);
+ translation->BeginGetterStubFrame(closure_id);
+ break;
+ case JS_SETTER:
+ ASSERT(translation_size == 2);
+ ASSERT(height == 0);
+ translation->BeginSetterStubFrame(closure_id);
+ break;
case ARGUMENTS_ADAPTOR:
translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
break;
- default:
- UNREACHABLE();
}
+
+ // Inlined frames which push their arguments cause the index to be
+ // bumped and another stack area to be used for materialization.
+ if (environment->entry() != NULL &&
+ environment->entry()->arguments_pushed()) {
+ *arguments_index = *arguments_index < 0
+ ? GetStackSlotCount()
+ : *arguments_index + *arguments_count;
+ *arguments_count = environment->entry()->arguments_count() + 1;
+ }
+
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -392,7 +480,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
translation->MarkDuplicate();
AddToTranslation(translation,
environment->spilled_registers()[value->index()],
- environment->HasTaggedValueAt(i));
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
} else if (
value->IsDoubleRegister() &&
environment->spilled_double_registers()[value->index()] != NULL) {
@@ -400,26 +491,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
AddToTranslation(
translation,
environment->spilled_double_registers()[value->index()],
- false);
+ false,
+ false,
+ *arguments_index,
+ *arguments_count);
}
}
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
+ AddToTranslation(translation,
+ value,
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
}
}
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged) {
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count) {
if (op == NULL) {
// TODO(twuerthinger): Introduce marker operands to indicate that this value
// is not present and must be reconstructed from the deoptimizer. Currently
// this is only used for the arguments object.
- translation->StoreArgumentsObject();
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
} else if (op->IsStackSlot()) {
if (is_tagged) {
translation->StoreStackSlot(op->index());
+ } else if (is_uint32) {
+ translation->StoreUint32StackSlot(op->index());
} else {
translation->StoreInt32StackSlot(op->index());
}
@@ -433,6 +537,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
Register reg = ToRegister(op);
if (is_tagged) {
translation->StoreRegister(reg);
+ } else if (is_uint32) {
+ translation->StoreUint32Register(reg);
} else {
translation->StoreInt32Register(reg);
}
@@ -440,8 +546,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
XMMRegister reg = ToDoubleRegister(op);
translation->StoreDoubleRegister(reg);
} else if (op->IsConstantOperand()) {
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
- int src_index = DefineDeoptimizationLiteral(literal);
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
translation->StoreLiteral(src_index);
} else {
UNREACHABLE();
@@ -500,9 +606,9 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
} else if (context->IsStackSlot()) {
__ mov(esi, ToOperand(context));
} else if (context->IsConstantOperand()) {
- Handle<Object> literal =
- chunk_->LookupLiteral(LConstantOperand::cast(context));
- __ LoadHeapObject(esi, Handle<Context>::cast(literal));
+ HConstant* constant =
+ chunk_->LookupConstant(LConstantOperand::cast(context));
+ __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
} else {
UNREACHABLE();
}
@@ -531,20 +637,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(
int frame_count = 0;
int jsframe_count = 0;
+ int args_index = 0;
+ int args_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
- WriteTranslation(environment, &translation);
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
+ WriteTranslation(environment, &translation, &args_index, &args_count);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -566,19 +674,22 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
__ push(eax);
__ push(ebx);
__ mov(ebx, shared);
- __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
+ __ mov(eax,
+ FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset));
__ sub(Operand(eax), Immediate(Smi::FromInt(1)));
__ j(not_zero, &no_deopt, Label::kNear);
if (FLAG_trap_on_deopt) __ int3();
__ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
+ eax);
__ pop(ebx);
__ pop(eax);
__ popfd();
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
__ bind(&no_deopt);
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
+ eax);
__ pop(ebx);
__ pop(eax);
__ popfd();
@@ -618,13 +729,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
}
data->SetLiteralArray(*literals);
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
// Populate the deoptimization entries.
for (int i = 0; i < length; i++) {
LEnvironment* env = deoptimizations_[i];
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
+ data->SetAstId(i, env->ast_id());
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
@@ -639,7 +750,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -684,9 +795,9 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
}
@@ -699,7 +810,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, mode);
}
@@ -808,7 +919,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
void LCodeGen::DoModI(LModI* instr) {
if (instr->hydrogen()->HasPowerOf2Divisor()) {
- Register dividend = ToRegister(instr->InputAt(0));
+ Register dividend = ToRegister(instr->left());
int32_t divisor =
HConstant::cast(instr->hydrogen()->right())->Integer32Value();
@@ -832,8 +943,8 @@ void LCodeGen::DoModI(LModI* instr) {
__ bind(&done);
} else {
Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
- Register left_reg = ToRegister(instr->InputAt(0));
- Register right_reg = ToRegister(instr->InputAt(1));
+ Register left_reg = ToRegister(instr->left());
+ Register right_reg = ToRegister(instr->right());
Register result_reg = ToRegister(instr->result());
ASSERT(left_reg.is(eax));
@@ -863,7 +974,7 @@ void LCodeGen::DoModI(LModI* instr) {
__ j(less, &remainder_eq_dividend, Label::kNear);
// Check if the divisor is a PowerOfTwo integer.
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
__ mov(scratch, right_reg);
__ sub(Operand(scratch), Immediate(1));
__ test(scratch, Operand(right_reg));
@@ -919,11 +1030,11 @@ void LCodeGen::DoModI(LModI* instr) {
void LCodeGen::DoDivI(LDivI* instr) {
- LOperand* right = instr->InputAt(1);
+ LOperand* right = instr->right();
ASSERT(ToRegister(instr->result()).is(eax));
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
- ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
- ASSERT(!ToRegister(instr->InputAt(1)).is(edx));
+ ASSERT(ToRegister(instr->left()).is(eax));
+ ASSERT(!ToRegister(instr->right()).is(eax));
+ ASSERT(!ToRegister(instr->right()).is(edx));
Register left_reg = eax;
@@ -964,12 +1075,115 @@ void LCodeGen::DoDivI(LDivI* instr) {
}
+void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
+ ASSERT(instr->right()->IsConstantOperand());
+
+ Register dividend = ToRegister(instr->left());
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->right()));
+ Register result = ToRegister(instr->result());
+
+ switch (divisor) {
+ case 0:
+ DeoptimizeIf(no_condition, instr->environment());
+ return;
+
+ case 1:
+ __ Move(result, dividend);
+ return;
+
+ case -1:
+ __ Move(result, dividend);
+ __ neg(result);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(zero, instr->environment());
+ }
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+ DeoptimizeIf(overflow, instr->environment());
+ }
+ return;
+ }
+
+ uint32_t divisor_abs = abs(divisor);
+ if (IsPowerOf2(divisor_abs)) {
+ int32_t power = WhichPowerOf2(divisor_abs);
+ if (divisor < 0) {
+ // Input[dividend] is clobbered.
+ // The sequence is tedious because neg(dividend) might overflow.
+ __ mov(result, dividend);
+ __ sar(dividend, 31);
+ __ neg(result);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(zero, instr->environment());
+ }
+ __ shl(dividend, 32 - power);
+ __ sar(result, power);
+ __ not_(dividend);
+ // Clear result.sign if dividend.sign is set.
+ __ and_(result, dividend);
+ } else {
+ __ Move(result, dividend);
+ __ sar(result, power);
+ }
+ } else {
+ ASSERT(ToRegister(instr->left()).is(eax));
+ ASSERT(ToRegister(instr->result()).is(edx));
+ Register scratch = ToRegister(instr->temp());
+
+ // Find b which: 2^b < divisor_abs < 2^(b+1).
+ unsigned b = 31 - CompilerIntrinsics::CountLeadingZeros(divisor_abs);
+ unsigned shift = 32 + b; // Precision +1bit (effectively).
+ double multiplier_f =
+ static_cast<double>(static_cast<uint64_t>(1) << shift) / divisor_abs;
+ int64_t multiplier;
+ if (multiplier_f - floor(multiplier_f) < 0.5) {
+ multiplier = static_cast<int64_t>(floor(multiplier_f));
+ } else {
+ multiplier = static_cast<int64_t>(floor(multiplier_f)) + 1;
+ }
+ // The multiplier is a uint32.
+ ASSERT(multiplier > 0 &&
+ multiplier < (static_cast<int64_t>(1) << 32));
+ __ mov(scratch, dividend);
+ if (divisor < 0 &&
+ instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ test(dividend, dividend);
+ DeoptimizeIf(zero, instr->environment());
+ }
+ __ mov(edx, static_cast<int32_t>(multiplier));
+ __ imul(edx);
+ if (static_cast<int32_t>(multiplier) < 0) {
+ __ add(edx, scratch);
+ }
+ Register reg_lo = eax;
+ Register reg_byte_scratch = scratch;
+ if (!reg_byte_scratch.is_byte_register()) {
+ __ xchg(reg_lo, reg_byte_scratch);
+ reg_lo = scratch;
+ reg_byte_scratch = eax;
+ }
+ if (divisor < 0) {
+ __ xor_(reg_byte_scratch, reg_byte_scratch);
+ __ cmp(reg_lo, 0x40000000);
+ __ setcc(above, reg_byte_scratch);
+ __ neg(edx);
+ __ sub(edx, reg_byte_scratch);
+ } else {
+ __ xor_(reg_byte_scratch, reg_byte_scratch);
+ __ cmp(reg_lo, 0xC0000000);
+ __ setcc(above_equal, reg_byte_scratch);
+ __ add(edx, reg_byte_scratch);
+ }
+ __ sar(edx, shift - 32);
+ }
+}
+
+
void LCodeGen::DoMulI(LMulI* instr) {
- Register left = ToRegister(instr->InputAt(0));
- LOperand* right = instr->InputAt(1);
+ Register left = ToRegister(instr->left());
+ LOperand* right = instr->right();
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- __ mov(ToRegister(instr->TempAt(0)), left);
+ __ mov(ToRegister(instr->temp()), left);
}
if (right->IsConstantOperand()) {
@@ -1030,12 +1244,15 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ test(left, Operand(left));
__ j(not_zero, &done, Label::kNear);
if (right->IsConstantOperand()) {
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
+ if (ToInteger32(LConstantOperand::cast(right)) < 0) {
DeoptimizeIf(no_condition, instr->environment());
+ } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
+ __ cmp(ToRegister(instr->temp()), Immediate(0));
+ DeoptimizeIf(less, instr->environment());
}
} else {
// Test the non-zero operand for negative sign.
- __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
+ __ or_(ToRegister(instr->temp()), ToOperand(right));
DeoptimizeIf(sign, instr->environment());
}
__ bind(&done);
@@ -1044,8 +1261,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
void LCodeGen::DoBitI(LBitI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
ASSERT(left->IsRegister());
@@ -1085,14 +1302,21 @@ void LCodeGen::DoBitI(LBitI* instr) {
void LCodeGen::DoShiftI(LShiftI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
ASSERT(left->IsRegister());
if (right->IsRegister()) {
ASSERT(ToRegister(right).is(ecx));
switch (instr->op()) {
+ case Token::ROR:
+ __ ror_cl(ToRegister(left));
+ if (instr->can_deopt()) {
+ __ test(ToRegister(left), Immediate(0x80000000));
+ DeoptimizeIf(not_zero, instr->environment());
+ }
+ break;
case Token::SAR:
__ sar_cl(ToRegister(left));
break;
@@ -1114,6 +1338,14 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
int value = ToInteger32(LConstantOperand::cast(right));
uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
switch (instr->op()) {
+ case Token::ROR:
+ if (shift_count == 0 && instr->can_deopt()) {
+ __ test(ToRegister(left), Immediate(0x80000000));
+ DeoptimizeIf(not_zero, instr->environment());
+ } else {
+ __ ror(ToRegister(left), shift_count);
+ }
+ break;
case Token::SAR:
if (shift_count != 0) {
__ sar(ToRegister(left), shift_count);
@@ -1141,8 +1373,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
void LCodeGen::DoSubI(LSubI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
@@ -1171,7 +1403,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
if (BitCast<uint64_t, double>(v) == 0) {
__ xorps(res, res);
} else {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
uint64_t int_val = BitCast<uint64_t, double>(v);
int32_t lower = static_cast<int32_t>(int_val);
int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
@@ -1214,7 +1446,7 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}
@@ -1222,14 +1454,21 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
void LCodeGen::DoFixedArrayBaseLength(
LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}
+void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register map = ToRegister(instr->value());
+ __ EnumLength(result, map);
+}
+
+
void LCodeGen::DoElementsKind(LElementsKind* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
// Load map into |result|.
__ mov(result, FieldOperand(input, HeapObject::kMapOffset));
@@ -1243,9 +1482,9 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
void LCodeGen::DoValueOf(LValueOf* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- Register map = ToRegister(instr->TempAt(0));
+ Register map = ToRegister(instr->temp());
ASSERT(input.is(result));
Label done;
@@ -1262,19 +1501,18 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
void LCodeGen::DoDateField(LDateField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->date());
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
Smi* index = instr->index();
Label runtime, done;
ASSERT(object.is(result));
ASSERT(object.is(eax));
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ test(object, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
__ CmpObjectType(object, JS_DATE_TYPE, scratch);
- __ Assert(equal, "Trying to get date field from non-date.");
-#endif
+ DeoptimizeIf(not_equal, instr->environment());
if (index->value() == 0) {
__ mov(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1299,7 +1537,7 @@ void LCodeGen::DoDateField(LDateField* instr) {
void LCodeGen::DoBitNotI(LBitNotI* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->Equals(instr->result()));
__ not_(ToRegister(input));
}
@@ -1318,8 +1556,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
void LCodeGen::DoAddI(LAddI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
@@ -1334,9 +1572,70 @@ void LCodeGen::DoAddI(LAddI* instr) {
}
+void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
+ ASSERT(left->Equals(instr->result()));
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
+ if (instr->hydrogen()->representation().IsInteger32()) {
+ Label return_left;
+ Condition condition = (operation == HMathMinMax::kMathMin)
+ ? less_equal
+ : greater_equal;
+ if (right->IsConstantOperand()) {
+ Operand left_op = ToOperand(left);
+ Immediate right_imm = ToInteger32Immediate(right);
+ __ cmp(left_op, right_imm);
+ __ j(condition, &return_left, Label::kNear);
+ __ mov(left_op, right_imm);
+ } else {
+ Register left_reg = ToRegister(left);
+ Operand right_op = ToOperand(right);
+ __ cmp(left_reg, right_op);
+ __ j(condition, &return_left, Label::kNear);
+ __ mov(left_reg, right_op);
+ }
+ __ bind(&return_left);
+ } else {
+ ASSERT(instr->hydrogen()->representation().IsDouble());
+ Label check_nan_left, check_zero, return_left, return_right;
+ Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
+ XMMRegister left_reg = ToDoubleRegister(left);
+ XMMRegister right_reg = ToDoubleRegister(right);
+ __ ucomisd(left_reg, right_reg);
+ __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
+ __ j(equal, &check_zero, Label::kNear); // left == right.
+ __ j(condition, &return_left, Label::kNear);
+ __ jmp(&return_right, Label::kNear);
+
+ __ bind(&check_zero);
+ XMMRegister xmm_scratch = xmm0;
+ __ xorps(xmm_scratch, xmm_scratch);
+ __ ucomisd(left_reg, xmm_scratch);
+ __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
+ // At this point, both left and right are either 0 or -0.
+ if (operation == HMathMinMax::kMathMin) {
+ __ orpd(left_reg, right_reg);
+ } else {
+ // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
+ __ addsd(left_reg, right_reg);
+ }
+ __ jmp(&return_left, Label::kNear);
+
+ __ bind(&check_nan_left);
+ __ ucomisd(left_reg, left_reg); // NaN check.
+ __ j(parity_even, &return_left, Label::kNear); // left == NaN.
+ __ bind(&return_right);
+ __ movsd(left_reg, right_reg);
+
+ __ bind(&return_left);
+ }
+}
+
+
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
- XMMRegister left = ToDoubleRegister(instr->InputAt(0));
- XMMRegister right = ToDoubleRegister(instr->InputAt(1));
+ XMMRegister left = ToDoubleRegister(instr->left());
+ XMMRegister right = ToDoubleRegister(instr->right());
XMMRegister result = ToDoubleRegister(instr->result());
// Modulo uses a fixed result register.
ASSERT(instr->op() == Token::MOD || left.is(result));
@@ -1422,17 +1721,17 @@ void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32()) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
__ test(reg, Operand(reg));
EmitBranch(true_block, false_block, not_zero);
} else if (r.IsDouble()) {
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister reg = ToDoubleRegister(instr->value());
__ xorps(xmm0, xmm0);
__ ucomisd(reg, xmm0);
EmitBranch(true_block, false_block, not_equal);
} else {
ASSERT(r.IsTagged());
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ cmp(reg, factory()->true_value());
@@ -1480,7 +1779,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
Register map = no_reg; // Keep the compiler happy.
if (expected.NeedsMap()) {
- map = ToRegister(instr->TempAt(0));
+ map = ToRegister(instr->temp());
ASSERT(!map.is(reg));
__ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
@@ -1573,8 +1872,8 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
Condition cc = TokenToCondition(instr->op(), instr->is_double());
@@ -1610,8 +1909,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Operand right = ToOperand(instr->InputAt(1));
+ Register left = ToRegister(instr->left());
+ Operand right = ToOperand(instr->right());
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1621,7 +1920,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
+ Register left = ToRegister(instr->left());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1631,7 +1930,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int false_block = chunk_->LookupDestination(instr->false_block_id());
// If the expression is known to be untagged or a smi, then it's definitely
@@ -1661,7 +1960,7 @@ void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
__ JumpIfSmi(reg, false_label);
// Check for undetectable objects by looking in the bit field in
// the map. The object has already been smi checked.
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
__ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
__ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
__ test(scratch, Immediate(1 << Map::kIsUndetectable));
@@ -1694,8 +1993,8 @@ Condition LCodeGen::EmitIsObject(Register input,
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1720,8 +2019,8 @@ Condition LCodeGen::EmitIsString(Register input,
void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1734,7 +2033,7 @@ void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
- Operand input = ToOperand(instr->InputAt(0));
+ Operand input = ToOperand(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1745,8 +2044,8 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1816,8 +2115,8 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1832,12 +2131,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- if (FLAG_debug_code) {
- __ AbortIfNotString(input);
- }
+ __ AssertString(input);
__ mov(result, FieldOperand(input, String::kHashFieldOffset));
__ IndexFromHash(result, result);
@@ -1846,7 +2143,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1923,9 +2220,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
Handle<String> class_name = instr->hydrogen()->class_name();
@@ -1942,7 +2239,7 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int true_block = instr->true_block_id();
int false_block = instr->false_block_id();
@@ -1985,11 +2282,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
- Register object = ToRegister(instr->InputAt(1));
- Register temp = ToRegister(instr->TempAt(0));
+ Register object = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result);
@@ -1998,7 +2295,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
- Register map = ToRegister(instr->TempAt(0));
+ Register map = ToRegister(instr->temp());
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
Handle<JSGlobalPropertyCell> cache_cell =
@@ -2049,7 +2346,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// register which is pushed last by PushSafepointRegisters as top of the
// stack is used to pass the offset to the location of the map check to
// the stub.
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
__ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 13;
@@ -2098,8 +2395,25 @@ void LCodeGen::DoReturn(LReturn* instr) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (dynamic_frame_alignment_) {
+ // Fetch the state of the dynamic frame alignment.
+ __ mov(edx, Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset));
+ }
__ mov(esp, ebp);
__ pop(ebp);
+ if (dynamic_frame_alignment_) {
+ Label no_padding;
+ __ cmp(edx, Immediate(kNoAlignmentPadding));
+ __ j(equal, &no_padding);
+ if (FLAG_debug_code) {
+ __ cmp(Operand(esp, (GetParameterCount() + 2) * kPointerSize),
+ Immediate(kAlignmentZapValue));
+ __ Assert(equal, "expected alignment marker");
+ }
+ __ Ret((GetParameterCount() + 2) * kPointerSize, ecx);
+ __ bind(&no_padding);
+ }
__ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
}
@@ -2199,7 +2513,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
int offset = Context::SlotOffset(instr->slot_index());
__ RecordWriteContextSlot(context,
offset,
@@ -2229,12 +2543,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
- type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ type->LookupDescriptor(NULL, *name, &lookup);
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2246,9 +2560,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
+ Heap* heap = type->GetHeap();
+ while (*current != heap->null_value()) {
+ __ LoadHeapObject(result, current);
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Handle<Map>(current->map()));
+ DeoptimizeIf(not_equal, env);
+ current =
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
+ }
+ __ mov(result, factory()->undefined_value());
}
}
@@ -2270,6 +2598,22 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
}
+// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
+// prototype chain, which causes unbounded code generation.
+static bool CompactEmit(SmallMapList* list,
+ Handle<String> name,
+ int i,
+ Isolate* isolate) {
+ Handle<Map> map = list->at(i);
+ // If the map has ElementsKind transitions, we will generate map checks
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
+ if (map->HasElementsTransition()) return false;
+ LookupResult lookup(isolate);
+ map->LookupDescriptor(NULL, *name, &lookup);
+ return lookup.IsField() || lookup.IsConstantFunction();
+}
+
+
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
@@ -2283,18 +2627,32 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
}
Handle<String> name = instr->hydrogen()->name();
Label done;
+ bool all_are_compact = true;
+ for (int i = 0; i < map_count; ++i) {
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
+ all_are_compact = false;
+ break;
+ }
+ }
for (int i = 0; i < map_count; ++i) {
bool last = (i == map_count - 1);
Handle<Map> map = instr->hydrogen()->types()->at(i);
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
+ Label check_passed;
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
if (last && !need_generic) {
DeoptimizeIf(not_equal, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
} else {
Label next;
- __ j(not_equal, &next, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
+ bool compact = all_are_compact ? true :
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
__ bind(&next);
}
}
@@ -2320,7 +2678,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
Register function = ToRegister(instr->function());
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
// Check that the function really is a function.
@@ -2362,7 +2720,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
__ mov(result, FieldOperand(input, JSObject::kElementsOffset));
if (FLAG_debug_code) {
Label done, ok, fail;
@@ -2378,8 +2736,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
__ and_(temp, Map::kElementsKindMask);
__ shr(temp, Map::kElementsKindShift);
- __ cmp(temp, FAST_ELEMENTS);
- __ j(equal, &ok, Label::kNear);
+ __ cmp(temp, GetInitialFastElementsKind());
+ __ j(less, &fail, Label::kNear);
+ __ cmp(temp, TERMINAL_FAST_ELEMENTS_KIND);
+ __ j(less_equal, &ok, Label::kNear);
__ cmp(temp, FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
__ j(less, &fail, Label::kNear);
__ cmp(temp, LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
@@ -2396,7 +2756,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
void LCodeGen::DoLoadExternalArrayPointer(
LLoadExternalArrayPointer* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
__ mov(result, FieldOperand(input,
ExternalArray::kExternalPointerOffset));
}
@@ -2407,67 +2767,153 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register length = ToRegister(instr->length());
Operand index = ToOperand(instr->index());
Register result = ToRegister(instr->result());
-
- __ sub(length, index);
- DeoptimizeIf(below_equal, instr->environment());
-
// There are two words between the frame pointer and the last argument.
// Subtracting from length accounts for one of them add one more.
+ __ sub(length, index);
__ mov(result, Operand(arguments, length, times_4, kPointerSize));
}
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
+ ElementsKind elements_kind = instr->elements_kind();
+ if (ExternalArrayOpRequiresTemp<HLoadKeyed>(instr->hydrogen())) {
+ __ SmiUntag(ToRegister(instr->key()));
+ }
+ Operand operand(BuildFastArrayOperand(
+ instr->elements(),
+ instr->key(),
+ instr->hydrogen()->key()->representation(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
+ if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
+ XMMRegister result(ToDoubleRegister(instr->result()));
+ __ movss(result, operand);
+ __ cvtss2sd(result, result);
+ } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
+ __ movdbl(ToDoubleRegister(instr->result()), operand);
+ } else {
+ Register result(ToRegister(instr->result()));
+ switch (elements_kind) {
+ case EXTERNAL_BYTE_ELEMENTS:
+ __ movsx_b(result, operand);
+ break;
+ case EXTERNAL_PIXEL_ELEMENTS:
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ __ movzx_b(result, operand);
+ break;
+ case EXTERNAL_SHORT_ELEMENTS:
+ __ movsx_w(result, operand);
+ break;
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ __ movzx_w(result, operand);
+ break;
+ case EXTERNAL_INT_ELEMENTS:
+ __ mov(result, operand);
+ break;
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ __ mov(result, operand);
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
+ __ test(result, Operand(result));
+ DeoptimizeIf(negative, instr->environment());
+ }
+ break;
+ case EXTERNAL_FLOAT_ELEMENTS:
+ case EXTERNAL_DOUBLE_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case DICTIONARY_ELEMENTS:
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
+ UNREACHABLE();
+ break;
+ }
+ }
+}
+
+
+void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
+ XMMRegister result = ToDoubleRegister(instr->result());
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
+ sizeof(kHoleNanLower32);
+ Operand hole_check_operand = BuildFastArrayOperand(
+ instr->elements(), instr->key(),
+ instr->hydrogen()->key()->representation(),
+ FAST_DOUBLE_ELEMENTS,
+ offset,
+ instr->additional_index());
+ __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ Operand double_load_operand = BuildFastArrayOperand(
+ instr->elements(),
+ instr->key(),
+ instr->hydrogen()->key()->representation(),
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+ __ movdbl(result, double_load_operand);
+}
+
+
+void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
Register result = ToRegister(instr->result());
// Load the result.
__ mov(result,
BuildFastArrayOperand(instr->elements(),
instr->key(),
+ instr->hydrogen()->key()->representation(),
FAST_ELEMENTS,
FixedArray::kHeaderSize - kHeapObjectTag,
instr->additional_index()));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ cmp(result, factory()->the_hole_value());
- DeoptimizeIf(equal, instr->environment());
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ test(result, Immediate(kSmiTagMask));
+ DeoptimizeIf(not_equal, instr->environment());
+ } else {
+ __ cmp(result, factory()->the_hole_value());
+ DeoptimizeIf(equal, instr->environment());
+ }
}
}
-void LCodeGen::DoLoadKeyedFastDoubleElement(
- LLoadKeyedFastDoubleElement* instr) {
- XMMRegister result = ToDoubleRegister(instr->result());
-
- int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
- sizeof(kHoleNanLower32);
- Operand hole_check_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(),
- FAST_DOUBLE_ELEMENTS,
- offset,
- instr->additional_index());
- __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr->environment());
-
- Operand double_load_operand = BuildFastArrayOperand(
- instr->elements(),
- instr->key(),
- FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
- instr->additional_index());
- __ movdbl(result, double_load_operand);
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
+ if (instr->is_external()) {
+ DoLoadKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->representation().IsDouble()) {
+ DoLoadKeyedFixedDoubleArray(instr);
+ } else {
+ DoLoadKeyedFixedArray(instr);
+ }
}
Operand LCodeGen::BuildFastArrayOperand(
LOperand* elements_pointer,
LOperand* key,
+ Representation key_representation,
ElementsKind elements_kind,
uint32_t offset,
uint32_t additional_index) {
Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
+ // Even though the HLoad/StoreKeyed instructions force the input
+ // representation for the key to be an integer, the input gets replaced during
+ // bound check elimination with the index argument to the bounds check, which
+ // can be tagged, so that case must be handled here, too.
+ if (key_representation.IsTagged() && (shift_size >= 1)) {
+ shift_size -= kSmiTagSize;
+ }
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
@@ -2486,61 +2932,6 @@ Operand LCodeGen::BuildFastArrayOperand(
}
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
- LLoadKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(),
- elements_kind,
- 0,
- instr->additional_index()));
- if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
- XMMRegister result(ToDoubleRegister(instr->result()));
- __ movss(result, operand);
- __ cvtss2sd(result, result);
- } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- __ movdbl(ToDoubleRegister(instr->result()), operand);
- } else {
- Register result(ToRegister(instr->result()));
- switch (elements_kind) {
- case EXTERNAL_BYTE_ELEMENTS:
- __ movsx_b(result, operand);
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ movzx_b(result, operand);
- break;
- case EXTERNAL_SHORT_ELEMENTS:
- __ movsx_w(result, operand);
- break;
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ movzx_w(result, operand);
- break;
- case EXTERNAL_INT_ELEMENTS:
- __ mov(result, operand);
- break;
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- __ mov(result, operand);
- __ test(result, Operand(result));
- // TODO(danno): we could be more clever here, perhaps having a special
- // version of the stub that detects if the overflow case actually
- // happens, and generate code that returns a double rather than int.
- DeoptimizeIf(negative, instr->environment());
- break;
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
- case DICTIONARY_ELEMENTS:
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNREACHABLE();
- break;
- }
- }
-}
-
-
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
@@ -2581,7 +2972,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
- Operand elem = ToOperand(instr->InputAt(0));
+ Operand elem = ToOperand(instr->elements());
Register result = ToRegister(instr->result());
Label done;
@@ -2605,7 +2996,7 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
// If the receiver is null or undefined, we have to pass the global
// object as a receiver to normal functions. Values have to be
@@ -2618,12 +3009,12 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
1 << SharedFunctionInfo::kStrictModeBitWithinByte);
- __ j(not_equal, &receiver_ok, Label::kNear);
+ __ j(not_equal, &receiver_ok); // A near jump is not sufficient here!
// Do not transform the receiver to object for builtins.
__ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
1 << SharedFunctionInfo::kNativeBitWithinByte);
- __ j(not_equal, &receiver_ok, Label::kNear);
+ __ j(not_equal, &receiver_ok);
// Normal function. Replace undefined or null with global receiver.
__ cmp(receiver, factory()->null_value());
@@ -2643,7 +3034,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
// if it's better to use it than to explicitly fetch it from the context
// here.
__ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
- __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
+ __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX));
__ mov(receiver,
FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
@@ -2693,7 +3084,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
- LOperand* argument = instr->InputAt(0);
+ LOperand* argument = instr->value();
EmitPushTaggedOperand(argument);
}
@@ -2705,7 +3096,7 @@ void LCodeGen::DoDrop(LDrop* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- __ LoadHeapObject(result, instr->hydrogen()->closure());
+ __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
}
@@ -2724,7 +3115,7 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(esi));
+ ASSERT(ToRegister(instr->context()).is(esi));
__ push(esi); // The context is the first argument.
__ push(Immediate(instr->hydrogen()->pairs()));
__ push(Immediate(Smi::FromInt(instr->hydrogen()->flags())));
@@ -2735,7 +3126,10 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
- __ mov(result, Operand(context, Context::SlotOffset(instr->qml_global()?Context::QML_GLOBAL_INDEX:Context::GLOBAL_INDEX)));
+ __ mov(result,
+ Operand(context, Context::SlotOffset(instr->qml_global()
+ ? Context::QML_GLOBAL_OBJECT_INDEX
+ : Context::GLOBAL_OBJECT_INDEX)));
}
@@ -2762,17 +3156,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ LoadHeapObject(edi, function);
}
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
-
- if (change_context) {
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
- } else {
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- }
+ // Change context.
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Set eax to arguments count if adaption is not needed. Assumes that eax
// is available to write to at this point.
@@ -2901,7 +3286,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Register input_reg = ToRegister(instr->value());
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -2935,8 +3320,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
__ cmp(output_reg, 0x80000000u);
DeoptimizeIf(equal, instr->environment());
} else {
- Label negative_sign;
- Label done;
+ Label negative_sign, done;
// Deoptimize on unordered.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
@@ -2962,9 +3346,9 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
DeoptimizeIf(equal, instr->environment());
__ jmp(&done, Label::kNear);
- // Non-zero negative reaches here
+ // Non-zero negative reaches here.
__ bind(&negative_sign);
- // Truncate, then compare and compensate
+ // Truncate, then compare and compensate.
__ cvttsd2si(output_reg, Operand(input_reg));
__ cvtsi2sd(xmm_scratch, output_reg);
__ ucomisd(input_reg, xmm_scratch);
@@ -3066,11 +3450,11 @@ void LCodeGen::DoPower(LPower* instr) {
Representation exponent_type = instr->hydrogen()->right()->representation();
// Having marked this as a call, we can use any registers.
// Just make sure that the input/output registers are the expected ones.
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
- ToDoubleRegister(instr->InputAt(1)).is(xmm1));
- ASSERT(!instr->InputAt(1)->IsRegister() ||
- ToRegister(instr->InputAt(1)).is(eax));
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
+ ASSERT(!instr->right()->IsDoubleRegister() ||
+ ToDoubleRegister(instr->right()).is(xmm1));
+ ASSERT(!instr->right()->IsRegister() ||
+ ToRegister(instr->right()).is(eax));
+ ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
if (exponent_type.IsTagged()) {
@@ -3103,21 +3487,21 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
+ ASSERT(ToRegister(instr->global_object()).is(eax));
// Assert that the register size is indeed the size of each seed.
static const int kSeedSize = sizeof(uint32_t);
STATIC_ASSERT(kPointerSize == kSeedSize);
- __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
+ __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
static const int kRandomSeedOffset =
FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
__ mov(ebx, FieldOperand(eax, kRandomSeedOffset));
- // ebx: FixedArray of the global context's random seeds
+ // ebx: FixedArray of the native context's random seeds
// Load state[0].
__ mov(ecx, FieldOperand(ebx, ByteArray::kHeaderSize));
@@ -3362,7 +3746,22 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = instr->offset();
if (!instr->transition().is_null()) {
- __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ } else {
+ Register temp = ToRegister(instr->temp());
+ Register temp_map = ToRegister(instr->temp_map());
+ __ mov(temp_map, instr->transition());
+ __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map);
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ temp_map,
+ temp,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3372,7 +3771,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (instr->is_in_object()) {
__ mov(FieldOperand(object, offset), value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
// Update the write barrier for the object for in-object properties.
__ RecordWriteField(object,
offset,
@@ -3383,7 +3782,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
check_needed);
}
} else {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
__ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
__ mov(FieldOperand(temp, offset), value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
@@ -3414,10 +3813,36 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
}
+void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand) {
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
+ if (operand->IsRegister()) {
+ __ test(ToRegister(operand), Immediate(kSmiTagMask));
+ } else {
+ __ test(ToOperand(operand), Immediate(kSmiTagMask));
+ }
+ DeoptimizeIf(not_zero, environment);
+ }
+}
+
+
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->length(),
+ instr->length());
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->index(),
+ instr->index());
if (instr->index()->IsConstantOperand()) {
- __ cmp(ToOperand(instr->length()),
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
+ int constant_index =
+ ToInteger32(LConstantOperand::cast(instr->index()));
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
+ __ cmp(ToOperand(instr->length()),
+ Immediate(Smi::FromInt(constant_index)));
+ } else {
+ __ cmp(ToOperand(instr->length()), Immediate(constant_index));
+ }
DeoptimizeIf(below_equal, instr->environment());
} else {
__ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
@@ -3426,14 +3851,18 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
}
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
- LStoreKeyedSpecializedArrayElement* instr) {
+void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
ElementsKind elements_kind = instr->elements_kind();
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(),
- elements_kind,
- 0,
- instr->additional_index()));
+ if (ExternalArrayOpRequiresTemp<HStoreKeyed>(instr->hydrogen())) {
+ __ SmiUntag(ToRegister(instr->key()));
+ }
+ Operand operand(BuildFastArrayOperand(
+ instr->elements(),
+ instr->key(),
+ instr->hydrogen()->key()->representation(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
__ movss(operand, xmm0);
@@ -3457,9 +3886,12 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3469,14 +3901,41 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
}
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
+void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
+ XMMRegister value = ToDoubleRegister(instr->value());
+
+ if (instr->NeedsCanonicalization()) {
+ Label have_value;
+
+ __ ucomisd(value, value);
+ __ j(parity_odd, &have_value); // NaN.
+
+ ExternalReference canonical_nan_reference =
+ ExternalReference::address_of_canonical_non_hole_nan();
+ __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
+ __ bind(&have_value);
+ }
+
+ Operand double_store_operand = BuildFastArrayOperand(
+ instr->elements(),
+ instr->key(),
+ instr->hydrogen()->key()->representation(),
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+ __ movdbl(double_store_operand, value);
+}
+
+
+void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
Register value = ToRegister(instr->value());
- Register elements = ToRegister(instr->object());
+ Register elements = ToRegister(instr->elements());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
Operand operand = BuildFastArrayOperand(
- instr->object(),
+ instr->elements(),
instr->key(),
+ instr->hydrogen()->key()->representation(),
FAST_ELEMENTS,
FixedArray::kHeaderSize - kHeapObjectTag,
instr->additional_index());
@@ -3499,29 +3958,15 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
}
-void LCodeGen::DoStoreKeyedFastDoubleElement(
- LStoreKeyedFastDoubleElement* instr) {
- XMMRegister value = ToDoubleRegister(instr->value());
-
- if (instr->NeedsCanonicalization()) {
- Label have_value;
-
- __ ucomisd(value, value);
- __ j(parity_odd, &have_value); // NaN.
-
- ExternalReference canonical_nan_reference =
- ExternalReference::address_of_canonical_non_hole_nan();
- __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
- __ bind(&have_value);
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+ // By cases...external, fast-double, fast
+ if (instr->is_external()) {
+ DoStoreKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+ DoStoreKeyedFixedDoubleArray(instr);
+ } else {
+ DoStoreKeyedFixedArray(instr);
}
-
- Operand double_store_operand = BuildFastArrayOperand(
- instr->elements(),
- instr->key(),
- FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
- instr->additional_index());
- __ movdbl(double_store_operand, value);
}
@@ -3540,7 +3985,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
Register object_reg = ToRegister(instr->object());
- Register new_map_reg = ToRegister(instr->new_map_reg());
+ Register new_map_reg = ToRegister(instr->new_map_temp());
Handle<Map> from_map = instr->original_map();
Handle<Map> to_map = instr->transitioned_map();
@@ -3548,26 +3993,35 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
ElementsKind to_kind = to_map->elements_kind();
Label not_applicable;
+ bool is_simple_map_transition =
+ IsSimpleMapChangeTransition(from_kind, to_kind);
+ Label::Distance branch_distance =
+ is_simple_map_transition ? Label::kNear : Label::kFar;
__ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
- __ j(not_equal, &not_applicable);
- __ mov(new_map_reg, to_map);
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ __ j(not_equal, &not_applicable, branch_distance);
+ if (is_simple_map_transition) {
Register object_reg = ToRegister(instr->object());
- __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
+ Handle<Map> map = instr->hydrogen()->transitioned_map();
+ __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
+ Immediate(map));
// Write barrier.
- ASSERT_NE(instr->temp_reg(), NULL);
- __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
- ToRegister(instr->temp_reg()), kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ ASSERT_NE(instr->temp(), NULL);
+ __ RecordWriteForMap(object_reg, to_map, new_map_reg,
+ ToRegister(instr->temp()),
+ kDontSaveFPRegs);
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
+ __ mov(new_map_reg, to_map);
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(edx));
ASSERT(new_map_reg.is(ebx));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
+ __ mov(new_map_reg, to_map);
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(edx));
ASSERT(new_map_reg.is(ebx));
__ mov(fixed_object_reg, object_reg);
@@ -3592,7 +4046,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
factory(),
@@ -3628,9 +4082,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
}
CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
instr, instr->context());
- if (FLAG_debug_code) {
- __ AbortIfNotSmi(eax);
- }
+ __ AssertSmi(eax);
__ SmiUntag(eax);
__ StoreToSafepointRegisterSlot(result, eax);
}
@@ -3648,7 +4100,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3700,7 +4152,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
LOperand* output = instr->result();
ASSERT(output->IsDoubleRegister());
@@ -3708,43 +4160,89 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
}
+void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
+ LOperand* input = instr->value();
+ LOperand* output = instr->result();
+ LOperand* temp = instr->temp();
+
+ __ LoadUint32(ToDoubleRegister(output),
+ ToRegister(input),
+ ToDoubleRegister(temp));
+}
+
+
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
class DeferredNumberTagI: public LDeferredCode {
public:
DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
: LDeferredCode(codegen), instr_(instr) { }
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_, instr_->value(), SIGNED_INT32);
+ }
virtual LInstruction* instr() { return instr_; }
private:
LNumberTagI* instr_;
};
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() && input->Equals(instr->result()));
Register reg = ToRegister(input);
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTag(reg);
__ j(overflow, deferred->entry());
__ bind(deferred->exit());
}
-void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
+void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
+ class DeferredNumberTagU: public LDeferredCode {
+ public:
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_, instr_->value(), UNSIGNED_INT32);
+ }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LNumberTagU* instr_;
+ };
+
+ LOperand* input = instr->value();
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register reg = ToRegister(input);
+
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
+ __ cmp(reg, Immediate(Smi::kMaxValue));
+ __ j(above, deferred->entry());
+ __ SmiTag(reg);
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness) {
Label slow;
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(value);
Register tmp = reg.is(eax) ? ecx : eax;
// Preserve the value of all registers.
PushSafepointRegistersScope scope(this);
- // There was overflow, so bits 30 and 31 of the original integer
- // disagree. Try to allocate a heap number in new space and store
- // the value in there. If that fails, call the runtime system.
Label done;
- __ SmiUntag(reg);
- __ xor_(reg, 0x80000000);
- __ cvtsi2sd(xmm0, Operand(reg));
+
+ if (signedness == SIGNED_INT32) {
+ // There was overflow, so bits 30 and 31 of the original integer
+ // disagree. Try to allocate a heap number in new space and store
+ // the value in there. If that fails, call the runtime system.
+ __ SmiUntag(reg);
+ __ xor_(reg, 0x80000000);
+ __ cvtsi2sd(xmm0, Operand(reg));
+ } else {
+ __ LoadUint32(xmm0, reg, xmm1);
+ }
+
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, no_reg, &slow);
__ jmp(&done, Label::kNear);
@@ -3787,11 +4285,11 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
LNumberTagD* instr_;
};
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
Register reg = ToRegister(instr->result());
- Register tmp = ToRegister(instr->TempAt(0));
+ Register tmp = ToRegister(instr->temp());
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
} else {
@@ -3824,7 +4322,7 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() && input->Equals(instr->result()));
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
__ SmiTag(ToRegister(input));
@@ -3832,11 +4330,13 @@ void LCodeGen::DoSmiTag(LSmiTag* instr) {
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() && input->Equals(instr->result()));
if (instr->needs_check()) {
__ test(ToRegister(input), Immediate(kSmiTagMask));
DeoptimizeIf(not_zero, instr->environment());
+ } else {
+ __ AssertSmi(ToRegister(input));
}
__ SmiUntag(ToRegister(input));
}
@@ -3897,7 +4397,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Label done, heap_number;
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3938,7 +4438,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
__ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
__ add(Operand(esp), Immediate(kDoubleSize));
} else {
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
__ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ cvttsd2si(input_reg, Operand(xmm0));
__ cmp(input_reg, 0x80000000u);
@@ -3955,7 +4455,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
// Deoptimize if we don't have a heap number.
DeoptimizeIf(not_equal, instr->environment());
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
__ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ cvttsd2si(input_reg, Operand(xmm0));
__ cvtsi2sd(xmm_temp, Operand(input_reg));
@@ -3985,13 +4485,13 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
LTaggedToI* instr_;
};
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
ASSERT(input->Equals(instr->result()));
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -4004,9 +4504,9 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
- LOperand* temp = instr->TempAt(0);
+ LOperand* temp = instr->temp();
ASSERT(temp == NULL || temp->IsRegister());
LOperand* result = instr->result();
ASSERT(result->IsDoubleRegister());
@@ -4028,7 +4528,7 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsDoubleRegister());
LOperand* result = instr->result();
ASSERT(result->IsRegister());
@@ -4066,7 +4566,7 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
__ bind(&done);
} else {
Label done;
- Register temp_reg = ToRegister(instr->TempAt(0));
+ Register temp_reg = ToRegister(instr->temp());
XMMRegister xmm_scratch = xmm0;
// If cvttsd2si succeeded, we're done. Otherwise, we attempt
@@ -4145,22 +4645,22 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ test(ToOperand(input), Immediate(kSmiTagMask));
DeoptimizeIf(not_zero, instr->environment());
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ test(ToOperand(input), Immediate(kSmiTagMask));
DeoptimizeIf(zero, instr->environment());
}
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
__ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
@@ -4230,7 +4730,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
@@ -4296,7 +4796,8 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- Register reg = ToRegister(instr->TempAt(0));
+ ASSERT(instr->temp()->Equals(instr->result()));
+ Register reg = ToRegister(instr->temp());
Handle<JSObject> holder = instr->holder();
Handle<JSObject> current_prototype = instr->prototype();
@@ -4332,10 +4833,11 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
Handle<Map> initial_map(constructor->initial_map());
int instance_size = initial_map->instance_size();
@@ -4368,7 +4870,7 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ mov(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
if (FLAG_debug_code) {
- __ AbortIfSmi(map);
+ __ AssertNotSmi(map);
__ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
instance_size >> kPointerSizeLog2);
__ Assert(equal, "Unexpected instance size");
@@ -4420,14 +4922,15 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
- Heap* heap = isolate()->heap();
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(eax, instr->hydrogen()->boilerplate_object());
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
// Load the map's "bit field 2". We only need the first byte,
@@ -4440,12 +4943,11 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
}
// Set up the parameters to the stub/runtime call.
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
+ __ PushHeapObject(literals);
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
- __ push(Immediate(Handle<FixedArray>(heap->empty_fixed_array())));
+ __ push(Immediate(isolate()->factory()->empty_fixed_array()));
// Pick the right runtime function or stub to call.
int length = instr->hydrogen()->length();
@@ -4549,8 +5051,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
Handle<FixedDoubleArray>::cast(elements);
for (int i = 0; i < elements_length; i++) {
int64_t value = double_array->get_representation(i);
- int32_t value_low = value & 0xFFFFFFFF;
- int32_t value_high = value >> 32;
+ int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
+ int32_t value_high = static_cast<int32_t>(value >> 32);
int total_offset =
elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
__ mov(FieldOperand(result, total_offset), Immediate(value_low));
@@ -4589,8 +5091,9 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
// Deopt if the literal boilerplate ElementsKind is of a type different than
// the expected one. The check isn't necessary if the boilerplate has already
- // been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
__ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
// Load the map's "bit field 2". We only need the first byte,
@@ -4653,7 +5156,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
+ ASSERT(ToRegister(instr->value()).is(eax));
__ push(eax);
CallRuntime(Runtime::kToFastProperties, 1, instr);
}
@@ -4663,15 +5166,13 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
Label materialized;
// Registers will be used as follows:
- // edi = JS function.
// ecx = literals array.
// ebx = regexp literal.
// eax = regexp literal clone.
// esi = context.
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
- int literal_offset = FixedArray::kHeaderSize +
- instr->hydrogen()->literal_index() * kPointerSize;
+ int literal_offset =
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
+ __ LoadHeapObject(ecx, instr->hydrogen()->literals());
__ mov(ebx, FieldOperand(ecx, literal_offset));
__ cmp(ebx, factory()->undefined_value());
__ j(not_equal, &materialized, Label::kNear);
@@ -4735,14 +5236,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
void LCodeGen::DoTypeof(LTypeof* instr) {
- LOperand* input = instr->InputAt(1);
+ LOperand* input = instr->value();
EmitPushTaggedOperand(input);
CallRuntime(Runtime::kTypeof, 1, instr);
}
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -4826,7 +5327,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -4945,7 +5446,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
@@ -5027,11 +5528,20 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
+ Label load_cache, done;
+ __ EnumLength(result, map);
+ __ cmp(result, Immediate(Smi::FromInt(0)));
+ __ j(not_equal, &load_cache);
+ __ mov(result, isolate()->factory()->empty_fixed_array());
+ __ jmp(&done);
+
+ __ bind(&load_cache);
__ LoadInstanceDescriptors(map, result);
__ mov(result,
- FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ FieldOperand(result, DescriptorArray::kEnumCacheOffset));
__ mov(result,
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
+ __ bind(&done);
__ test(result, result);
DeoptimizeIf(equal, instr->environment());
}
diff --git a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
index 392bca2..44ddaff 100644
--- a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
+++ b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
@@ -47,20 +47,24 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
- : chunk_(chunk),
+ : zone_(info->zone()),
+ chunk_(static_cast<LPlatformChunk*>(chunk)),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, info->zone()),
+ deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(info->zone()),
+ deferred_(8, info->zone()),
+ dynamic_frame_alignment_(false),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(info->zone()),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -72,6 +76,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
Operand ToOperand(LOperand* op) const;
@@ -100,7 +105,12 @@ class LCodeGen BASE_EMBEDDED {
// Deferred code support.
void DoDeferredNumberTagD(LNumberTagD* instr);
- void DoDeferredNumberTagI(LNumberTagI* instr);
+
+ enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
+ void DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness);
+
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
@@ -119,7 +129,10 @@ class LCodeGen BASE_EMBEDDED {
void DoGap(LGap* instr);
// Emit frame translation commands for an environment.
- void WriteTranslation(LEnvironment* environment, Translation* translation);
+ void WriteTranslation(LEnvironment* environment,
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count);
void EnsureRelocSpaceForDeoptimization();
@@ -145,7 +158,7 @@ class LCodeGen BASE_EMBEDDED {
return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -161,10 +174,10 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
int GetParameterCount() const { return scope()->num_parameters(); }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -228,7 +241,10 @@ class LCodeGen BASE_EMBEDDED {
void AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged);
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
@@ -241,6 +257,7 @@ class LCodeGen BASE_EMBEDDED {
double ToDouble(LConstantOperand* op) const;
Operand BuildFastArrayOperand(LOperand* elements_pointer,
LOperand* key,
+ Representation key_representation,
ElementsKind elements_kind,
uint32_t offset,
uint32_t additional_index = 0);
@@ -278,6 +295,10 @@ class LCodeGen BASE_EMBEDDED {
bool deoptimize_on_minus_zero,
LEnvironment* env);
+ void DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand);
+
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
// true and false label should be made, to optimize fallthrough.
@@ -308,7 +329,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -318,12 +340,19 @@ class LCodeGen BASE_EMBEDDED {
int* offset);
void EnsureSpaceForLazyDeopt();
+ void DoLoadKeyedExternalArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedArray(LLoadKeyed* instr);
+ void DoStoreKeyedExternalArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// Emits code for pushing either a tagged constant, a (non-double)
// register, or a stack slot operand.
void EmitPushTaggedOperand(LOperand* operand);
- LChunk* const chunk_;
+ Zone* zone_;
+ LPlatformChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -337,6 +366,7 @@ class LCodeGen BASE_EMBEDDED {
Status status_;
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
+ bool dynamic_frame_alignment_;
int osr_pc_offset_;
int last_lazy_deopt_pc_;
diff --git a/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
index 510d9f1..6428916 100644
--- a/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
@@ -37,7 +37,7 @@ namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner),
- moves_(32),
+ moves_(32, owner->zone()),
source_uses_(),
destination_uses_(),
spilled_register_(-1) {}
@@ -157,7 +157,7 @@ void LGapResolver::AddMove(LMoveOperands move) {
LOperand* destination = move.destination();
if (destination->IsRegister()) ++destination_uses_[destination->index()];
- moves_.Add(move);
+ moves_.Add(move, cgen_->zone());
}
diff --git a/src/3rdparty/v8/src/ia32/lithium-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-ia32.cc
index fb408a1..dcc5b77 100644
--- a/src/3rdparty/v8/src/ia32/lithium-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-ia32.cc
@@ -179,6 +179,7 @@ const char* LArithmeticT::Mnemonic() const {
case Token::BIT_AND: return "bit-and-t";
case Token::BIT_OR: return "bit-or-t";
case Token::BIT_XOR: return "bit-xor-t";
+ case Token::ROR: return "ror-t";
case Token::SHL: return "sal-t";
case Token::SAR: return "sar-t";
case Token::SHR: return "shr-t";
@@ -196,22 +197,22 @@ void LGoto::PrintDataTo(StringStream* stream) {
void LBranch::PrintDataTo(StringStream* stream) {
stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ left()->PrintTo(stream);
stream->Add(" %s ", Token::String(op()));
- InputAt(1)->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(kind() == kStrictEquality ? " === " : " == ");
stream->Add(nil() == kNullValue ? "null" : "undefined");
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
@@ -220,57 +221,57 @@ void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_object(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_string(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_undetectable(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if string_compare(");
- InputAt(1)->PrintTo(stream);
- InputAt(2)->PrintTo(stream);
+ left()->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_cached_array_index(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if class_of_test(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(", \"%o\") then B%d else B%d",
*hydrogen()->class_name(),
true_block_id(),
@@ -280,7 +281,7 @@ void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if typeof ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(" == \"%s\" then B%d else B%d",
*hydrogen()->type_literal()->ToCString(),
true_block_id(), false_block_id());
@@ -294,34 +295,34 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
stream->Add("/%s ", hydrogen()->OpName());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LMathPowHalf::PrintDataTo(StringStream* stream) {
stream->Add("/pow_half ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
}
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d] <- ", slot_index());
- InputAt(1)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LInvokeFunction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add(" ");
- InputAt(1)->PrintTo(stream);
+ function()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -350,7 +351,9 @@ void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
void LCallNew::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
+ stream->Add(" ");
+ constructor()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -366,55 +369,23 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
}
-int LChunk::GetNextSpillIndex(bool is_double) {
+int LPlatformChunk::GetNextSpillIndex(bool is_double) {
// Skip a slot if for a double-width slot.
- if (is_double) spill_slot_count_++;
+ if (is_double) {
+ spill_slot_count_++;
+ spill_slot_count_ |= 1;
+ num_double_slots_++;
+ }
return spill_slot_count_++;
}
-LOperand* LChunk::GetNextSpillSlot(bool is_double) {
+LOperand* LPlatformChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
- }
-}
-
-
-void LChunk::MarkEmptyBlocks() {
- HPhase phase("L_Mark empty blocks", this);
- for (int i = 0; i < graph()->blocks()->length(); ++i) {
- HBasicBlock* block = graph()->blocks()->at(i);
- int first = block->first_instruction_index();
- int last = block->last_instruction_index();
- LInstruction* first_instr = instructions()->at(first);
- LInstruction* last_instr = instructions()->at(last);
-
- LLabel* label = LLabel::cast(first_instr);
- if (last_instr->IsGoto()) {
- LGoto* goto_instr = LGoto::cast(last_instr);
- if (label->IsRedundant() &&
- !label->is_loop_header()) {
- bool can_eliminate = true;
- for (int i = first + 1; i < last && can_eliminate; ++i) {
- LInstruction* cur = instructions()->at(i);
- if (cur->IsGap()) {
- LGap* gap = LGap::cast(cur);
- if (!gap->IsRedundant()) {
- can_eliminate = false;
- }
- } else {
- can_eliminate = false;
- }
- }
-
- if (can_eliminate) {
- label->set_replacement(GetLabel(goto_instr->block_id()));
- }
- }
- }
+ return LStackSlot::Create(index, zone());
}
}
@@ -437,16 +408,7 @@ void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
}
-void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add("[");
- key()->PrintTo(stream);
- stream->Add("] <- ");
- value()->PrintTo(stream);
-}
-
-
-void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
+void LStoreKeyed::PrintDataTo(StringStream* stream) {
elements()->PrintTo(stream);
stream->Add("[");
key()->PrintTo(stream);
@@ -470,85 +432,17 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
}
-void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
- int index = -1;
- if (instr->IsControl()) {
- instructions_.Add(gap);
- index = instructions_.length();
- instructions_.Add(instr);
- } else {
- index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
- }
- if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
- instr->pointer_map()->set_lithium_position(index);
- }
-}
-
-
-LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
-}
-
-
-int LChunk::GetParameterStackSlot(int index) const {
- // The receiver is at index 0, the first parameter at index 1, so we
- // shift all parameter indexes down by the number of parameters, and
- // make sure they end up negative so they are distinguishable from
- // spill slots.
- int result = index - info()->scope()->num_parameters() - 1;
- ASSERT(result < 0);
- return result;
-}
-
-// A parameter relative to ebp in the arguments stub.
-int LChunk::ParameterAt(int index) {
- ASSERT(-1 <= index); // -1 is the receiver.
- return (1 + info()->scope()->num_parameters() - index) *
- kPointerSize;
-}
-
-
-LGap* LChunk::GetGapAt(int index) const {
- return LGap::cast(instructions_[index]);
-}
-
-
-bool LChunk::IsGapAt(int index) const {
- return instructions_[index]->IsGap();
-}
-
-
-int LChunk::NearestGapPos(int index) const {
- while (!IsGapAt(index)) index--;
- return index;
-}
-
-
-void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
-}
-
-
-Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
- return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
-}
-
-
-Representation LChunk::LookupLiteralRepresentation(
- LConstantOperand* operand) const {
- return graph_->LookupValue(operand->index())->representation();
-}
-
-
-LChunk* LChunkBuilder::Build() {
+LPlatformChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new(zone()) LChunk(info(), graph());
+ chunk_ = new(zone()) LPlatformChunk(info(), graph());
HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
+
+ // Reserve the first spill slot for the state of dynamic alignment.
+ int alignment_state_index = chunk_->GetNextSpillIndex(false);
+ ASSERT_EQ(alignment_state_index, 0);
+ USE(alignment_state_index);
+
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
HBasicBlock* next = NULL;
@@ -561,17 +455,8 @@ LChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LChunk building in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LChunkBuilder::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -742,7 +627,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
@@ -764,7 +649,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -840,13 +725,16 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
// Shift operations can only deoptimize if we do a logical shift by 0 and
// the result cannot be truncated to int32.
- bool may_deopt = (op == Token::SHR && constant_value == 0);
bool does_deopt = false;
- if (may_deopt) {
- for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
- if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
- does_deopt = true;
- break;
+ if (op == Token::SHR && constant_value == 0) {
+ if (FLAG_opt_safe_uint32_operations) {
+ does_deopt = !instr->CheckFlag(HInstruction::kUint32);
+ } else {
+ for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
+ does_deopt = true;
+ break;
+ }
}
}
}
@@ -980,8 +868,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
- int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber ||
+ BailoutId ast_id = hydrogen_env->ast_id();
+ ASSERT(!ast_id.IsNone() ||
hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result =
@@ -991,7 +879,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ hydrogen_env->entry(),
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1005,7 +895,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
} else {
op = UseAny(value);
}
- result->AddValue(op, value->representation());
+ result->AddValue(op,
+ value->representation(),
+ value->CheckFlag(HInstruction::kUint32));
}
if (hydrogen_env->frame_type() == JS_FUNCTION) {
@@ -1269,6 +1161,11 @@ LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
}
+LInstruction* LChunkBuilder::DoRor(HRor* instr) {
+ return DoShift(Token::ROR, instr);
+}
+
+
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
return DoShift(Token::SHR, instr);
}
@@ -1335,12 +1232,57 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
-LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
- UNIMPLEMENTED();
+HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
+ // A value with an integer representation does not need to be transformed.
+ if (dividend->representation().IsInteger32()) {
+ return dividend;
+ // A change from an integer32 can be replaced by the integer32 value.
+ } else if (dividend->IsChange() &&
+ HChange::cast(dividend)->from().IsInteger32()) {
+ return HChange::cast(dividend)->value();
+ }
return NULL;
}
+HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) {
+ if (divisor->IsConstant() &&
+ HConstant::cast(divisor)->HasInteger32Value()) {
+ HConstant* constant_val = HConstant::cast(divisor);
+ return constant_val->CopyToRepresentation(Representation::Integer32(),
+ divisor->block()->zone());
+ }
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ HValue* right = instr->right();
+ ASSERT(right->IsConstant() && HConstant::cast(right)->HasInteger32Value());
+ LOperand* divisor = chunk_->DefineConstantOperand(HConstant::cast(right));
+ int32_t divisor_si = HConstant::cast(right)->Integer32Value();
+ if (divisor_si == 0) {
+ LOperand* dividend = UseRegister(instr->left());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, NULL)));
+ } else if (IsPowerOf2(abs(divisor_si))) {
+ // use dividend as temp if divisor < 0 && divisor != -1
+ LOperand* dividend = divisor_si < -1 ? UseTempRegister(instr->left()) :
+ UseRegisterAtStart(instr->left());
+ LInstruction* result = DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, NULL));
+ return divisor_si < 0 ? AssignEnvironment(result) : result;
+ } else {
+ // needs edx:eax, plus a temp
+ LOperand* dividend = UseFixed(instr->left(), eax);
+ LOperand* temp = TempRegister();
+ LInstruction* result = DefineFixed(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, temp), edx);
+ return divisor_si < 0 ? AssignEnvironment(result) : result;
+ }
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1449,6 +1391,26 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
}
+LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
+ LOperand* left = NULL;
+ LOperand* right = NULL;
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ left = UseRegisterAtStart(instr->LeastConstantOperand());
+ right = UseOrConstantAtStart(instr->MostConstantOperand());
+ } else {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ left = UseRegisterAtStart(instr->left());
+ right = UseRegisterAtStart(instr->right());
+ }
+ LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
+ return DefineSameAsFirst(minmax);
+}
+
+
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
ASSERT(instr->representation().IsDouble());
// We call a C function for double power. It can't trigger a GC.
@@ -1544,7 +1506,7 @@ LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
- return new LIsStringAndBranch(UseRegister(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
}
@@ -1570,7 +1532,7 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
LOperand* left = UseFixed(instr->left(), edx);
LOperand* right = UseFixed(instr->right(), eax);
- LStringCompareAndBranch* result = new
+ LStringCompareAndBranch* result = new(zone())
LStringCompareAndBranch(context, left, right);
return MarkAsCall(result, instr);
@@ -1625,6 +1587,12 @@ LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
}
+LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
+ LOperand* map = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LMapEnumLength(map));
+}
+
+
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LElementsKind(object));
@@ -1642,7 +1610,7 @@ LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* date = UseFixed(instr->value(), eax);
LDateField* result =
new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
- return MarkAsCall(DefineFixed(result, eax), instr);
+ return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1695,8 +1663,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
- if (needs_check) {
+ if (instr->value()->type().IsSmi()) {
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
+ } else {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp =
(truncating && CpuFeatures::IsSupported(SSE3))
@@ -1704,8 +1673,6 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
: FixedTemp(xmm1);
LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
- } else {
- return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1733,14 +1700,24 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LOperand* value = UseRegister(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
return DefineSameAsFirst(new(zone()) LSmiTag(value));
+ } else if (val->CheckFlag(HInstruction::kUint32)) {
+ LOperand* temp = FixedTemp(xmm1);
+ LNumberTagU* result = new(zone()) LNumberTagU(value, temp);
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
} else {
LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
}
} else {
ASSERT(to.IsDouble());
- return DefineAsRegister(
- new(zone()) LInteger32ToDouble(Use(instr->value())));
+ if (instr->value()->CheckFlag(HInstruction::kUint32)) {
+ LOperand* temp = FixedTemp(xmm1);
+ return DefineAsRegister(
+ new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
+ } else {
+ return DefineAsRegister(
+ new(zone()) LInteger32ToDouble(Use(instr->value())));
+ }
}
}
UNREACHABLE();
@@ -1763,9 +1740,9 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LOperand* temp = TempRegister();
+ LUnallocated* temp = TempRegister();
LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
- return AssignEnvironment(result);
+ return AssignEnvironment(Define(result, temp));
}
@@ -1799,7 +1776,7 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
if (input_rep.IsDouble()) {
LOperand* reg = UseRegister(value);
- return DefineAsRegister(new(zone()) LClampDToUint8(reg));
+ return DefineFixed(new(zone()) LClampDToUint8(reg), eax);
} else if (input_rep.IsInteger32()) {
LOperand* reg = UseFixed(value, eax);
return DefineFixed(new(zone()) LClampIToUint8(reg), eax);
@@ -1952,51 +1929,35 @@ LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
}
-LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
- HLoadKeyedFastElement* instr) {
- ASSERT(instr->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
- if (instr->RequiresHoleCheck()) AssignEnvironment(result);
- return DefineAsRegister(result);
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
- HLoadKeyedFastDoubleElement* instr) {
- ASSERT(instr->representation().IsDouble());
- ASSERT(instr->key()->representation().IsInteger32());
+LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
+ ASSERT(instr->key()->representation().IsInteger32() ||
+ instr->key()->representation().IsTagged());
+ ElementsKind elements_kind = instr->elements_kind();
LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastDoubleElement* result =
- new(zone()) LLoadKeyedFastDoubleElement(elements, key);
- return AssignEnvironment(DefineAsRegister(result));
-}
+ LOperand* key = instr->is_external() &&
+ ExternalArrayOpRequiresTemp<HLoadKeyed>(instr)
+ ? UseTempRegister(instr->key())
+ : UseRegisterOrConstantAtStart(instr->key());
+#ifdef DEBUG
+ if (instr->is_external()) {
+ ASSERT(
+ (instr->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ }
+#endif
-LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
- HLoadKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- LOperand* key = UseRegisterOrConstant(instr->key());
- LLoadKeyedSpecializedArrayElement* result =
- new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
- LInstruction* load_instr = DefineAsRegister(result);
+ LLoadKeyed* result = new(zone()) LLoadKeyed(elements, key);
+ DefineAsRegister(result);
+ bool can_deoptimize = instr->RequiresHoleCheck() ||
+ (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
- return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS)
- ? AssignEnvironment(load_instr)
- : load_instr;
+ return can_deoptimize ? AssignEnvironment(result) : result;
}
@@ -2011,66 +1972,58 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
- HStoreKeyedFastElement* instr) {
- bool needs_write_barrier = instr->NeedsWriteBarrier();
- ASSERT(instr->value()->representation().IsTagged());
- ASSERT(instr->object()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* obj = UseRegister(instr->object());
- LOperand* val = needs_write_barrier
- ? UseTempRegister(instr->value())
- : UseRegisterAtStart(instr->value());
- LOperand* key = needs_write_barrier
- ? UseTempRegister(instr->key())
- : UseRegisterOrConstantAtStart(instr->key());
- return new(zone()) LStoreKeyedFastElement(obj, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
- HStoreKeyedFastDoubleElement* instr) {
- ASSERT(instr->value()->representation().IsDouble());
- ASSERT(instr->elements()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* val = UseTempRegister(instr->value());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-
- return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
- HStoreKeyedSpecializedArrayElement* instr) {
+LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->value()->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->value()->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->external_pointer()->representation().IsExternal());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- LOperand* key = UseRegisterOrConstant(instr->key());
- LOperand* val = NULL;
- if (elements_kind == EXTERNAL_BYTE_ELEMENTS ||
- elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
- elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
- // We need a byte register in this case for the value.
- val = UseFixed(instr->value(), eax);
+ LOperand* elements;
+ LOperand* val;
+ LOperand* key;
+
+ if (!instr->is_external()) {
+ ASSERT(instr->elements()->representation().IsTagged());
+ ASSERT(instr->key()->representation().IsInteger32() ||
+ instr->key()->representation().IsTagged());
+
+ if (instr->NeedsWriteBarrier() &&
+ !IsFastDoubleElementsKind(elements_kind)) {
+ val = UseTempRegister(instr->value());
+ key = UseTempRegister(instr->key());
+ elements = UseRegister(instr->elements());
+ } else {
+ val = UseRegisterAtStart(instr->value());
+ key = UseRegisterOrConstantAtStart(instr->key());
+ elements = UseRegisterAtStart(instr->elements());
+ }
} else {
- val = UseRegister(instr->value());
+ ASSERT(
+ (instr->value()->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->value()->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ ASSERT(instr->elements()->representation().IsExternal());
+
+ if (ExternalArrayOpRequiresTemp<HStoreKeyed>(instr)) {
+ key = UseTempRegister(instr->key());
+ elements = UseRegister(instr->elements());
+ } else {
+ key = UseRegisterOrConstantAtStart(instr->key());
+ elements = UseRegisterAtStart(instr->elements());
+ }
+
+ // Determine if we need a byte register in this case for the value.
+ bool val_is_fixed_register =
+ elements_kind == EXTERNAL_BYTE_ELEMENTS ||
+ elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
+ elements_kind == EXTERNAL_PIXEL_ELEMENTS;
+ val = val_is_fixed_register
+ ? UseFixed(instr->value(), eax)
+ : UseRegister(instr->value());
}
- return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
+ ASSERT(result != NULL);
+ return result;
}
@@ -2092,8 +2045,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
@@ -2115,6 +2069,8 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
LOperand* obj;
if (needs_write_barrier) {
@@ -2122,7 +2078,9 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseRegister(instr->object())
: UseTempRegister(instr->object());
} else {
- obj = UseRegisterAtStart(instr->object());
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
}
LOperand* val = needs_write_barrier
@@ -2131,11 +2089,13 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
// We only need a scratch register if we have a write barrier or we
// have a store into the properties array (not in-object-property).
- LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
- ? TempRegister()
- : NULL;
+ LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
+ needs_write_barrier_for_map) ? TempRegister() : NULL;
+
+ // We need a temporary register for write barrier of the map field.
+ LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
- return new(zone()) LStoreNamedField(obj, val, temp);
+ return new(zone()) LStoreNamedField(obj, val, temp, temp_map);
}
@@ -2237,6 +2197,7 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
+ ASSERT(argument_count_ == 0);
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
return AssignEnvironment(new(zone()) LOsrEntry);
@@ -2277,12 +2238,10 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
- LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* args = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = Use(instr->index());
- LAccessArgumentsAt* result =
- new(zone()) LAccessArgumentsAt(arguments, length, index);
- return AssignEnvironment(DefineAsRegister(result));
+ return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
@@ -2330,7 +2289,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
- if (pending_deoptimization_ast_id_ != AstNode::kNoNumber) {
+ if (!pending_deoptimization_ast_id_.IsNone()) {
ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
@@ -2339,7 +2298,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
+ pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
@@ -2368,10 +2327,11 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->function(),
undefined,
instr->call_kind(),
- instr->is_construct());
+ instr->inlining_kind());
if (instr->arguments_var() != NULL) {
inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
+ inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2383,7 +2343,7 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
HEnvironment* env = current_block_->last_environment();
- if (instr->arguments_pushed()) {
+ if (env->entry()->arguments_pushed()) {
int argument_count = env->arguments_environment()->parameter_count();
pop = new(zone()) LDrop(argument_count);
argument_count_ -= argument_count;
diff --git a/src/3rdparty/v8/src/ia32/lithium-ia32.h b/src/3rdparty/v8/src/ia32/lithium-ia32.h
index 16b5610..a1adb01 100644
--- a/src/3rdparty/v8/src/ia32/lithium-ia32.h
+++ b/src/3rdparty/v8/src/ia32/lithium-ia32.h
@@ -102,6 +102,7 @@ class LCodeGen;
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
+ V(Uint32ToDouble) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
@@ -109,7 +110,6 @@ class LCodeGen;
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
- V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -119,18 +119,20 @@ class LCodeGen;
V(LoadFunctionPrototype) \
V(LoadGlobalCell) \
V(LoadGlobalGeneric) \
- V(LoadKeyedFastElement) \
- V(LoadKeyedFastDoubleElement) \
+ V(LoadKeyed) \
V(LoadKeyedGeneric) \
- V(LoadKeyedSpecializedArrayElement) \
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MapEnumLength) \
+ V(MathFloorOfDiv) \
+ V(MathMinMax) \
V(MathPowHalf) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
V(NumberTagI) \
+ V(NumberTagU) \
V(NumberUntagD) \
V(ObjectLiteral) \
V(OsrEntry) \
@@ -148,15 +150,14 @@ class LCodeGen;
V(StoreContextSlot) \
V(StoreGlobalCell) \
V(StoreGlobalGeneric) \
- V(StoreKeyedFastDoubleElement) \
- V(StoreKeyedFastElement) \
+ V(StoreKeyed) \
V(StoreKeyedGeneric) \
- V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
+ V(StringCompareAndBranch) \
V(StringLength) \
V(SubI) \
V(TaggedToI) \
@@ -252,11 +253,6 @@ class LInstruction: public ZoneObject {
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
- virtual int InputCount() = 0;
- virtual LOperand* InputAt(int i) = 0;
- virtual int TempCount() = 0;
- virtual LOperand* TempAt(int i) = 0;
-
LOperand* FirstInput() { return InputAt(0); }
LOperand* Output() { return HasResult() ? result() : NULL; }
@@ -265,6 +261,15 @@ class LInstruction: public ZoneObject {
#endif
private:
+ // Iterator support.
+ friend class InputIterator;
+ virtual int InputCount() = 0;
+ virtual LOperand* InputAt(int i) = 0;
+
+ friend class TempIterator;
+ virtual int TempCount() = 0;
+ virtual LOperand* TempAt(int i) = 0;
+
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
@@ -284,16 +289,18 @@ class LTemplateInstruction: public LInstruction {
void set_result(LOperand* operand) { results_[0] = operand; }
LOperand* result() { return results_[0]; }
- int InputCount() { return I; }
- LOperand* InputAt(int i) { return inputs_[i]; }
-
- int TempCount() { return T; }
- LOperand* TempAt(int i) { return temps_[i]; }
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
EmbeddedContainer<LOperand*, T> temps_;
+
+ private:
+ // Iterator support.
+ virtual int InputCount() { return I; }
+ virtual LOperand* InputAt(int i) { return inputs_[i]; }
+
+ virtual int TempCount() { return T; }
+ virtual LOperand* TempAt(int i) { return temps_[i]; }
};
@@ -327,8 +334,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -411,11 +420,11 @@ class LCallStub: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallStub, "call-stub")
DECLARE_HYDROGEN_ACCESSOR(CallStub)
- LOperand* context() { return inputs_[0]; }
-
TranscendentalCache::Type transcendental_type() {
return hydrogen()->transcendental_type();
}
@@ -455,10 +464,11 @@ class LWrapReceiver: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
- DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
-
LOperand* receiver() { return inputs_[0]; }
LOperand* function() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
};
@@ -474,12 +484,12 @@ class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
inputs_[3] = elements;
}
- DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
-
LOperand* function() { return inputs_[0]; }
LOperand* receiver() { return inputs_[1]; }
LOperand* length() { return inputs_[2]; }
LOperand* elements() { return inputs_[3]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
};
@@ -491,12 +501,12 @@ class LAccessArgumentsAt: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(AccessArgumentsAt, "access-arguments-at")
-
LOperand* arguments() { return inputs_[0]; }
LOperand* length() { return inputs_[1]; }
LOperand* index() { return inputs_[2]; }
+ DECLARE_CONCRETE_INSTRUCTION(AccessArgumentsAt, "access-arguments-at")
+
virtual void PrintDataTo(StringStream* stream);
};
@@ -507,6 +517,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = elements;
}
+ LOperand* elements() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ArgumentsLength, "arguments-length")
};
@@ -526,6 +538,10 @@ class LModI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ModI, "mod-i")
DECLARE_HYDROGEN_ACCESSOR(Mod)
};
@@ -539,11 +555,33 @@ class LDivI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i")
DECLARE_HYDROGEN_ACCESSOR(Div)
};
+class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LMathFloorOfDiv(LOperand* left,
+ LOperand* right,
+ LOperand* temp = NULL) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ temps_[0] = temp;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div")
+ DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv)
+};
+
+
class LMulI: public LTemplateInstruction<1, 2, 1> {
public:
LMulI(LOperand* left, LOperand* right, LOperand* temp) {
@@ -552,6 +590,10 @@ class LMulI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(MulI, "mul-i")
DECLARE_HYDROGEN_ACCESSOR(Mul)
};
@@ -564,6 +606,9 @@ class LCmpIDAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpIDAndBranch, "cmp-id-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareIDAndBranch)
@@ -619,6 +664,9 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
"cmp-object-eq-and-branch")
};
@@ -630,6 +678,8 @@ class LCmpConstantEqAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = left;
}
+ LOperand* left() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpConstantEqAndBranch,
"cmp-constant-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareConstantEqAndBranch)
@@ -643,6 +693,9 @@ class LIsNilAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsNilAndBranch, "is-nil-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNilAndBranch)
@@ -660,6 +713,9 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsObjectAndBranch, "is-object-and-branch")
virtual void PrintDataTo(StringStream* stream);
@@ -673,6 +729,9 @@ class LIsStringAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
virtual void PrintDataTo(StringStream* stream);
@@ -685,6 +744,8 @@ class LIsSmiAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsSmiAndBranch, "is-smi-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsSmiAndBranch)
@@ -699,6 +760,9 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsUndetectableAndBranch,
"is-undetectable-and-branch")
@@ -714,6 +778,9 @@ class LStringCompareAndBranch: public LControlInstruction<3, 0> {
inputs_[2] = right;
}
+ LOperand* left() { return inputs_[1]; }
+ LOperand* right() { return inputs_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
"string-compare-and-branch")
DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
@@ -731,6 +798,9 @@ class LHasInstanceTypeAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasInstanceTypeAndBranch,
"has-instance-type-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInstanceTypeAndBranch)
@@ -745,6 +815,8 @@ class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
};
@@ -756,8 +828,11 @@ class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasCachedArrayIndexAndBranch,
"has-cached-array-index-and-branch")
+
virtual void PrintDataTo(StringStream* stream);
};
@@ -768,6 +843,8 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
"is-construct-call-and-branch")
};
@@ -781,6 +858,10 @@ class LClassOfTestAndBranch: public LControlInstruction<1, 2> {
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(ClassOfTestAndBranch,
"class-of-test-and-branch")
DECLARE_HYDROGEN_ACCESSOR(ClassOfTestAndBranch)
@@ -812,9 +893,9 @@ class LInstanceOf: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
- DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
-
LOperand* context() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
@@ -826,6 +907,9 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
@@ -854,6 +938,7 @@ class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
LOperand* length() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck, "bounds-check")
+ DECLARE_HYDROGEN_ACCESSOR(BoundsCheck)
};
@@ -864,10 +949,13 @@ class LBitI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
- Token::Value op() const { return hydrogen()->op(); }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i")
DECLARE_HYDROGEN_ACCESSOR(Bitwise)
+
+ Token::Value op() const { return hydrogen()->op(); }
};
@@ -879,12 +967,14 @@ class LShiftI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
- Token::Value op() const { return op_; }
-
- bool can_deopt() const { return can_deopt_; }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(ShiftI, "shift-i")
+ Token::Value op() const { return op_; }
+ bool can_deopt() const { return can_deopt_; }
+
private:
Token::Value op_;
bool can_deopt_;
@@ -898,6 +988,9 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(SubI, "sub-i")
DECLARE_HYDROGEN_ACCESSOR(Sub)
};
@@ -918,6 +1011,8 @@ class LConstantD: public LTemplateInstruction<1, 0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ConstantD, "constant-d")
DECLARE_HYDROGEN_ACCESSOR(Constant)
@@ -936,11 +1031,14 @@ class LConstantT: public LTemplateInstruction<1, 0, 0> {
class LBranch: public LControlInstruction<1, 1> {
public:
- explicit LBranch(LOperand* value, LOperand* temp) {
+ LBranch(LOperand* value, LOperand* temp) {
inputs_[0] = value;
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")
DECLARE_HYDROGEN_ACCESSOR(Branch)
@@ -954,6 +1052,8 @@ class LCmpMapAndBranch: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareMap)
@@ -975,6 +1075,8 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength, "js-array-length")
DECLARE_HYDROGEN_ACCESSOR(JSArrayLength)
};
@@ -986,18 +1088,34 @@ class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
"fixed-array-base-length")
DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
+class LMapEnumLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LMapEnumLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MapEnumLength, "map-enum-length")
+};
+
+
class LElementsKind: public LTemplateInstruction<1, 1, 0> {
public:
explicit LElementsKind(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ElementsKind, "elements-kind")
DECLARE_HYDROGEN_ACCESSOR(ElementsKind)
};
@@ -1010,6 +1128,9 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "value-of")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
};
@@ -1023,6 +1144,9 @@ class LDateField: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* date() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(DateField, "date-field")
DECLARE_HYDROGEN_ACCESSOR(DateField)
@@ -1053,6 +1177,8 @@ class LBitNotI: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
};
@@ -1064,11 +1190,29 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
DECLARE_HYDROGEN_ACCESSOR(Add)
};
+class LMathMinMax: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LMathMinMax(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathMinMax, "min-max")
+ DECLARE_HYDROGEN_ACCESSOR(MathMinMax)
+};
+
+
class LPower: public LTemplateInstruction<1, 2, 0> {
public:
LPower(LOperand* left, LOperand* right) {
@@ -1076,6 +1220,9 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(Power, "power")
DECLARE_HYDROGEN_ACCESSOR(Power)
};
@@ -1087,6 +1234,8 @@ class LRandom: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Random, "random")
DECLARE_HYDROGEN_ACCESSOR(Random)
};
@@ -1100,6 +1249,9 @@ class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
Token::Value op() const { return op_; }
virtual Opcode opcode() const { return LInstruction::kArithmeticD; }
@@ -1123,14 +1275,15 @@ class LArithmeticT: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* left() { return inputs_[1]; }
+ LOperand* right() { return inputs_[2]; }
+
virtual Opcode opcode() const { return LInstruction::kArithmeticT; }
virtual void CompileToNative(LCodeGen* generator);
virtual const char* Mnemonic() const;
Token::Value op() const { return op_; }
- LOperand* context() { return inputs_[0]; }
- LOperand* left() { return inputs_[1]; }
- LOperand* right() { return inputs_[2]; }
private:
Token::Value op_;
@@ -1153,10 +1306,10 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedField)
-
- LOperand* object() { return inputs_[0]; }
};
@@ -1167,11 +1320,11 @@ class LLoadNamedFieldPolymorphic: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = object;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field-polymorphic")
- DECLARE_HYDROGEN_ACCESSOR(LoadNamedFieldPolymorphic)
-
LOperand* context() { return inputs_[0]; }
LOperand* object() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field-polymorphic")
+ DECLARE_HYDROGEN_ACCESSOR(LoadNamedFieldPolymorphic)
};
@@ -1182,11 +1335,12 @@ class LLoadNamedGeneric: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = object;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric, "load-named-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedGeneric)
- LOperand* context() { return inputs_[0]; }
- LOperand* object() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
};
@@ -1198,10 +1352,11 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* function() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadFunctionPrototype, "load-function-prototype")
DECLARE_HYDROGEN_ACCESSOR(LoadFunctionPrototype)
-
- LOperand* function() { return inputs_[0]; }
};
@@ -1211,6 +1366,8 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
};
@@ -1221,62 +1378,50 @@ class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadExternalArrayPointer,
"load-external-array-pointer")
};
-class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyed: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
+ LLoadKeyed(LOperand* elements, LOperand* key) {
inputs_[0] = elements;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement, "load-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastElement)
-
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
- inputs_[0] = elements;
- inputs_[1] = key;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
+ }
+ bool is_external() const {
+ return hydrogen()->is_external();
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement,
- "load-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastDoubleElement)
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(LoadKeyed)
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
-class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- }
+template <class T>
+inline static bool ExternalArrayOpRequiresTemp(T* value) {
+ CHECK(value->IsLoadKeyed() || value->IsStoreKeyed());
+ Representation key_representation = value->key()->representation();
+ ElementsKind elements_kind = value->elements_kind();
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
- "load-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)
-
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
+ // Operations that require the key to be divided by two to be converted into
+ // an index cannot fold the scale operation into a load and need an extra
+ // temp register to do the work.
+ return !value->IsConstant() && key_representation.IsTagged() &&
+ (elements_kind == EXTERNAL_BYTE_ELEMENTS ||
+ elements_kind == EXTERNAL_UNSIGNED_BYTE_ELEMENTS ||
+ elements_kind == EXTERNAL_PIXEL_ELEMENTS);
+}
class LLoadKeyedGeneric: public LTemplateInstruction<1, 3, 0> {
@@ -1287,11 +1432,11 @@ class LLoadKeyedGeneric: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
-
LOperand* context() { return inputs_[0]; }
LOperand* object() { return inputs_[1]; }
LOperand* key() { return inputs_[2]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
};
@@ -1309,11 +1454,12 @@ class LLoadGlobalGeneric: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = global_object;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* global_object() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadGlobalGeneric, "load-global-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadGlobalGeneric)
- LOperand* context() { return inputs_[0]; }
- LOperand* global_object() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
bool for_typeof() const { return hydrogen()->for_typeof(); }
};
@@ -1325,10 +1471,10 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
-
- LOperand* value() { return inputs_[0]; }
};
@@ -1342,13 +1488,14 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 3, 0> {
inputs_[2] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* global_object() { return inputs_[1]; }
+ LOperand* value() { return inputs_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalGeneric, "store-global-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalGeneric)
- LOperand* context() { return InputAt(0); }
- LOperand* global_object() { return InputAt(1); }
Handle<Object> name() const { return hydrogen()->name(); }
- LOperand* value() { return InputAt(2); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1359,10 +1506,11 @@ class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
- LOperand* context() { return InputAt(0); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1377,11 +1525,13 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 1> {
temps_[0] = temp;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot, "store-context-slot")
DECLARE_HYDROGEN_ACCESSOR(StoreContextSlot)
- LOperand* context() { return InputAt(0); }
- LOperand* value() { return InputAt(1); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1394,6 +1544,8 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(PushArgument, "push-argument")
};
@@ -1430,9 +1582,9 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
- DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
+ LOperand* context() { return inputs_[0]; }
- LOperand* context() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
};
@@ -1442,6 +1594,8 @@ class LDeclareGlobals: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals, "declare-globals")
DECLARE_HYDROGEN_ACCESSOR(DeclareGlobals)
};
@@ -1454,10 +1608,12 @@ class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
qml_global_ = qml_global;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
- LOperand* context() { return InputAt(0); }
bool qml_global() { return qml_global_; }
+
private:
bool qml_global_;
};
@@ -1469,9 +1625,9 @@ class LGlobalReceiver: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
- DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
+ LOperand* global() { return inputs_[0]; }
- LOperand* global() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
};
@@ -1494,12 +1650,12 @@ class LInvokeFunction: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = function;
}
- DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
- DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
-
LOperand* context() { return inputs_[0]; }
LOperand* function() { return inputs_[1]; }
+ DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
+ DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
+
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1514,12 +1670,12 @@ class LCallKeyed: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call-keyed")
- DECLARE_HYDROGEN_ACCESSOR(CallKeyed)
-
LOperand* context() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(CallKeyed)
+
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1532,12 +1688,13 @@ class LCallNamed: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallNamed, "call-named")
DECLARE_HYDROGEN_ACCESSOR(CallNamed)
virtual void PrintDataTo(StringStream* stream);
- LOperand* context() { return inputs_[0]; }
Handle<String> name() const { return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1550,11 +1707,12 @@ class LCallFunction: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = function;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* function() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- LOperand* context() { return inputs_[0]; }
- LOperand* function() { return inputs_[1]; }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1565,12 +1723,13 @@ class LCallGlobal: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallGlobal, "call-global")
DECLARE_HYDROGEN_ACCESSOR(CallGlobal)
virtual void PrintDataTo(StringStream* stream);
- LOperand* context() { return inputs_[0]; }
Handle<String> name() const {return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1599,13 +1758,14 @@ class LCallNew: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = constructor;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* constructor() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
DECLARE_HYDROGEN_ACCESSOR(CallNew)
virtual void PrintDataTo(StringStream* stream);
- LOperand* context() { return inputs_[0]; }
- LOperand* constructor() { return inputs_[1]; }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1615,10 +1775,12 @@ class LCallRuntime: public LTemplateInstruction<1, 1, 0> {
explicit LCallRuntime(LOperand* context) {
inputs_[0] = context;
}
+
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
DECLARE_HYDROGEN_ACCESSOR(CallRuntime)
- LOperand* context() { return inputs_[0]; }
const Runtime::Function* function() const { return hydrogen()->function(); }
int arity() const { return hydrogen()->argument_count(); }
};
@@ -1630,20 +1792,51 @@ class LInteger32ToDouble: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Integer32ToDouble, "int32-to-double")
};
+class LUint32ToDouble: public LTemplateInstruction<1, 1, 1> {
+ public:
+ explicit LUint32ToDouble(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Uint32ToDouble, "uint32-to-double")
+};
+
+
class LNumberTagI: public LTemplateInstruction<1, 1, 0> {
public:
explicit LNumberTagI(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagI, "number-tag-i")
};
+class LNumberTagU: public LTemplateInstruction<1, 1, 1> {
+ public:
+ explicit LNumberTagU(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(NumberTagU, "number-tag-u")
+};
+
+
class LNumberTagD: public LTemplateInstruction<1, 1, 1> {
public:
LNumberTagD(LOperand* value, LOperand* temp) {
@@ -1651,6 +1844,9 @@ class LNumberTagD: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagD, "number-tag-d")
};
@@ -1663,6 +1859,9 @@ class LDoubleToI: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(DoubleToI, "double-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1678,6 +1877,9 @@ class LTaggedToI: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TaggedToI, "tagged-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1691,6 +1893,8 @@ class LSmiTag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(SmiTag, "smi-tag")
};
@@ -1702,6 +1906,9 @@ class LNumberUntagD: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
DECLARE_HYDROGEN_ACCESSOR(Change);
};
@@ -1714,6 +1921,8 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
bool needs_check() const { return needs_check_; }
@@ -1723,22 +1932,28 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 2> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val, LOperand* temp) {
+ LStoreNamedField(LOperand* obj,
+ LOperand* val,
+ LOperand* temp,
+ LOperand* temp_map) {
inputs_[0] = obj;
inputs_[1] = val;
temps_[0] = temp;
+ temps_[1] = temp_map;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp_map() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedField)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
-
Handle<Object> name() const { return hydrogen()->name(); }
bool is_in_object() { return hydrogen()->is_in_object(); }
int offset() { return hydrogen()->offset(); }
@@ -1754,89 +1969,44 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 3, 0> {
inputs_[2] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+ LOperand* value() { return inputs_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
virtual void PrintDataTo(StringStream* stream);
-
- LOperand* context() { return inputs_[0]; }
- LOperand* object() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
-class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyed: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedFastElement(LOperand* obj, LOperand* key, LOperand* val) {
+ LStoreKeyed(LOperand* obj, LOperand* key, LOperand* val) {
inputs_[0] = obj;
inputs_[1] = key;
inputs_[2] = val;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement,
- "store-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastElement)
-
- virtual void PrintDataTo(StringStream* stream);
-
- LOperand* object() { return inputs_[0]; }
+ bool is_external() const { return hydrogen()->is_external(); }
+ LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedFastDoubleElement(LOperand* elements,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = elements;
- inputs_[1] = key;
- inputs_[2] = val;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement,
- "store-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastDoubleElement)
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyed, "store-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyed)
virtual void PrintDataTo(StringStream* stream);
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
uint32_t additional_index() const { return hydrogen()->index_offset(); }
-
bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
};
-class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- inputs_[2] = val;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
- "store-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)
-
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
class LStoreKeyedGeneric: public LTemplateInstruction<0, 4, 0> {
public:
LStoreKeyedGeneric(LOperand* context,
@@ -1849,15 +2019,16 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 4, 0> {
inputs_[3] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+ LOperand* key() { return inputs_[2]; }
+ LOperand* value() { return inputs_[3]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
virtual void PrintDataTo(StringStream* stream);
- LOperand* context() { return inputs_[0]; }
- LOperand* object() { return inputs_[1]; }
- LOperand* key() { return inputs_[2]; }
- LOperand* value() { return inputs_[3]; }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1866,21 +2037,22 @@ class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
public:
LTransitionElementsKind(LOperand* object,
LOperand* new_map_temp,
- LOperand* temp_reg) {
+ LOperand* temp) {
inputs_[0] = object;
temps_[0] = new_map_temp;
- temps_[1] = temp_reg;
+ temps_[1] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* new_map_temp() { return temps_[0]; }
+ LOperand* temp() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
"transition-elements-kind")
DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* new_map_reg() { return temps_[0]; }
- LOperand* temp_reg() { return temps_[1]; }
Handle<Map> original_map() { return hydrogen()->original_map(); }
Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
};
@@ -1894,12 +2066,12 @@ class LStringAdd: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
LOperand* context() { return inputs_[0]; }
LOperand* left() { return inputs_[1]; }
LOperand* right() { return inputs_[2]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+ DECLARE_HYDROGEN_ACCESSOR(StringAdd)
};
@@ -1911,12 +2083,12 @@ class LStringCharCodeAt: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
- DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
-
LOperand* context() { return inputs_[0]; }
LOperand* string() { return inputs_[1]; }
LOperand* index() { return inputs_[2]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
+ DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
};
@@ -1927,11 +2099,11 @@ class LStringCharFromCode: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = char_code;
}
- DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
- DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
-
LOperand* context() { return inputs_[0]; }
LOperand* char_code() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
+ DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
};
@@ -1941,10 +2113,10 @@ class LStringLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = string;
}
+ LOperand* string() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
DECLARE_HYDROGEN_ACCESSOR(StringLength)
-
- LOperand* string() { return inputs_[0]; }
};
@@ -1968,6 +2140,9 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckInstanceType, "check-instance-type")
DECLARE_HYDROGEN_ACCESSOR(CheckInstanceType)
};
@@ -1979,17 +2154,21 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
+class LCheckPrototypeMaps: public LTemplateInstruction<1, 0, 1> {
public:
explicit LCheckPrototypeMaps(LOperand* temp) {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
@@ -2004,6 +2183,8 @@ class LCheckSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckSmi, "check-smi")
};
@@ -2051,6 +2232,8 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check-non-smi")
};
@@ -2062,10 +2245,11 @@ class LAllocateObject: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
-
- LOperand* context() { return inputs_[0]; }
};
@@ -2142,6 +2326,8 @@ class LToFastProperties: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ToFastProperties, "to-fast-properties")
DECLARE_HYDROGEN_ACCESSOR(ToFastProperties)
};
@@ -2154,6 +2340,9 @@ class LTypeof: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(Typeof, "typeof")
};
@@ -2164,6 +2353,8 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TypeofIsAndBranch, "typeof-is-and-branch")
DECLARE_HYDROGEN_ACCESSOR(TypeofIsAndBranch)
@@ -2181,11 +2372,11 @@ class LDeleteProperty: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
-
LOperand* context() { return inputs_[0]; }
LOperand* object() { return inputs_[1]; }
LOperand* key() { return inputs_[2]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
};
@@ -2305,69 +2496,19 @@ class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
class LChunkBuilder;
-class LChunk: public ZoneObject {
+class LPlatformChunk: public LChunk {
public:
- LChunk(CompilationInfo* info, HGraph* graph)
- : spill_slot_count_(0),
- info_(info),
- graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) { }
-
- void AddInstruction(LInstruction* instruction, HBasicBlock* block);
- LConstantOperand* DefineConstantOperand(HConstant* constant);
- Handle<Object> LookupLiteral(LConstantOperand* operand) const;
- Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
+ LPlatformChunk(CompilationInfo* info, HGraph* graph)
+ : LChunk(info, graph),
+ num_double_slots_(0) { }
int GetNextSpillIndex(bool is_double);
LOperand* GetNextSpillSlot(bool is_double);
- int ParameterAt(int index);
- int GetParameterStackSlot(int index) const;
- int spill_slot_count() const { return spill_slot_count_; }
- CompilationInfo* info() const { return info_; }
- HGraph* graph() const { return graph_; }
- const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
- void AddGapMove(int index, LOperand* from, LOperand* to);
- LGap* GetGapAt(int index) const;
- bool IsGapAt(int index) const;
- int NearestGapPos(int index) const;
- void MarkEmptyBlocks();
- const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
- LLabel* GetLabel(int block_id) const {
- HBasicBlock* block = graph_->blocks()->at(block_id);
- int first_instruction = block->first_instruction_index();
- return LLabel::cast(instructions_[first_instruction]);
- }
- int LookupDestination(int block_id) const {
- LLabel* cur = GetLabel(block_id);
- while (cur->replacement() != NULL) {
- cur = cur->replacement();
- }
- return cur->block_id();
- }
- Label* GetAssemblyLabel(int block_id) const {
- LLabel* label = GetLabel(block_id);
- ASSERT(!label->HasReplacement());
- return label->label();
- }
-
- const ZoneList<Handle<JSFunction> >* inlined_closures() const {
- return &inlined_closures_;
- }
-
- void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
- }
+ int num_double_slots() const { return num_double_slots_; }
private:
- int spill_slot_count_;
- CompilationInfo* info_;
- HGraph* const graph_;
- ZoneList<LInstruction*> instructions_;
- ZoneList<LPointerMap*> pointer_maps_;
- ZoneList<Handle<JSFunction> > inlined_closures_;
+ int num_double_slots_;
};
@@ -2377,7 +2518,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2386,16 +2527,19 @@ class LChunkBuilder BASE_EMBEDDED {
allocator_(allocator),
position_(RelocInfo::kNoPosition),
instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(AstNode::kNoNumber) { }
+ pending_deoptimization_ast_id_(BailoutId::None()) { }
// Build the sequence for the graph.
- LChunk* Build();
+ LPlatformChunk* Build();
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) LInstruction* Do##type(H##type* node);
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
+ static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* val);
+ static HValue* SimplifiedDivisorForMathFloorOfDiv(HValue* val);
+
private:
enum Status {
UNUSED,
@@ -2404,17 +2548,17 @@ class LChunkBuilder BASE_EMBEDDED {
ABORTED
};
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
@@ -2508,7 +2652,7 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* DoArithmeticT(Token::Value op,
HArithmeticBinaryOperation* instr);
- LChunk* chunk_;
+ LPlatformChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Zone* zone_;
@@ -2520,7 +2664,7 @@ class LChunkBuilder BASE_EMBEDDED {
LAllocator* allocator_;
int position_;
LInstruction* instruction_pending_deoptimization_environment_;
- int pending_deoptimization_ast_id_;
+ BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
index c31b0c2..26d0f92 100644
--- a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
@@ -85,7 +85,7 @@ void MacroAssembler::RememberedSetHelper(
SaveFPRegsMode save_fp,
MacroAssembler::RememberedSetFinalAction and_then) {
Label done;
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
int3();
@@ -129,17 +129,22 @@ void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
XMMRegister scratch_reg,
Register result_reg) {
Label done;
- ExternalReference zero_ref = ExternalReference::address_of_zero();
- movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
+ Label conv_failure;
+ pxor(scratch_reg, scratch_reg);
+ cvtsd2si(result_reg, input_reg);
+ test(result_reg, Immediate(0xFFFFFF00));
+ j(zero, &done, Label::kNear);
+ cmp(result_reg, Immediate(0x80000000));
+ j(equal, &conv_failure, Label::kNear);
+ mov(result_reg, Immediate(0));
+ setcc(above, result_reg);
+ sub(result_reg, Immediate(1));
+ and_(result_reg, Immediate(255));
+ jmp(&done, Label::kNear);
+ bind(&conv_failure);
Set(result_reg, Immediate(0));
ucomisd(input_reg, scratch_reg);
j(below, &done, Label::kNear);
- ExternalReference half_ref = ExternalReference::address_of_one_half();
- movdbl(scratch_reg, Operand::StaticVariable(half_ref));
- addsd(scratch_reg, input_reg);
- cvttsd2si(result_reg, Operand(scratch_reg));
- test(result_reg, Immediate(0xFFFFFF00));
- j(zero, &done, Label::kNear);
Set(result_reg, Immediate(255));
bind(&done);
}
@@ -155,6 +160,24 @@ void MacroAssembler::ClampUint8(Register reg) {
}
+static double kUint32Bias =
+ static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
+
+
+void MacroAssembler::LoadUint32(XMMRegister dst,
+ Register src,
+ XMMRegister scratch) {
+ Label done;
+ cmp(src, Immediate(0));
+ movdbl(scratch,
+ Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
+ cvtsi2sd(dst, src);
+ j(not_sign, &done, Label::kNear);
+ addsd(dst, scratch);
+ bind(&done);
+}
+
+
void MacroAssembler::RecordWriteArray(Register object,
Register value,
Register index,
@@ -237,6 +260,66 @@ void MacroAssembler::RecordWriteField(
}
+void MacroAssembler::RecordWriteForMap(
+ Register object,
+ Handle<Map> map,
+ Register scratch1,
+ Register scratch2,
+ SaveFPRegsMode save_fp) {
+ Label done;
+
+ Register address = scratch1;
+ Register value = scratch2;
+ if (emit_debug_code()) {
+ Label ok;
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ test_b(address, (1 << kPointerSizeLog2) - 1);
+ j(zero, &ok, Label::kNear);
+ int3();
+ bind(&ok);
+ }
+
+ ASSERT(!object.is(value));
+ ASSERT(!object.is(address));
+ ASSERT(!value.is(address));
+ AssertNotSmi(object);
+
+ if (!FLAG_incremental_marking) {
+ return;
+ }
+
+ // A single check of the map's pages interesting flag suffices, since it is
+ // only set during incremental collection, and then it's also guaranteed that
+ // the from object's page's interesting flag is also set. This optimization
+ // relies on the fact that maps can never be in new space.
+ ASSERT(!isolate()->heap()->InNewSpace(*map));
+ CheckPageFlagForMap(map,
+ MemoryChunk::kPointersToHereAreInterestingMask,
+ zero,
+ &done,
+ Label::kNear);
+
+ // Delay the initialization of |address| and |value| for the stub until it's
+ // known that the will be needed. Up until this point their values are not
+ // needed since they are embedded in the operands of instructions that need
+ // them.
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ mov(value, Immediate(map));
+ RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+ CallStub(&stub);
+
+ bind(&done);
+
+ // Clobber clobbered input registers when running with the debug-code flag
+ // turned on to provoke errors.
+ if (emit_debug_code()) {
+ mov(value, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
+ }
+}
+
+
void MacroAssembler::RecordWrite(Register object,
Register address,
Register value,
@@ -246,16 +329,14 @@ void MacroAssembler::RecordWrite(Register object,
ASSERT(!object.is(value));
ASSERT(!object.is(address));
ASSERT(!value.is(address));
- if (emit_debug_code()) {
- AbortIfSmi(object);
- }
+ AssertNotSmi(object);
if (remembered_set_action == OMIT_REMEMBERED_SET &&
!FLAG_incremental_marking) {
return;
}
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
cmp(value, Operand(address, 0));
j(equal, &ok, Label::kNear);
@@ -382,10 +463,12 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
void MacroAssembler::CheckFastElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
@@ -393,23 +476,26 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(above, fail, distance);
}
@@ -493,24 +579,18 @@ void MacroAssembler::CompareMap(Register obj,
CompareMapMode mode) {
cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ j(equal, early_success, Label::kNear);
+ cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(current_map));
+ }
}
}
}
@@ -592,36 +672,44 @@ void MacroAssembler::FCmp() {
}
-void MacroAssembler::AbortIfNotNumber(Register object) {
- Label ok;
- JumpIfSmi(object, &ok);
- cmp(FieldOperand(object, HeapObject::kMapOffset),
- isolate()->factory()->heap_number_map());
- Assert(equal, "Operand not a number");
- bind(&ok);
+void MacroAssembler::AssertNumber(Register object) {
+ if (emit_debug_code()) {
+ Label ok;
+ JumpIfSmi(object, &ok);
+ cmp(FieldOperand(object, HeapObject::kMapOffset),
+ isolate()->factory()->heap_number_map());
+ Check(equal, "Operand not a number");
+ bind(&ok);
+ }
}
-void MacroAssembler::AbortIfNotSmi(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(equal, "Operand is not a smi");
+void MacroAssembler::AssertSmi(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(equal, "Operand is not a smi");
+ }
}
-void MacroAssembler::AbortIfNotString(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(not_equal, "Operand is not a string");
- push(object);
- mov(object, FieldOperand(object, HeapObject::kMapOffset));
- CmpInstanceType(object, FIRST_NONSTRING_TYPE);
- pop(object);
- Assert(below, "Operand is not a string");
+void MacroAssembler::AssertString(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(not_equal, "Operand is a smi and not a string");
+ push(object);
+ mov(object, FieldOperand(object, HeapObject::kMapOffset));
+ CmpInstanceType(object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Check(below, "Operand is not a string");
+ }
}
-void MacroAssembler::AbortIfSmi(Register object) {
- test(object, Immediate(kSmiTagMask));
- Assert(not_equal, "Operand is a smi");
+void MacroAssembler::AssertNotSmi(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(not_equal, "Operand is a smi");
+ }
}
@@ -921,23 +1009,24 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
cmp(scratch, Immediate(0));
Check(not_equal, "we should not have an empty lexical context");
}
- // Load the global context of the current context.
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ // Load the native context of the current context.
+ int offset =
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
mov(scratch, FieldOperand(scratch, offset));
- mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
push(scratch);
- // Read the first word and compare to global_context_map.
+ // Read the first word and compare to native_context_map.
mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
- cmp(scratch, isolate()->factory()->global_context_map());
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ cmp(scratch, isolate()->factory()->native_context_map());
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(scratch);
}
// Check if both contexts are the same.
- cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
j(equal, &same_contexts);
// Compare security tokens, save holder_reg on the stack so we can use it
@@ -948,18 +1037,19 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check that the security token in the calling global object is
// compatible with the security token in the receiving global
// object.
- mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ mov(holder_reg,
+ FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
cmp(holder_reg, isolate()->factory()->null_value());
Check(not_equal, "JSGlobalProxy::context() should not be null.");
push(holder_reg);
- // Read the first word and compare to global_context_map(),
+ // Read the first word and compare to native_context_map(),
mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
- cmp(holder_reg, isolate()->factory()->global_context_map());
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ cmp(holder_reg, isolate()->factory()->native_context_map());
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg);
}
@@ -1646,7 +1736,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
}
-void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
+void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
@@ -1861,16 +1951,53 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
cmp(Operand::StaticVariable(scheduled_exception_address),
Immediate(isolate()->factory()->the_hole_value()));
j(not_equal, &promote_scheduled_exception);
+
+#if ENABLE_EXTRA_CHECKS
+ // Check if the function returned a valid JavaScript value.
+ Label ok;
+ Register return_value = eax;
+ Register map = ecx;
+
+ JumpIfSmi(return_value, &ok, Label::kNear);
+ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
+
+ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ j(below, &ok, Label::kNear);
+
+ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ j(above_equal, &ok, Label::kNear);
+
+ cmp(map, isolate()->factory()->heap_number_map());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->undefined_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->true_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->false_value());
+ j(equal, &ok, Label::kNear);
+
+ cmp(return_value, isolate()->factory()->null_value());
+ j(equal, &ok, Label::kNear);
+
+ Abort("API call returned invalid object");
+
+ bind(&ok);
+#endif
+
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
- bind(&promote_scheduled_exception);
- TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
bind(&empty_handle);
// It was zero; the result is undefined.
mov(eax, isolate()->factory()->undefined_value());
jmp(&prologue);
+ bind(&promote_scheduled_exception);
+ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+
// HandleScope limit has changed. Delete allocated extensions.
ExternalReference delete_extensions =
ExternalReference::delete_handle_scope_extensions(isolate());
@@ -2108,7 +2235,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the JavaScript builtin function from the builtins object.
- mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
mov(target, FieldOperand(target,
JSBuiltinsObject::OffsetOfFunctionWithId(id)));
@@ -2157,31 +2284,42 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
- mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- cmp(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ mov(scratch, Operand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmp(map_in_out, FieldOperand(scratch, offset));
j(not_equal, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- mov(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ mov(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
mov(map_out, FieldOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -2192,10 +2330,11 @@ void MacroAssembler::LoadInitialArrayMap(
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
- mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
- mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ mov(function,
+ Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
+ mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
mov(function, Operand(function, Context::SlotOffset(index)));
}
@@ -2446,12 +2585,13 @@ void MacroAssembler::Abort(const char* msg) {
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- mov(descriptors,
- FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
- Label not_smi;
- JumpIfNotSmi(descriptors, &not_smi);
- mov(descriptors, isolate()->factory()->empty_descriptor_array());
- bind(&not_smi);
+ mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
+}
+
+
+void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
+ mov(dst, FieldOperand(map, Map::kBitField3Offset));
+ DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
@@ -2475,7 +2615,7 @@ void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
}
and_(scratch,
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
- cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
+ cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
j(not_equal, failure);
}
@@ -2608,6 +2748,28 @@ void MacroAssembler::CheckPageFlag(
}
+void MacroAssembler::CheckPageFlagForMap(
+ Handle<Map> map,
+ int mask,
+ Condition cc,
+ Label* condition_met,
+ Label::Distance condition_met_distance) {
+ ASSERT(cc == zero || cc == not_zero);
+ Page* page = Page::FromAddress(map->address());
+ ExternalReference reference(ExternalReference::page_flags(page));
+ // The inlined static address check of the page's flags relies
+ // on maps never being compacted.
+ ASSERT(!isolate()->heap()->mark_compact_collector()->
+ IsOnEvacuationCandidate(*map));
+ if (mask < (1 << kBitsPerByte)) {
+ test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
+ } else {
+ test(Operand::StaticVariable(reference), Immediate(mask));
+ }
+ j(cc, condition_met, condition_met_distance);
+}
+
+
void MacroAssembler::JumpIfBlack(Register object,
Register scratch0,
Register scratch1,
@@ -2692,7 +2854,7 @@ void MacroAssembler::EnsureNotWhite(
test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
j(not_zero, &done, Label::kNear);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
// Check for impossible bit pattern.
Label ok;
push(mask_scratch);
@@ -2744,7 +2906,7 @@ void MacroAssembler::EnsureNotWhite(
bind(&not_external);
// Sequential string, either ASCII or UC16.
- ASSERT(kAsciiStringTag == 0x04);
+ ASSERT(kOneByteStringTag == 0x04);
and_(length, Immediate(kStringEncodingMask));
xor_(length, Immediate(kStringEncodingMask));
add(length, Immediate(0x04));
@@ -2767,7 +2929,7 @@ void MacroAssembler::EnsureNotWhite(
and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
length);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
Check(less_equal, "Live Bytes Count overflow chunk size");
@@ -2777,40 +2939,43 @@ void MacroAssembler::EnsureNotWhite(
}
+void MacroAssembler::EnumLength(Register dst, Register map) {
+ STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
+ mov(dst, FieldOperand(map, Map::kBitField3Offset));
+ and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
+}
+
+
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
- Label next;
+ Label next, start;
mov(ecx, eax);
- bind(&next);
-
- // Check that there are no elements. Register ecx contains the
- // current JS object we've reached through the prototype chain.
- cmp(FieldOperand(ecx, JSObject::kElementsOffset),
- isolate()->factory()->empty_fixed_array());
- j(not_equal, call_runtime);
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in ebx for the subsequent
- // prototype load.
+ // Check if the enum length field is properly initialized, indicating that
+ // there is an enum cache.
mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
- mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
- JumpIfSmi(edx, call_runtime);
- // Check that there is an enum cache in the non-empty instance
- // descriptors (edx). This is the case if the next enumeration
- // index field does not contain a smi.
- mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
- JumpIfSmi(edx, call_runtime);
+ EnumLength(edx, ebx);
+ cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
+ j(equal, call_runtime);
+
+ jmp(&start);
+
+ bind(&next);
+ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
// For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- cmp(ecx, eax);
- j(equal, &check_prototype, Label::kNear);
- mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- cmp(edx, isolate()->factory()->empty_fixed_array());
+ EnumLength(edx, ebx);
+ cmp(edx, Immediate(Smi::FromInt(0)));
+ j(not_equal, call_runtime);
+
+ bind(&start);
+
+ // Check that there are no elements. Register rcx contains the current JS
+ // object we've reached through the prototype chain.
+ mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
+ cmp(ecx, isolate()->factory()->empty_fixed_array());
j(not_equal, call_runtime);
- // Load the prototype from the map and loop if non-null.
- bind(&check_prototype);
mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
cmp(ecx, isolate()->factory()->null_value());
j(not_equal, &next);
diff --git a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
index 1cc9142..b91cfcd 100644
--- a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
@@ -90,6 +90,13 @@ class MacroAssembler: public Assembler {
Label* condition_met,
Label::Distance condition_met_distance = Label::kFar);
+ void CheckPageFlagForMap(
+ Handle<Map> map,
+ int mask,
+ Condition cc,
+ Label* condition_met,
+ Label::Distance condition_met_distance = Label::kFar);
+
// Check if object is in new space. Jumps if the object is not in new space.
// The register scratch can be object itself, but scratch will be clobbered.
void JumpIfNotInNewSpace(Register object,
@@ -194,6 +201,16 @@ class MacroAssembler: public Assembler {
RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
SmiCheck smi_check = INLINE_SMI_CHECK);
+ // For page containing |object| mark the region covering the object's map
+ // dirty. |object| is the object being stored into, |map| is the Map object
+ // that was stored.
+ void RecordWriteForMap(
+ Register object,
+ Handle<Map> map,
+ Register scratch1,
+ Register scratch2,
+ SaveFPRegsMode save_fp);
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
@@ -222,8 +239,8 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
// Conditionally load the cached Array transitioned map of type
- // transitioned_kind from the global context if the map in register
- // map_in_out is the cached Array map in the global context of
+ // transitioned_kind from the native context if the map in register
+ // map_in_out is the cached Array map in the native context of
// expected_kind.
void LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
@@ -235,7 +252,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
@@ -357,9 +375,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance = Label::kFar);
+ void CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance = Label::kFar);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
@@ -449,6 +467,8 @@ class MacroAssembler: public Assembler {
j(not_carry, is_smi);
}
+ void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch);
+
// Jump the register contains a smi.
inline void JumpIfSmi(Register value,
Label* smi_label,
@@ -472,20 +492,29 @@ class MacroAssembler: public Assembler {
}
void LoadInstanceDescriptors(Register map, Register descriptors);
-
+ void EnumLength(Register dst, Register map);
+ void NumberOfOwnDescriptors(Register dst, Register map);
+
+ template<typename Field>
+ void DecodeField(Register reg) {
+ static const int shift = Field::kShift;
+ static const int mask = (Field::kMask >> Field::kShift) << kSmiTagSize;
+ sar(reg, shift);
+ and_(reg, Immediate(mask));
+ }
void LoadPowerOf2(XMMRegister dst, Register scratch, int power);
- // Abort execution if argument is not a number. Used in debug code.
- void AbortIfNotNumber(Register object);
+ // Abort execution if argument is not a number, enabled via --debug-code.
+ void AssertNumber(Register object);
- // Abort execution if argument is not a smi. Used in debug code.
- void AbortIfNotSmi(Register object);
+ // Abort execution if argument is not a smi, enabled via --debug-code.
+ void AssertSmi(Register object);
- // Abort execution if argument is a smi. Used in debug code.
- void AbortIfSmi(Register object);
+ // Abort execution if argument is a smi, enabled via --debug-code.
+ void AssertNotSmi(Register object);
- // Abort execution if argument is a string. Used in debug code.
- void AbortIfNotString(Register object);
+ // Abort execution if argument is not a string, enabled via --debug-code.
+ void AssertString(Register object);
// ---------------------------------------------------------------------------
// Exception handling
@@ -670,7 +699,7 @@ class MacroAssembler: public Assembler {
// Runtime calls
// Call a code stub. Generate the code if necessary.
- void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
+ void CallStub(CodeStub* stub, TypeFeedbackId ast_id = TypeFeedbackId::None());
// Tail call a code stub (jump). Generate the code if necessary.
void TailCallStub(CodeStub* stub);
@@ -943,11 +972,11 @@ inline Operand ContextOperand(Register context, int index) {
inline Operand GlobalObjectOperand() {
- return ContextOperand(esi, Context::GLOBAL_INDEX);
+ return ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX);
}
static inline Operand QmlGlobalObjectOperand() {
- return ContextOperand(esi, Context::QML_GLOBAL_INDEX);
+ return ContextOperand(esi, Context::QML_GLOBAL_OBJECT_INDEX);
}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
diff --git a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
index 0029f33..622dc42 100644
--- a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,28 +42,30 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
- * - edx : current character. Must be loaded using LoadCurrentCharacter
- * before using any of the dispatch methods.
- * - edi : current position in input, as negative offset from end of string.
+ * - edx : Current character. Must be loaded using LoadCurrentCharacter
+ * before using any of the dispatch methods. Temporarily stores the
+ * index of capture start after a matching pass for a global regexp.
+ * - edi : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - esi : end of input (points to byte after last character in input).
- * - ebp : frame pointer. Used to access arguments, local variables and
+ * - ebp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - esp : points to tip of C stack.
- * - ecx : points to tip of backtrack stack
+ * - esp : Points to tip of C stack.
+ * - ecx : Points to tip of backtrack stack
*
* The registers eax and ebx are free to use for computations.
*
* Each call to a public method should retain this convention.
* The stack will have the following structure:
- * - Isolate* isolate (Address of the current isolate)
+ * - Isolate* isolate (address of the current isolate)
* - direct_call (if 1, direct call from JavaScript code, if 0
* call through the runtime system)
- * - stack_area_base (High end of the memory area to use as
+ * - stack_area_base (high end of the memory area to use as
* backtracking stack)
+ * - capture array size (may fit multiple sets of matches)
* - int* capture_array (int[num_saved_registers_], for output).
- * - end of input (Address of end of string)
- * - start of input (Address of first character in string)
+ * - end of input (address of end of string)
+ * - start of input (address of first character in string)
* - start index (character index of start)
* - String* input_string (location of a handle containing the string)
* --- frame alignment (if applicable) ---
@@ -72,9 +74,10 @@ namespace internal {
* - backup of caller esi
* - backup of caller edi
* - backup of caller ebx
+ * - success counter (only for global regexps to count matches).
* - Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a non-position.
- * - register 0 ebp[-4] (Only positions must be stored in the first
+ * - register 0 ebp[-4] (only positions must be stored in the first
* - register 1 ebp[-8] num_saved_registers_ registers)
* - ...
*
@@ -98,8 +101,10 @@ namespace internal {
RegExpMacroAssemblerIA32::RegExpMacroAssemblerIA32(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -311,6 +316,11 @@ void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase(
// uncaptured. In either case succeed immediately.
__ j(equal, &fallthrough);
+ // Check that there are sufficient characters left in the input.
+ __ mov(eax, edi);
+ __ add(eax, ebx);
+ BranchOrBacktrack(greater, on_no_match);
+
if (mode_ == ASCII) {
Label success;
Label fail;
@@ -482,15 +492,6 @@ void RegExpMacroAssemblerIA32::CheckNotBackReference(
}
-void RegExpMacroAssemblerIA32::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ mov(eax, register_location(reg1));
- __ cmp(eax, register_location(reg2));
- BranchOrBacktrack(not_equal, on_not_equal);
-}
-
-
void RegExpMacroAssemblerIA32::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
__ cmp(current_character(), c);
@@ -706,13 +707,16 @@ bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
void RegExpMacroAssemblerIA32::Fail() {
- ASSERT(FAILURE == 0); // Return value for failure is zero.
- __ Set(eax, Immediate(0));
+ STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
+ if (!global()) {
+ __ Set(eax, Immediate(FAILURE));
+ }
__ jmp(&exit_label_);
}
Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
+ Label return_eax;
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -731,6 +735,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ push(esi);
__ push(edi);
__ push(ebx); // Callee-save on MacOS.
+ __ push(Immediate(0)); // Number of successful matches in a global regexp.
__ push(Immediate(0)); // Make room for "input start - 1" constant.
// Check if we have space on the stack for registers.
@@ -750,13 +755,13 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ mov(eax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_eax);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(ebx);
__ or_(eax, eax);
// If returned value is non-zero, we exit with the returned value as result.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_eax);
__ bind(&stack_ok);
// Load start index for later use.
@@ -783,19 +788,8 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// position registers.
__ mov(Operand(ebp, kInputStartMinusOne), eax);
- if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
- // Fill saved registers with initial value = start offset - 1
- // Fill in stack push order, to avoid accessing across an unwritten
- // page (a problem on Windows).
- __ mov(ecx, kRegisterZero);
- Label init_loop;
- __ bind(&init_loop);
- __ mov(Operand(ebp, ecx, times_1, +0), eax);
- __ sub(ecx, Immediate(kPointerSize));
- __ cmp(ecx, kRegisterZero - num_saved_registers_ * kPointerSize);
- __ j(greater, &init_loop);
- }
- // Ensure that we have written to each stack page, in order. Skipping a page
+#ifdef WIN32
+ // Ensure that we write to each stack page, in order. Skipping a page
// on Windows can cause segmentation faults. Assuming page size is 4k.
const int kPageSize = 4096;
const int kRegistersPerPage = kPageSize / kPointerSize;
@@ -804,20 +798,45 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
i += kRegistersPerPage) {
__ mov(register_location(i), eax); // One write every page.
}
+#endif // WIN32
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmp(Operand(ebp, kStartIndex), Immediate(0));
+ __ j(not_equal, &load_char_start_regexp, Label::kNear);
+ __ mov(current_character(), '\n');
+ __ jmp(&start_regexp, Label::kNear);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+
+ // Initialize on-stack registers.
+ if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
+ // Fill saved registers with initial value = start offset - 1
+ // Fill in stack push order, to avoid accessing across an unwritten
+ // page (a problem on Windows).
+ if (num_saved_registers_ > 8) {
+ __ mov(ecx, kRegisterZero);
+ Label init_loop;
+ __ bind(&init_loop);
+ __ mov(Operand(ebp, ecx, times_1, 0), eax);
+ __ sub(ecx, Immediate(kPointerSize));
+ __ cmp(ecx, kRegisterZero - num_saved_registers_ * kPointerSize);
+ __ j(greater, &init_loop);
+ } else { // Unroll the loop.
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ mov(register_location(i), eax);
+ }
+ }
+ }
// Initialize backtrack stack pointer.
__ mov(backtrack_stackpointer(), Operand(ebp, kStackHighEnd));
- // Load previous char as initial value of current-character.
- Label at_start;
- __ cmp(Operand(ebp, kStartIndex), Immediate(0));
- __ j(equal, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ mov(current_character(), '\n');
- __ jmp(&start_label_);
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -836,6 +855,10 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
}
for (int i = 0; i < num_saved_registers_; i++) {
__ mov(eax, register_location(i));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in edx for the zero-length check later.
+ __ mov(edx, eax);
+ }
// Convert to index from start of string, not end.
__ add(eax, ecx);
if (mode_ == UC16) {
@@ -844,10 +867,57 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ mov(Operand(ebx, i * kPointerSize), eax);
}
}
- __ mov(eax, Immediate(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ // Increment success counter.
+ __ inc(Operand(ebp, kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ mov(ecx, Operand(ebp, kNumOutputRegisters));
+ __ sub(ecx, Immediate(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmp(ecx, Immediate(num_saved_registers_));
+ __ j(less, &exit_label_);
+
+ __ mov(Operand(ebp, kNumOutputRegisters), ecx);
+ // Advance the location for output.
+ __ add(Operand(ebp, kRegisterOutput),
+ Immediate(num_saved_registers_ * kPointerSize));
+
+ // Prepare eax to initialize registers with its value in the next run.
+ __ mov(eax, Operand(ebp, kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // edx: capture start index
+ __ cmp(edi, edx);
+ // Not a zero-length match, restart.
+ __ j(not_equal, &load_char_start_regexp);
+ // edi (offset from the end) is zero if we already reached the end.
+ __ test(edi, edi);
+ __ j(zero, &exit_label_, Label::kNear);
+ // Advance current position after a zero-length match.
+ if (mode_ == UC16) {
+ __ add(edi, Immediate(2));
+ } else {
+ __ inc(edi);
+ }
+ }
+
+ __ jmp(&load_char_start_regexp);
+ } else {
+ __ mov(eax, Immediate(SUCCESS));
+ }
}
- // Exit and return eax
+
__ bind(&exit_label_);
+ if (global()) {
+ // Return the number of successful captures.
+ __ mov(eax, Operand(ebp, kSuccessfulCaptures));
+ }
+
+ __ bind(&return_eax);
// Skip esp past regexp registers.
__ lea(esp, Operand(ebp, kBackup_ebx));
// Restore callee-save registers.
@@ -877,7 +947,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ or_(eax, eax);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_eax);
__ pop(edi);
__ pop(backtrack_stackpointer());
@@ -924,7 +994,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ mov(eax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_eax);
}
CodeDesc code_desc;
@@ -1043,8 +1113,9 @@ void RegExpMacroAssemblerIA32::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerIA32::Succeed() {
+bool RegExpMacroAssemblerIA32::Succeed() {
__ jmp(&success_label_);
+ return global();
}
diff --git a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
index 78cd069..7aea385 100644
--- a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2008-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,17 +34,10 @@
namespace v8 {
namespace internal {
-#ifdef V8_INTERPRETED_REGEXP
-class RegExpMacroAssemblerIA32: public RegExpMacroAssembler {
- public:
- RegExpMacroAssemblerIA32() { }
- virtual ~RegExpMacroAssemblerIA32() { }
-};
-
-#else // V8_INTERPRETED_REGEXP
+#ifndef V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerIA32(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerIA32(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerIA32();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -69,7 +62,6 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -111,7 +103,7 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -135,7 +127,11 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ // For the case of global regular expression, we have room to store at least
+ // one set of capture results. For the case of non-global regexp, we ignore
+ // this value.
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
// Below the frame pointer - local stack variables.
@@ -144,7 +140,8 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kBackup_esi = kFramePointer - kPointerSize;
static const int kBackup_edi = kBackup_esi - kPointerSize;
static const int kBackup_ebx = kBackup_edi - kPointerSize;
- static const int kInputStartMinusOne = kBackup_ebx - kPointerSize;
+ static const int kSuccessfulCaptures = kBackup_ebx - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
diff --git a/src/3rdparty/v8/src/ia32/simulator-ia32.h b/src/3rdparty/v8/src/ia32/simulator-ia32.h
index 13ddf35..478d4ce 100644
--- a/src/3rdparty/v8/src/ia32/simulator-ia32.h
+++ b/src/3rdparty/v8/src/ia32/simulator-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,12 +40,12 @@ namespace internal {
typedef int (*regexp_matcher)(String*, int, const byte*,
- const byte*, int*, Address, int, Isolate*);
+ const byte*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address should
// expect eight int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
- (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
diff --git a/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc b/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
index e148e2f..11efb72 100644
--- a/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
@@ -276,12 +276,12 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
Register prototype,
Label* miss) {
// Check we're still in the same context.
- __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
- masm->isolate()->global());
+ __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
+ masm->isolate()->global_object());
__ j(not_equal, miss);
// Get the global function with the given index.
Handle<JSFunction> function(
- JSFunction::cast(masm->isolate()->global_context()->get(index)));
+ JSFunction::cast(masm->isolate()->native_context()->get(index)));
// Load its initial map. The global functions all have initial maps.
__ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
// Load the prototype from the initial map.
@@ -745,10 +745,22 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
@@ -757,7 +769,32 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register, push
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -768,11 +805,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ pop(scratch); // Return address.
+ __ pop(scratch1); // Return address.
__ push(receiver_reg);
__ push(Immediate(transition));
__ push(eax);
- __ push(scratch);
+ __ push(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
@@ -782,10 +819,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
- Immediate(transition));
+ // Update the map of the object.
+ __ mov(scratch1, Immediate(transition));
+ __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -804,19 +850,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array (optimistically).
- __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ mov(FieldOperand(scratch, offset), eax);
+ __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ mov(FieldOperand(scratch1, offset), eax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ mov(name_reg, eax);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -1006,6 +1052,58 @@ void StubCompiler::GenerateLoadField(Handle<JSObject> object,
}
+void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss) {
+ ASSERT(!receiver.is(scratch2));
+ ASSERT(!receiver.is(scratch3));
+ Register dictionary = scratch1;
+ bool must_preserve_dictionary_reg = receiver.is(dictionary);
+
+ // Load the properties dictionary.
+ if (must_preserve_dictionary_reg) {
+ __ push(dictionary);
+ }
+ __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
+
+ // Probe the dictionary.
+ Label probe_done, pop_and_miss;
+ StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
+ &pop_and_miss,
+ &probe_done,
+ dictionary,
+ name_reg,
+ scratch2,
+ scratch3);
+ __ bind(&pop_and_miss);
+ if (must_preserve_dictionary_reg) {
+ __ pop(dictionary);
+ }
+ __ jmp(miss);
+ __ bind(&probe_done);
+
+ // If probing finds an entry in the dictionary, scratch2 contains the
+ // index into the dictionary. Check that the value is the callback.
+ Register index = scratch2;
+ const int kElementsStartOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ mov(scratch3,
+ Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
+ if (must_preserve_dictionary_reg) {
+ __ pop(dictionary);
+ }
+ __ cmp(scratch3, callback);
+ __ j(not_equal, miss);
+}
+
+
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Handle<JSObject> holder,
Register receiver,
@@ -1013,6 +1111,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
+ Register scratch4,
Handle<AccessorInfo> callback,
Handle<String> name,
Label* miss) {
@@ -1023,6 +1122,11 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register reg = CheckPrototypes(object, receiver, holder, scratch1,
scratch2, scratch3, name, miss);
+ if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
+ GenerateDictionaryLoadCallback(
+ reg, name_reg, scratch1, scratch2, scratch3, callback, name, miss);
+ }
+
// Insert additional parameters into the stack frame above return address.
ASSERT(!scratch3.is(reg));
__ pop(scratch3); // Get return address to place it below.
@@ -1111,12 +1215,13 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1195,7 +1300,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), eax, holder_reg,
@@ -1368,7 +1473,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -1462,16 +1567,31 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(ebx, &call_builtin);
+ __ CheckFastSmiElements(ebx, &call_builtin);
// edi: elements array
// edx: receiver
// ebx: map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
ebx,
edi,
+ &try_holey_map);
+
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ // Restore edi.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ ebx,
+ edi,
&call_builtin);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
// Restore edi.
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
__ bind(&fast_object);
@@ -1900,7 +2020,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2030,7 +2150,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2135,7 +2255,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2382,7 +2502,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2443,7 +2563,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
GenerateMissBranch();
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2460,8 +2580,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Generate store field code. Trashes the name register.
- GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
-
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ edx, ecx, ebx, edi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
__ mov(ecx, Immediate(name)); // restore name
@@ -2469,14 +2594,17 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<JSObject> object,
- Handle<AccessorInfo> callback,
- Handle<String> name) {
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : name
@@ -2484,19 +2612,14 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -- esp[0] : return address
// -----------------------------------
Label miss;
+ // Check that the maps haven't changed, preserving the value register.
+ __ push(eax);
+ __ JumpIfSmi(edx, &miss);
+ CheckPrototypes(receiver, edx, holder, ebx, eax, edi, name, &miss);
+ __ pop(eax); // restore value
- // Check that the map of the object hasn't changed.
- __ CheckMap(edx, Handle<Map>(object->map()),
- &miss, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(edx, ebx, &miss);
- }
-
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+ // Stub never generated for non-global objects that require access checks.
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
__ pop(ebx); // remove the return address
__ push(edx); // receiver
@@ -2512,11 +2635,89 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// Handle store cache miss.
__ bind(&miss);
+ __ pop(eax);
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void StoreStubCompiler::GenerateStoreViaSetter(
+ MacroAssembler* masm,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(eax);
+
+ if (!setter.is_null()) {
+ // Call the JavaScript setter with receiver and value on the stack.
+ __ push(edx);
+ __ push(eax);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(eax);
+
+ // Restore context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed, preserving the name register.
+ __ push(ecx);
+ __ JumpIfSmi(edx, &miss);
+ CheckPrototypes(receiver, edx, holder, ebx, ecx, edi, name, &miss);
+ __ pop(ecx);
+
+ GenerateStoreViaSetter(masm(), setter);
+
+ __ bind(&miss);
+ __ pop(ecx);
Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2562,7 +2763,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2610,7 +2811,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2634,7 +2835,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ j(not_equal, &miss);
// Generate store field code. Trashes the name register.
- GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ edx, ecx, ebx, edi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2643,7 +2850,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
@@ -2666,7 +2875,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -2701,7 +2910,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
__ jmp(miss_ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -2741,7 +2950,7 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NONEXISTENT, factory()->empty_string());
+ return GetCode(Code::NONEXISTENT, factory()->empty_string());
}
@@ -2761,7 +2970,7 @@ Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2777,13 +2986,76 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
// -----------------------------------
Label miss;
- GenerateLoadCallback(object, holder, edx, ecx, ebx, eax, edi, callback,
- name, &miss);
+ GenerateLoadCallback(object, holder, edx, ecx, ebx, eax, edi, no_reg,
+ callback, name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ if (!getter.is_null()) {
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(edx);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // Restore context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(edx, &miss);
+ CheckPrototypes(receiver, edx, holder, ebx, eax, edi, name, &miss);
+
+ GenerateLoadViaGetter(masm(), getter);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2803,7 +3075,7 @@ Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2829,7 +3101,7 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2877,7 +3149,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2906,7 +3178,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2929,15 +3201,15 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
__ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadCallback(receiver, holder, edx, ecx, ebx, eax, edi, callback,
- name, &miss);
+ GenerateLoadCallback(receiver, holder, edx, ecx, ebx, eax, edi, no_reg,
+ callback, name, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_callback(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2967,7 +3239,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2998,7 +3270,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -3024,7 +3296,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3050,7 +3322,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3076,7 +3348,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3096,7 +3368,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -3123,7 +3395,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -3149,6 +3421,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
#endif
// Load the initial map and verify that it is in fact a map.
+ // edi: constructor
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ JumpIfSmi(ebx, &generic_stub_call);
@@ -3157,19 +3430,23 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
#ifdef DEBUG
// Cannot construct functions this way.
- // edi: constructor
// ebx: initial map
__ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
- __ Assert(not_equal, "Function constructed by construct stub.");
+ __ Check(not_equal, "Function constructed by construct stub.");
#endif
// Now allocate the JSObject on the heap by moving the new space allocation
// top forward.
- // edi: constructor
// ebx: initial map
+ ASSERT(function->has_initial_map());
+ int instance_size = function->initial_map()->instance_size();
+#ifdef DEBUG
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ shl(ecx, kPointerSizeLog2);
- __ AllocateInNewSpace(ecx, edx, ecx, no_reg,
+ __ cmp(ecx, Immediate(instance_size));
+ __ Check(equal, "Instance size of initial map changed.");
+#endif
+ __ AllocateInNewSpace(instance_size, edx, ecx, no_reg,
&generic_stub_call, NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
@@ -3229,7 +3506,6 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
}
// Fill the unused in-object property fields with undefined.
- ASSERT(function->has_initial_map());
for (int i = shared->this_property_assignments_count();
i < function->initial_map()->inobject_properties();
i++) {
@@ -3818,7 +4094,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi or a heap number convertible to a smi.
GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(eax, &transition_elements_kind);
}
@@ -3843,7 +4119,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &miss_force_generic);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
// ecx is a smi, use times_half_pointer_size instead of
// times_pointer_size
__ mov(FieldOperand(edi,
@@ -3851,7 +4127,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
times_half_pointer_size,
FixedArray::kHeaderSize), eax);
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
// Do the store and update the write barrier.
// ecx is a smi, use times_half_pointer_size instead of
// times_pointer_size
diff --git a/src/3rdparty/v8/src/ic-inl.h b/src/3rdparty/v8/src/ic-inl.h
index 6a86921..49b6ef9 100644
--- a/src/3rdparty/v8/src/ic-inl.h
+++ b/src/3rdparty/v8/src/ic-inl.h
@@ -40,7 +40,7 @@ namespace internal {
Address IC::address() const {
// Get the address of the call.
- Address result = pc() - Assembler::kCallTargetAddressOffset;
+ Address result = Assembler::target_address_from_return_address(pc());
#ifdef ENABLE_DEBUGGER_SUPPORT
Debug* debug = Isolate::Current()->debug();
@@ -79,6 +79,7 @@ Code* IC::GetTargetAtAddress(Address address) {
void IC::SetTargetAtAddress(Address address, Code* target) {
ASSERT(target->is_inline_cache_stub() || target->is_compare_ic_stub());
+ Heap* heap = target->GetHeap();
Code* old_target = GetTargetAtAddress(address);
#ifdef DEBUG
// STORE_IC and KEYED_STORE_IC use Code::extra_ic_state() to mark
@@ -90,8 +91,11 @@ void IC::SetTargetAtAddress(Address address, Code* target) {
}
#endif
Assembler::set_target_address_at(address, target->instruction_start());
- target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
- target);
+ if (heap->gc_state() == Heap::MARK_COMPACT) {
+ heap->mark_compact_collector()->RecordCodeTargetPatch(address, target);
+ } else {
+ heap->incremental_marking()->RecordCodeTargetPatch(address, target);
+ }
PostPatching(address, target, old_target);
}
diff --git a/src/3rdparty/v8/src/ic.cc b/src/3rdparty/v8/src/ic.cc
index be5f752..fe31ef1 100644
--- a/src/3rdparty/v8/src/ic.cc
+++ b/src/3rdparty/v8/src/ic.cc
@@ -158,7 +158,7 @@ Address IC::OriginalCodeAddress() const {
// Get the address of the call site in the active code. This is the
// place where the call to DebugBreakXXX is and where the IC
// normally would be.
- Address addr = pc() - Assembler::kCallTargetAddressOffset;
+ Address addr = Assembler::target_address_from_return_address(pc());
// Return the address in the original code. This is the place where
// the call which has been overwritten by the DebugBreakXXX resides
// and the place where the inline cache system should look.
@@ -320,13 +320,17 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) {
int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
target->ic_state());
// Not all Code objects have TypeFeedbackInfo.
- if (delta != 0 && host->type_feedback_info()->IsTypeFeedbackInfo()) {
+ if (host->type_feedback_info()->IsTypeFeedbackInfo() && delta != 0) {
TypeFeedbackInfo* info =
TypeFeedbackInfo::cast(host->type_feedback_info());
- info->set_ic_with_type_info_count(
- info->ic_with_type_info_count() + delta);
+ info->change_ic_with_type_info_count(delta);
}
}
+ if (host->type_feedback_info()->IsTypeFeedbackInfo()) {
+ TypeFeedbackInfo* info =
+ TypeFeedbackInfo::cast(host->type_feedback_info());
+ info->change_own_type_change_checksum();
+ }
if (FLAG_watch_ic_patching) {
host->set_profiler_ticks(0);
Isolate::Current()->runtime_profiler()->NotifyICChanged();
@@ -435,9 +439,7 @@ static void LookupForRead(Handle<Object> object,
// Besides normal conditions (property not found or it's not
// an interceptor), bail out if lookup is not cacheable: we won't
// be able to IC it anyway and regular lookup should work fine.
- if (!lookup->IsFound()
- || (lookup->type() != INTERCEPTOR)
- || !lookup->IsCacheable()) {
+ if (!lookup->IsInterceptor() || !lookup->IsCacheable()) {
return;
}
@@ -447,14 +449,14 @@ static void LookupForRead(Handle<Object> object,
}
holder->LocalLookupRealNamedProperty(*name, lookup);
- if (lookup->IsProperty()) {
- ASSERT(lookup->type() != INTERCEPTOR);
+ if (lookup->IsFound()) {
+ ASSERT(!lookup->IsInterceptor());
return;
}
Handle<Object> proto(holder->GetPrototype());
if (proto->IsNull()) {
- lookup->NotFound();
+ ASSERT(!lookup->IsFound());
return;
}
@@ -535,7 +537,7 @@ MaybeObject* CallICBase::LoadFunction(State state,
LookupResult lookup(isolate());
LookupForRead(object, name, &lookup);
- if (!lookup.IsProperty()) {
+ if (!lookup.IsFound()) {
// If the object does not have the requested property, check which
// exception we need to throw.
return IsContextual(object)
@@ -554,7 +556,7 @@ MaybeObject* CallICBase::LoadFunction(State state,
Object::GetProperty(object, object, &lookup, name, &attr);
RETURN_IF_EMPTY_HANDLE(isolate(), result);
- if (lookup.type() == INTERCEPTOR && attr == ABSENT) {
+ if (lookup.IsInterceptor() && attr == ABSENT) {
// If the object does not have the requested property, check which
// exception we need to throw.
return IsContextual(object)
@@ -902,7 +904,7 @@ MaybeObject* LoadIC::Load(State state,
LookupForRead(object, name, &lookup);
// If we did not find a property, check if we need to throw an exception.
- if (!lookup.IsProperty()) {
+ if (!lookup.IsFound()) {
if (IsContextual(object)) {
return ReferenceError("not_defined", name);
}
@@ -915,8 +917,7 @@ MaybeObject* LoadIC::Load(State state,
}
PropertyAttributes attr;
- if (lookup.IsFound() &&
- (lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) {
+ if (lookup.IsInterceptor() || lookup.IsHandler()) {
// Get the property.
Handle<Object> result =
Object::GetProperty(object, object, &lookup, name, &attr);
@@ -988,13 +989,29 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
}
break;
case CALLBACKS: {
- Handle<Object> callback_object(lookup->GetCallbackObject());
- if (!callback_object->IsAccessorInfo()) return;
- Handle<AccessorInfo> callback =
- Handle<AccessorInfo>::cast(callback_object);
- if (v8::ToCData<Address>(callback->getter()) == 0) return;
- code = isolate()->stub_cache()->ComputeLoadCallback(
- name, receiver, holder, callback);
+#ifdef _WIN32_WCE
+ // Disable optimization for wince as the calling convention looks different.
+ return;
+#endif
+ Handle<Object> callback(lookup->GetCallbackObject());
+ if (callback->IsAccessorInfo()) {
+ Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(callback);
+ if (v8::ToCData<Address>(info->getter()) == 0) return;
+ if (!info->IsCompatibleReceiver(*receiver)) return;
+ code = isolate()->stub_cache()->ComputeLoadCallback(
+ name, receiver, holder, info);
+ } else if (callback->IsAccessorPair()) {
+ Handle<Object> getter(Handle<AccessorPair>::cast(callback)->getter());
+ if (!getter->IsJSFunction()) return;
+ if (holder->IsGlobalObject()) return;
+ if (!holder->HasFastProperties()) return;
+ code = isolate()->stub_cache()->ComputeLoadViaGetter(
+ name, receiver, holder, Handle<JSFunction>::cast(getter));
+ } else {
+ ASSERT(callback->IsForeign());
+ // No IC support for old-style native accessors.
+ return;
+ }
break;
}
case INTERCEPTOR:
@@ -1156,7 +1173,7 @@ MaybeObject* KeyedLoadIC::Load(State state,
LookupForRead(object, name, &lookup);
// If we did not find a property, check if we need to throw an exception.
- if (!lookup.IsProperty() && IsContextual(object)) {
+ if (!lookup.IsFound() && IsContextual(object)) {
return ReferenceError("not_defined", name);
}
@@ -1165,7 +1182,7 @@ MaybeObject* KeyedLoadIC::Load(State state,
}
PropertyAttributes attr;
- if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
+ if (lookup.IsInterceptor()) {
// Get the property.
Handle<Object> result =
Object::GetProperty(object, object, &lookup, name, &attr);
@@ -1256,6 +1273,7 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
Handle<AccessorInfo> callback =
Handle<AccessorInfo>::cast(callback_object);
if (v8::ToCData<Address>(callback->getter()) == 0) return;
+ if (!callback->IsCompatibleReceiver(*receiver)) return;
code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
name, receiver, holder, callback);
break;
@@ -1288,15 +1306,16 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
static bool StoreICableLookup(LookupResult* lookup) {
// Bail out if we didn't find a result.
- if (!lookup->IsFound() || lookup->type() == NULL_DESCRIPTOR) return false;
+ if (!lookup->IsFound()) return false;
// Bail out if inline caching is not allowed.
if (!lookup->IsCacheable()) return false;
// If the property is read-only, we leave the IC in its current state.
- if (lookup->IsReadOnly()) return false;
-
- return true;
+ if (lookup->IsTransition()) {
+ return !lookup->GetTransitionDetails().IsReadOnly();
+ }
+ return !lookup->IsReadOnly();
}
@@ -1304,11 +1323,16 @@ static bool LookupForWrite(Handle<JSObject> receiver,
Handle<String> name,
LookupResult* lookup) {
receiver->LocalLookup(*name, lookup);
+ if (!lookup->IsFound()) {
+ receiver->map()->LookupTransition(*receiver, *name, lookup);
+ }
if (!StoreICableLookup(lookup)) {
- return false;
+ // 2nd chance: There can be accessors somewhere in the prototype chain.
+ receiver->Lookup(*name, lookup);
+ return lookup->IsPropertyCallbacks() && StoreICableLookup(lookup);
}
- if (lookup->type() == INTERCEPTOR &&
+ if (lookup->IsInterceptor() &&
receiver->GetNamedInterceptor()->setter()->IsUndefined()) {
receiver->LocalLookupRealNamedProperty(*name, lookup);
return StoreICableLookup(lookup);
@@ -1357,6 +1381,11 @@ MaybeObject* StoreIC::Store(State state,
return *value;
}
+ // Observed objects are always modified through the runtime.
+ if (FLAG_harmony_observation && receiver->map()->is_observed()) {
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
+ }
+
// Use specialized code for setting the length of arrays with fast
// properties. Slow properties might indicate redefinition of the
// length property.
@@ -1375,12 +1404,13 @@ MaybeObject* StoreIC::Store(State state,
}
// Lookup the property locally in the receiver.
- if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
+ if (!receiver->IsJSGlobalProxy()) {
LookupResult lookup(isolate());
if (LookupForWrite(receiver, name, &lookup)) {
- // Generate a stub for this store.
- UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
+ if (FLAG_use_ic) { // Generate a stub for this store.
+ UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
+ }
} else {
// Strict mode doesn't allow setting non-existent global property
// or an assignment to a read only property.
@@ -1408,7 +1438,11 @@ MaybeObject* StoreIC::Store(State state,
}
// Set the property.
- return receiver->SetProperty(*name, *value, NONE, strict_mode);
+ return receiver->SetProperty(*name,
+ *value,
+ NONE,
+ strict_mode,
+ JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED);
}
@@ -1420,10 +1454,10 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
Handle<Object> value) {
ASSERT(!receiver->IsJSGlobalProxy());
ASSERT(StoreICableLookup(lookup));
+ ASSERT(lookup->IsFound());
+
// These are not cacheable, so we never see such LookupResults here.
- ASSERT(lookup->type() != HANDLER);
- // We get only called for properties or transitions, see StoreICableLookup.
- ASSERT(lookup->type() != NULL_DESCRIPTOR);
+ ASSERT(!lookup->IsHandler());
// If the property has a non-field type allowing map transitions
// where there is extra room in the object, we leave the IC in its
@@ -1433,6 +1467,7 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
// Compute the code stub for this store; used for rewriting to
// monomorphic state and making sure that the code stub is in the
// stub cache.
+ Handle<JSObject> holder(lookup->holder());
Handle<Code> code;
switch (type) {
case FIELD:
@@ -1442,14 +1477,6 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
Handle<Map>::null(),
strict_mode);
break;
- case MAP_TRANSITION: {
- if (lookup->GetAttributes() != NONE) return;
- Handle<Map> transition(lookup->GetTransitionMap());
- int index = transition->PropertyIndexFor(*name);
- code = isolate()->stub_cache()->ComputeStoreField(
- name, receiver, index, transition, strict_mode);
- break;
- }
case NORMAL:
if (receiver->IsGlobalObject()) {
// The stub generated for the global object picks the value directly
@@ -1460,18 +1487,32 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
code = isolate()->stub_cache()->ComputeStoreGlobal(
name, global, cell, strict_mode);
} else {
- if (lookup->holder() != *receiver) return;
+ if (!holder.is_identical_to(receiver)) return;
code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
}
break;
case CALLBACKS: {
- Handle<Object> callback_object(lookup->GetCallbackObject());
- if (!callback_object->IsAccessorInfo()) return;
- Handle<AccessorInfo> callback =
- Handle<AccessorInfo>::cast(callback_object);
- if (v8::ToCData<Address>(callback->setter()) == 0) return;
- code = isolate()->stub_cache()->ComputeStoreCallback(
- name, receiver, callback, strict_mode);
+ Handle<Object> callback(lookup->GetCallbackObject());
+ if (callback->IsAccessorInfo()) {
+ Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(callback);
+ if (v8::ToCData<Address>(info->setter()) == 0) return;
+ if (!holder->HasFastProperties()) return;
+ if (!info->IsCompatibleReceiver(*receiver)) return;
+ code = isolate()->stub_cache()->ComputeStoreCallback(
+ name, receiver, holder, info, strict_mode);
+ } else if (callback->IsAccessorPair()) {
+ Handle<Object> setter(Handle<AccessorPair>::cast(callback)->setter());
+ if (!setter->IsJSFunction()) return;
+ if (holder->IsGlobalObject()) return;
+ if (!holder->HasFastProperties()) return;
+ code = isolate()->stub_cache()->ComputeStoreViaSetter(
+ name, receiver, holder, Handle<JSFunction>::cast(setter),
+ strict_mode);
+ } else {
+ ASSERT(callback->IsForeign());
+ // No IC support for old-style native accessors.
+ return;
+ }
break;
}
case INTERCEPTOR:
@@ -1480,11 +1521,24 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
name, receiver, strict_mode);
break;
case CONSTANT_FUNCTION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
return;
+ case TRANSITION: {
+ Handle<Map> transition(lookup->GetTransitionTarget());
+ int descriptor = transition->LastAdded();
+
+ DescriptorArray* target_descriptors = transition->instance_descriptors();
+ PropertyDetails details = target_descriptors->GetDetails(descriptor);
+
+ if (details.type() != FIELD || details.attributes() != NONE) return;
+
+ int field_index = target_descriptors->GetFieldIndex(descriptor);
+ code = isolate()->stub_cache()->ComputeStoreField(
+ name, receiver, field_index, transition, strict_mode);
+
+ break;
+ }
+ case NONEXISTENT:
case HANDLER:
- case NULL_DESCRIPTOR:
UNREACHABLE();
return;
}
@@ -1557,44 +1611,49 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
// Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
// via megamorphic stubs, since they don't have a map in their relocation info
// and so the stubs can't be harvested for the object needed for a map check.
- if (target()->type() != NORMAL) {
+ if (target()->type() != Code::NORMAL) {
TRACE_GENERIC_IC("KeyedIC", "non-NORMAL target type");
return generic_stub;
}
bool monomorphic = false;
+ bool is_transition_stub = IsTransitionStubKind(stub_kind);
+ Handle<Map> receiver_map(receiver->map());
+ Handle<Map> monomorphic_map = receiver_map;
MapHandleList target_receiver_maps;
- if (ic_state != UNINITIALIZED && ic_state != PREMONOMORPHIC) {
+ if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
+ // Optimistically assume that ICs that haven't reached the MONOMORPHIC state
+ // yet will do so and stay there.
+ monomorphic = true;
+ } else {
GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps);
- }
- if (!IsTransitionStubKind(stub_kind)) {
- if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
- monomorphic = true;
- } else {
- if (ic_state == MONOMORPHIC) {
- // The first time a receiver is seen that is a transitioned version of
- // the previous monomorphic receiver type, assume the new ElementsKind
- // is the monomorphic type. This benefits global arrays that only
- // transition once, and all call sites accessing them are faster if they
- // remain monomorphic. If this optimistic assumption is not true, the IC
- // will miss again and it will become polymorphic and support both the
- // untransitioned and transitioned maps.
- monomorphic = IsMoreGeneralElementsKindTransition(
- target_receiver_maps.at(0)->elements_kind(),
- receiver->GetElementsKind());
- }
+ if (ic_state == MONOMORPHIC && (is_transition_stub || stub_kind == LOAD)) {
+ // The first time a receiver is seen that is a transitioned version of the
+ // previous monomorphic receiver type, assume the new ElementsKind is the
+ // monomorphic type. This benefits global arrays that only transition
+ // once, and all call sites accessing them are faster if they remain
+ // monomorphic. If this optimistic assumption is not true, the IC will
+ // miss again and it will become polymorphic and support both the
+ // untransitioned and transitioned maps.
+ monomorphic = IsMoreGeneralElementsKindTransition(
+ target_receiver_maps.at(0)->elements_kind(),
+ receiver->GetElementsKind());
}
}
if (monomorphic) {
+ if (is_transition_stub) {
+ monomorphic_map = ComputeTransitionedMap(receiver, stub_kind);
+ ASSERT(*monomorphic_map != *receiver_map);
+ stub_kind = GetNoTransitionStubKind(stub_kind);
+ }
return ComputeMonomorphicStub(
- receiver, stub_kind, strict_mode, generic_stub);
+ monomorphic_map, stub_kind, strict_mode, generic_stub);
}
ASSERT(target() != *generic_stub);
// Determine the list of receiver maps that this call site has seen,
// adding the map that was just encountered.
- Handle<Map> receiver_map(receiver->map());
bool map_added =
AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
if (IsTransitionStubKind(stub_kind)) {
@@ -1644,8 +1703,7 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
return string_stub();
} else {
ASSERT(receiver_map->has_dictionary_elements() ||
- receiver_map->has_fast_elements() ||
- receiver_map->has_fast_smi_only_elements() ||
+ receiver_map->has_fast_smi_or_object_elements() ||
receiver_map->has_fast_double_elements() ||
receiver_map->has_external_array_elements());
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
@@ -1656,17 +1714,16 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
}
-Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<JSObject> receiver,
+Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<Map> receiver_map,
StubKind stub_kind,
StrictModeFlag strict_mode,
Handle<Code> generic_stub) {
- if (receiver->HasFastElements() ||
- receiver->HasFastSmiOnlyElements() ||
- receiver->HasExternalArrayElements() ||
- receiver->HasFastDoubleElements() ||
- receiver->HasDictionaryElements()) {
+ ElementsKind elements_kind = receiver_map->elements_kind();
+ if (IsFastElementsKind(elements_kind) ||
+ IsExternalArrayElementsKind(elements_kind) ||
+ IsDictionaryElementsKind(elements_kind)) {
return isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
- receiver, stub_kind, strict_mode);
+ receiver_map, stub_kind, strict_mode);
} else {
return generic_stub;
}
@@ -1681,15 +1738,26 @@ Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT:
case KeyedIC::STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT:
return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
- break;
case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE:
case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE:
return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
- break;
- default:
+ case KeyedIC::STORE_TRANSITION_HOLEY_SMI_TO_OBJECT:
+ case KeyedIC::STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
+ return JSObject::GetElementsTransitionMap(receiver,
+ FAST_HOLEY_ELEMENTS);
+ case KeyedIC::STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE:
+ return JSObject::GetElementsTransitionMap(receiver,
+ FAST_HOLEY_DOUBLE_ELEMENTS);
+ case KeyedIC::LOAD:
+ case KeyedIC::STORE_NO_TRANSITION:
+ case KeyedIC::STORE_AND_GROW_NO_TRANSITION:
UNREACHABLE();
- return Handle<Map>::null();
+ break;
}
+ return Handle<Map>::null();
}
@@ -1749,30 +1817,54 @@ KeyedIC::StubKind KeyedStoreIC::GetStubKind(Handle<JSObject> receiver,
if (allow_growth) {
// Handle growing array in stub if necessary.
- if (receiver->HasFastSmiOnlyElements()) {
+ if (receiver->HasFastSmiElements()) {
if (value->IsHeapNumber()) {
- return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE;
+ } else {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
+ }
}
if (value->IsHeapObject()) {
- return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT;
+ } else {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
+ }
}
} else if (receiver->HasFastDoubleElements()) {
if (!value->IsSmi() && !value->IsHeapNumber()) {
- return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
+ } else {
+ return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
+ }
}
}
return STORE_AND_GROW_NO_TRANSITION;
} else {
// Handle only in-bounds elements accesses.
- if (receiver->HasFastSmiOnlyElements()) {
+ if (receiver->HasFastSmiElements()) {
if (value->IsHeapNumber()) {
- return STORE_TRANSITION_SMI_TO_DOUBLE;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE;
+ } else {
+ return STORE_TRANSITION_SMI_TO_DOUBLE;
+ }
} else if (value->IsHeapObject()) {
- return STORE_TRANSITION_SMI_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_SMI_TO_OBJECT;
+ } else {
+ return STORE_TRANSITION_SMI_TO_OBJECT;
+ }
}
} else if (receiver->HasFastDoubleElements()) {
if (!value->IsSmi() && !value->IsHeapNumber()) {
- return STORE_TRANSITION_DOUBLE_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
+ } else {
+ return STORE_TRANSITION_DOUBLE_TO_OBJECT;
+ }
}
}
return STORE_NO_TRANSITION;
@@ -1819,7 +1911,8 @@ MaybeObject* KeyedStoreIC::Store(State state,
}
// Update inline cache and stub cache.
- if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
+ if (FLAG_use_ic && !receiver->IsJSGlobalProxy() &&
+ !(FLAG_harmony_observation && receiver->map()->is_observed())) {
LookupResult lookup(isolate());
if (LookupForWrite(receiver, name, &lookup)) {
UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
@@ -1831,8 +1924,10 @@ MaybeObject* KeyedStoreIC::Store(State state,
}
// Do not use ICs for objects that require access checks (including
- // the global object).
- bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
+ // the global object), or are observed.
+ bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded() &&
+ !(FLAG_harmony_observation && object->IsJSObject() &&
+ JSObject::cast(*object)->map()->is_observed());
ASSERT(!(use_ic && object->IsJSGlobalProxy()));
if (use_ic) {
@@ -1872,10 +1967,10 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
Handle<Object> value) {
ASSERT(!receiver->IsJSGlobalProxy());
ASSERT(StoreICableLookup(lookup));
+ ASSERT(lookup->IsFound());
+
// These are not cacheable, so we never see such LookupResults here.
- ASSERT(lookup->type() != HANDLER);
- // We get only called for properties or transitions, see StoreICableLookup.
- ASSERT(lookup->type() != NULL_DESCRIPTOR);
+ ASSERT(!lookup->IsHandler());
// If the property has a non-field type allowing map transitions
// where there is extra room in the object, we leave the IC in its
@@ -1893,21 +1988,25 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
name, receiver, lookup->GetFieldIndex(),
Handle<Map>::null(), strict_mode);
break;
- case MAP_TRANSITION:
- if (lookup->GetAttributes() == NONE) {
- Handle<Map> transition(lookup->GetTransitionMap());
- int index = transition->PropertyIndexFor(*name);
+ case TRANSITION: {
+ Handle<Map> transition(lookup->GetTransitionTarget());
+ int descriptor = transition->LastAdded();
+
+ DescriptorArray* target_descriptors = transition->instance_descriptors();
+ PropertyDetails details = target_descriptors->GetDetails(descriptor);
+
+ if (details.type() == FIELD && details.attributes() == NONE) {
+ int field_index = target_descriptors->GetFieldIndex(descriptor);
code = isolate()->stub_cache()->ComputeKeyedStoreField(
- name, receiver, index, transition, strict_mode);
+ name, receiver, field_index, transition, strict_mode);
break;
}
// fall through.
+ }
case NORMAL:
case CONSTANT_FUNCTION:
case CALLBACKS:
case INTERCEPTOR:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
// Always rewrite to the generic case so that we do not
// repeatedly try to rewrite.
code = (strict_mode == kStrictMode)
@@ -1915,7 +2014,7 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
: generic_stub();
break;
case HANDLER:
- case NULL_DESCRIPTOR:
+ case NONEXISTENT:
UNREACHABLE();
return;
}
@@ -2050,7 +2149,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
// The length property has to be a writable callback property.
LookupResult debug_lookup(isolate);
receiver->LocalLookup(isolate->heap()->length_symbol(), &debug_lookup);
- ASSERT(debug_lookup.type() == CALLBACKS && !debug_lookup.IsReadOnly());
+ ASSERT(debug_lookup.IsPropertyCallbacks() && !debug_lookup.IsReadOnly());
#endif
Object* result;
@@ -2491,7 +2590,8 @@ CompareIC::State CompareIC::ComputeState(Code* target) {
Token::Value CompareIC::ComputeOperation(Code* target) {
ASSERT(target->major_key() == CodeStub::CompareIC);
- return static_cast<Token::Value>(target->compare_operation());
+ return static_cast<Token::Value>(
+ target->compare_operation() + Token::EQ);
}
@@ -2501,7 +2601,7 @@ const char* CompareIC::GetStateName(State state) {
case SMIS: return "SMIS";
case HEAP_NUMBERS: return "HEAP_NUMBERS";
case OBJECTS: return "OBJECTS";
- case KNOWN_OBJECTS: return "OBJECTS";
+ case KNOWN_OBJECTS: return "KNOWN_OBJECTS";
case SYMBOLS: return "SYMBOLS";
case STRINGS: return "STRINGS";
case GENERIC: return "GENERIC";
diff --git a/src/3rdparty/v8/src/ic.h b/src/3rdparty/v8/src/ic.h
index 36af768..389c845 100644
--- a/src/3rdparty/v8/src/ic.h
+++ b/src/3rdparty/v8/src/ic.h
@@ -111,7 +111,7 @@ class IC {
RelocInfo::Mode ComputeMode();
bool IsQmlGlobal(Handle<Object> receiver) {
- JSObject* qml_global = isolate_->context()->qml_global();
+ JSObject* qml_global = isolate_->context()->qml_global_object();
return !qml_global->IsUndefined() && qml_global == *receiver;
}
@@ -384,10 +384,16 @@ class KeyedIC: public IC {
STORE_TRANSITION_SMI_TO_OBJECT,
STORE_TRANSITION_SMI_TO_DOUBLE,
STORE_TRANSITION_DOUBLE_TO_OBJECT,
+ STORE_TRANSITION_HOLEY_SMI_TO_OBJECT,
+ STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE,
+ STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT,
STORE_AND_GROW_NO_TRANSITION,
STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT,
STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE,
- STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT
+ STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT,
+ STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT,
+ STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE,
+ STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT
};
static const int kGrowICDelta = STORE_AND_GROW_NO_TRANSITION -
@@ -451,7 +457,7 @@ class KeyedIC: public IC {
private:
void GetReceiverMapsForStub(Handle<Code> stub, MapHandleList* result);
- Handle<Code> ComputeMonomorphicStub(Handle<JSObject> receiver,
+ Handle<Code> ComputeMonomorphicStub(Handle<Map> receiver_map,
StubKind stub_kind,
StrictModeFlag strict_mode,
Handle<Code> default_stub);
@@ -467,6 +473,12 @@ class KeyedIC: public IC {
static bool IsGrowStubKind(StubKind stub_kind) {
return stub_kind >= STORE_AND_GROW_NO_TRANSITION;
}
+
+ static StubKind GetNoTransitionStubKind(StubKind stub_kind) {
+ if (!IsTransitionStubKind(stub_kind)) return stub_kind;
+ if (IsGrowStubKind(stub_kind)) return STORE_AND_GROW_NO_TRANSITION;
+ return STORE_NO_TRANSITION;
+ }
};
@@ -625,6 +637,18 @@ class StoreIC: public IC {
};
+enum KeyedStoreCheckMap {
+ kDontCheckMap,
+ kCheckMap
+};
+
+
+enum KeyedStoreIncrementLength {
+ kDontIncrementLength,
+ kIncrementLength
+};
+
+
class KeyedStoreIC: public KeyedIC {
public:
explicit KeyedStoreIC(Isolate* isolate) : KeyedIC(isolate) {
@@ -632,7 +656,7 @@ class KeyedStoreIC: public KeyedIC {
}
MUST_USE_RESULT MaybeObject* Store(State state,
- StrictModeFlag strict_mode,
+ StrictModeFlag strict_mode,
Handle<Object> object,
Handle<Object> name,
Handle<Object> value,
diff --git a/src/3rdparty/v8/src/incremental-marking-inl.h b/src/3rdparty/v8/src/incremental-marking-inl.h
index 2dae6f2..bbe9a9d 100644
--- a/src/3rdparty/v8/src/incremental-marking-inl.h
+++ b/src/3rdparty/v8/src/incremental-marking-inl.h
@@ -48,7 +48,9 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj,
// Object is either grey or white. It will be scanned if survives.
return false;
}
- return true;
+ if (!is_compacting_) return false;
+ MarkBit obj_bit = Marking::MarkBitFrom(obj);
+ return Marking::IsBlack(obj_bit);
}
@@ -107,9 +109,9 @@ void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
// trace it. In this case we switch to non-incremental marking in
// order to finish off this marking phase.
if (FLAG_trace_gc) {
- PrintF("Hurrying incremental marking because of lack of progress\n");
+ PrintPID("Hurrying incremental marking because of lack of progress\n");
}
- allocation_marking_factor_ = kMaxAllocationMarkingFactor;
+ marking_speed_ = kMaxMarkingSpeed;
}
}
@@ -123,27 +125,6 @@ void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
}
-bool IncrementalMarking::MarkObjectAndPush(HeapObject* obj) {
- MarkBit mark_bit = Marking::MarkBitFrom(obj);
- if (!mark_bit.Get()) {
- WhiteToGreyAndPush(obj, mark_bit);
- return true;
- }
- return false;
-}
-
-
-bool IncrementalMarking::MarkObjectWithoutPush(HeapObject* obj) {
- MarkBit mark_bit = Marking::MarkBitFrom(obj);
- if (!mark_bit.Get()) {
- mark_bit.Set();
- MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
- return true;
- }
- return false;
-}
-
-
} } // namespace v8::internal
#endif // V8_INCREMENTAL_MARKING_INL_H_
diff --git a/src/3rdparty/v8/src/incremental-marking.cc b/src/3rdparty/v8/src/incremental-marking.cc
index 94afffa..b34d6d9 100644
--- a/src/3rdparty/v8/src/incremental-marking.cc
+++ b/src/3rdparty/v8/src/incremental-marking.cc
@@ -31,6 +31,8 @@
#include "code-stubs.h"
#include "compilation-cache.h"
+#include "objects-visiting.h"
+#include "objects-visiting-inl.h"
#include "v8conversions.h"
namespace v8 {
@@ -42,7 +44,6 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
state_(STOPPED),
marking_deque_memory_(NULL),
marking_deque_memory_committed_(false),
- marker_(this, heap->mark_compact_collector()),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
@@ -51,7 +52,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
steps_count_since_last_gc_(0),
steps_took_since_last_gc_(0),
should_hurry_(false),
- allocation_marking_factor_(0),
+ marking_speed_(0),
allocated_(0),
no_marking_scope_depth_(0) {
}
@@ -65,7 +66,7 @@ void IncrementalMarking::TearDown() {
void IncrementalMarking::RecordWriteSlow(HeapObject* obj,
Object** slot,
Object* value) {
- if (BaseRecordWrite(obj, slot, value) && is_compacting_ && slot != NULL) {
+ if (BaseRecordWrite(obj, slot, value) && slot != NULL) {
MarkBit obj_bit = Marking::MarkBitFrom(obj);
if (Marking::IsBlack(obj_bit)) {
// Object is not going to be rescanned we need to record the slot.
@@ -80,17 +81,19 @@ void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
Object* value,
Isolate* isolate) {
ASSERT(obj->IsHeapObject());
-
- // Fast cases should already be covered by RecordWriteStub.
- ASSERT(value->IsHeapObject());
- ASSERT(!value->IsHeapNumber());
- ASSERT(!value->IsString() ||
- value->IsConsString() ||
- value->IsSlicedString());
- ASSERT(Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(value))));
-
IncrementalMarking* marking = isolate->heap()->incremental_marking();
ASSERT(!marking->is_compacting_);
+
+ MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+ int counter = chunk->write_barrier_counter();
+ if (counter < (MemoryChunk::kWriteBarrierCounterGranularity / 2)) {
+ marking->write_barriers_invoked_since_last_step_ +=
+ MemoryChunk::kWriteBarrierCounterGranularity -
+ chunk->write_barrier_counter();
+ chunk->set_write_barrier_counter(
+ MemoryChunk::kWriteBarrierCounterGranularity);
+ }
+
marking->RecordWrite(obj, NULL, value);
}
@@ -98,8 +101,20 @@ void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
void IncrementalMarking::RecordWriteForEvacuationFromCode(HeapObject* obj,
Object** slot,
Isolate* isolate) {
+ ASSERT(obj->IsHeapObject());
IncrementalMarking* marking = isolate->heap()->incremental_marking();
ASSERT(marking->is_compacting_);
+
+ MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+ int counter = chunk->write_barrier_counter();
+ if (counter < (MemoryChunk::kWriteBarrierCounterGranularity / 2)) {
+ marking->write_barriers_invoked_since_last_step_ +=
+ MemoryChunk::kWriteBarrierCounterGranularity -
+ chunk->write_barrier_counter();
+ chunk->set_write_barrier_counter(
+ MemoryChunk::kWriteBarrierCounterGranularity);
+ }
+
marking->RecordWrite(obj, slot, *slot);
}
@@ -125,9 +140,9 @@ void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) {
void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
- Object** slot,
- Code* value) {
- if (BaseRecordWrite(host, slot, value) && is_compacting_) {
+ Object** slot,
+ Code* value) {
+ if (BaseRecordWrite(host, slot, value)) {
ASSERT(slot != NULL);
heap_->mark_compact_collector()->
RecordCodeEntrySlot(reinterpret_cast<Address>(slot), value);
@@ -160,93 +175,92 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
}
-class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
- public:
- IncrementalMarkingMarkingVisitor(Heap* heap,
- IncrementalMarking* incremental_marking)
- : heap_(heap),
- incremental_marking_(incremental_marking) {
- }
-
- void VisitEmbeddedPointer(RelocInfo* rinfo) {
- ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- Object* target = rinfo->target_object();
- if (target->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- MarkObject(target);
+static void MarkObjectGreyDoNotEnqueue(Object* obj) {
+ if (obj->IsHeapObject()) {
+ HeapObject* heap_obj = HeapObject::cast(obj);
+ MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
+ if (Marking::IsBlack(mark_bit)) {
+ MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
+ -heap_obj->Size());
}
+ Marking::AnyToGrey(mark_bit);
}
+}
- void VisitCodeTarget(RelocInfo* rinfo) {
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
- && (target->ic_age() != heap_->global_ic_age())) {
- IC::Clear(rinfo->pc());
- target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- }
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
- MarkObject(target);
- }
- void VisitDebugTarget(RelocInfo* rinfo) {
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
- rinfo->IsPatchedReturnSequence()) ||
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
- rinfo->IsPatchedDebugBreakSlotSequence()));
- Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
- MarkObject(target);
+class IncrementalMarkingMarkingVisitor
+ : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
+ public:
+ static void Initialize() {
+ StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
+
+ table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
+ table_.Register(kVisitJSRegExp, &VisitJSRegExp);
}
- void VisitCodeEntry(Address entry_address) {
- Object* target = Code::GetObjectFromEntryAddress(entry_address);
- heap_->mark_compact_collector()->
- RecordCodeEntrySlot(entry_address, Code::cast(target));
- MarkObject(target);
+ static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
+ Context* context = Context::cast(object);
+
+ // We will mark cache black with a separate pass
+ // when we finish marking.
+ MarkObjectGreyDoNotEnqueue(context->normalized_map_cache());
+ VisitNativeContext(map, context);
}
- void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {
- if (shared->ic_age() != heap_->global_ic_age()) {
- shared->ResetForNewContext(heap_->global_ic_age());
- }
+ static void VisitJSWeakMap(Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ VisitPointers(heap,
+ HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
+ HeapObject::RawField(object, JSWeakMap::kSize));
}
- void VisitPointer(Object** p) {
+ static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {}
+
+ INLINE(static void VisitPointer(Heap* heap, Object** p)) {
Object* obj = *p;
if (obj->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordSlot(p, p, obj);
- MarkObject(obj);
+ heap->mark_compact_collector()->RecordSlot(p, p, obj);
+ MarkObject(heap, obj);
}
}
- void VisitPointers(Object** start, Object** end) {
+ INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
for (Object** p = start; p < end; p++) {
Object* obj = *p;
if (obj->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordSlot(start, p, obj);
- MarkObject(obj);
+ heap->mark_compact_collector()->RecordSlot(start, p, obj);
+ MarkObject(heap, obj);
}
}
}
- private:
- // Mark object pointed to by p.
- INLINE(void MarkObject(Object* obj)) {
+ // Marks the object grey and pushes it on the marking stack.
+ INLINE(static void MarkObject(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
- if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
+ if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
heap_object->Size());
}
} else if (Marking::IsWhite(mark_bit)) {
- incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
+ heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
}
}
- Heap* heap_;
- IncrementalMarking* incremental_marking_;
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
+ HeapObject* heap_object = HeapObject::cast(obj);
+ MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+ if (Marking::IsWhite(mark_bit)) {
+ mark_bit.Set();
+ MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
+ heap_object->Size());
+ return true;
+ }
+ return false;
+ }
};
@@ -290,6 +304,11 @@ class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
};
+void IncrementalMarking::Initialize() {
+ IncrementalMarkingMarkingVisitor::Initialize();
+}
+
+
void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
bool is_marking,
bool is_compacting) {
@@ -498,19 +517,6 @@ void IncrementalMarking::Start() {
}
-static void MarkObjectGreyDoNotEnqueue(Object* obj) {
- if (obj->IsHeapObject()) {
- HeapObject* heap_obj = HeapObject::cast(obj);
- MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
- if (Marking::IsBlack(mark_bit)) {
- MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
- -heap_obj->Size());
- }
- Marking::AnyToGrey(mark_bit);
- }
-}
-
-
void IncrementalMarking::StartMarking(CompactionFlag flag) {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start marking\n");
@@ -537,8 +543,8 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
ActivateIncrementalWriteBarrier();
-#ifdef DEBUG
// Marking bits are cleared by the sweeper.
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
}
@@ -623,20 +629,50 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
}
-void IncrementalMarking::VisitGlobalContext(Context* ctx, ObjectVisitor* v) {
- v->VisitPointers(
- HeapObject::RawField(
- ctx, Context::MarkCompactBodyDescriptor::kStartOffset),
- HeapObject::RawField(
- ctx, Context::MarkCompactBodyDescriptor::kEndOffset));
+void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
+ MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+ if (Marking::IsWhite(map_mark_bit)) {
+ WhiteToGreyAndPush(map, map_mark_bit);
+ }
+
+ IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+
+ MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+ SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+ (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
+ Marking::MarkBlack(obj_mark_bit);
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+}
- MarkCompactCollector* collector = heap_->mark_compact_collector();
- for (int idx = Context::FIRST_WEAK_SLOT;
- idx < Context::GLOBAL_CONTEXT_SLOTS;
- ++idx) {
- Object** slot =
- HeapObject::RawField(ctx, FixedArray::OffsetOfElementAt(idx));
- collector->RecordSlot(slot, slot, *slot);
+
+void IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
+ Map* filler_map = heap_->one_pointer_filler_map();
+ while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+ HeapObject* obj = marking_deque_.Pop();
+
+ // Explicitly skip one word fillers. Incremental markbit patterns are
+ // correct only for objects that occupy at least two words.
+ Map* map = obj->map();
+ if (map == filler_map) continue;
+
+ int size = obj->SizeFromMap(map);
+ bytes_to_process -= size;
+ VisitObject(map, obj, size);
+ }
+}
+
+
+void IncrementalMarking::ProcessMarkingDeque() {
+ Map* filler_map = heap_->one_pointer_filler_map();
+ while (!marking_deque_.IsEmpty()) {
+ HeapObject* obj = marking_deque_.Pop();
+
+ // Explicitly skip one word fillers. Incremental markbit patterns are
+ // correct only for objects that occupy at least two words.
+ Map* map = obj->map();
+ if (map == filler_map) continue;
+
+ VisitObject(map, obj, obj->SizeFromMap(map));
}
}
@@ -650,45 +686,7 @@ void IncrementalMarking::Hurry() {
}
// TODO(gc) hurry can mark objects it encounters black as mutator
// was stopped.
- Map* filler_map = heap_->one_pointer_filler_map();
- Map* global_context_map = heap_->global_context_map();
- IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
- while (!marking_deque_.IsEmpty()) {
- HeapObject* obj = marking_deque_.Pop();
-
- // Explicitly skip one word fillers. Incremental markbit patterns are
- // correct only for objects that occupy at least two words.
- Map* map = obj->map();
- if (map == filler_map) {
- continue;
- } else if (map == global_context_map) {
- // Global contexts have weak fields.
- VisitGlobalContext(Context::cast(obj), &marking_visitor);
- } else if (map->instance_type() == MAP_TYPE) {
- Map* map = Map::cast(obj);
- heap_->ClearCacheOnMap(map);
-
- // When map collection is enabled we have to mark through map's
- // transitions and back pointers in a special way to make these links
- // weak. Only maps for subclasses of JSReceiver can have transitions.
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (FLAG_collect_maps &&
- map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- marker_.MarkMapContents(map);
- } else {
- marking_visitor.VisitPointers(
- HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
- HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
- }
- } else {
- obj->Iterate(&marking_visitor);
- }
-
- MarkBit mark_bit = Marking::MarkBitFrom(obj);
- ASSERT(!Marking::IsBlack(mark_bit));
- Marking::MarkBlack(mark_bit);
- MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
- }
+ ProcessMarkingDeque();
state_ = COMPLETE;
if (FLAG_trace_incremental_marking) {
double end = OS::TimeCurrentMillis();
@@ -704,7 +702,7 @@ void IncrementalMarking::Hurry() {
PolymorphicCodeCache::kSize);
}
- Object* context = heap_->global_contexts_list();
+ Object* context = heap_->native_contexts_list();
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
@@ -794,11 +792,25 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
allocated_ += allocated_bytes;
- if (allocated_ < kAllocatedThreshold) return;
+ if (allocated_ < kAllocatedThreshold &&
+ write_barriers_invoked_since_last_step_ <
+ kWriteBarriersInvokedThreshold) {
+ return;
+ }
if (state_ == MARKING && no_marking_scope_depth_ > 0) return;
- intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
+ // The marking speed is driven either by the allocation rate or by the rate
+ // at which we are having to check the color of objects in the write barrier.
+ // It is possible for a tight non-allocating loop to run a lot of write
+ // barriers before we get here and check them (marking can only take place on
+ // allocation), so to reduce the lumpiness we don't use the write barriers
+ // invoked since last step directly to determine the amount of work to do.
+ intptr_t bytes_to_process =
+ marking_speed_ * Max(allocated_, kWriteBarriersInvokedThreshold);
+ allocated_ = 0;
+ write_barriers_invoked_since_last_step_ = 0;
+
bytes_scanned_ += bytes_to_process;
double start = 0;
@@ -813,87 +825,19 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
StartMarking(PREVENT_COMPACTION);
}
} else if (state_ == MARKING) {
- Map* filler_map = heap_->one_pointer_filler_map();
- Map* global_context_map = heap_->global_context_map();
- IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
- while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
- HeapObject* obj = marking_deque_.Pop();
-
- // Explicitly skip one word fillers. Incremental markbit patterns are
- // correct only for objects that occupy at least two words.
- Map* map = obj->map();
- if (map == filler_map) continue;
-
- int size = obj->SizeFromMap(map);
- bytes_to_process -= size;
- MarkBit map_mark_bit = Marking::MarkBitFrom(map);
- if (Marking::IsWhite(map_mark_bit)) {
- WhiteToGreyAndPush(map, map_mark_bit);
- }
-
- // TODO(gc) switch to static visitor instead of normal visitor.
- if (map == global_context_map) {
- // Global contexts have weak fields.
- Context* ctx = Context::cast(obj);
-
- // We will mark cache black with a separate pass
- // when we finish marking.
- MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
-
- VisitGlobalContext(ctx, &marking_visitor);
- } else if (map->instance_type() == MAP_TYPE) {
- Map* map = Map::cast(obj);
- heap_->ClearCacheOnMap(map);
-
- // When map collection is enabled we have to mark through map's
- // transitions and back pointers in a special way to make these links
- // weak. Only maps for subclasses of JSReceiver can have transitions.
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (FLAG_collect_maps &&
- map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- marker_.MarkMapContents(map);
- } else {
- marking_visitor.VisitPointers(
- HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
- HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
- }
- } else if (map->instance_type() == JS_FUNCTION_TYPE) {
- marking_visitor.VisitPointers(
- HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
- HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
-
- marking_visitor.VisitCodeEntry(
- obj->address() + JSFunction::kCodeEntryOffset);
-
- marking_visitor.VisitPointers(
- HeapObject::RawField(obj,
- JSFunction::kCodeEntryOffset + kPointerSize),
- HeapObject::RawField(obj,
- JSFunction::kNonWeakFieldsEndOffset));
- } else {
- obj->IterateBody(map->instance_type(), size, &marking_visitor);
- }
-
- MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
- SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
- (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
- Marking::MarkBlack(obj_mark_bit);
- MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
- }
+ ProcessMarkingDeque(bytes_to_process);
if (marking_deque_.IsEmpty()) MarkingComplete(action);
}
- allocated_ = 0;
-
steps_count_++;
steps_count_since_last_gc_++;
bool speed_up = false;
- if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
+ if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) {
if (FLAG_trace_gc) {
- PrintF("Speed up marking after %d steps\n",
- static_cast<int>(kAllocationMarkingFactorSpeedupInterval));
+ PrintPID("Speed up marking after %d steps\n",
+ static_cast<int>(kMarkingSpeedAccellerationInterval));
}
speed_up = true;
}
@@ -902,35 +846,35 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
(old_generation_space_available_at_start_of_incremental_ < 10 * MB);
bool only_1_nth_of_space_that_was_available_still_left =
- (SpaceLeftInOldSpace() * (allocation_marking_factor_ + 1) <
+ (SpaceLeftInOldSpace() * (marking_speed_ + 1) <
old_generation_space_available_at_start_of_incremental_);
if (space_left_is_very_small ||
only_1_nth_of_space_that_was_available_still_left) {
- if (FLAG_trace_gc) PrintF("Speed up marking because of low space left\n");
+ if (FLAG_trace_gc) PrintPID("Speed up marking because of low space left\n");
speed_up = true;
}
bool size_of_old_space_multiplied_by_n_during_marking =
(heap_->PromotedTotalSize() >
- (allocation_marking_factor_ + 1) *
+ (marking_speed_ + 1) *
old_generation_space_used_at_start_of_incremental_);
if (size_of_old_space_multiplied_by_n_during_marking) {
speed_up = true;
if (FLAG_trace_gc) {
- PrintF("Speed up marking because of heap size increase\n");
+ PrintPID("Speed up marking because of heap size increase\n");
}
}
int64_t promoted_during_marking = heap_->PromotedTotalSize()
- old_generation_space_used_at_start_of_incremental_;
- intptr_t delay = allocation_marking_factor_ * MB;
+ intptr_t delay = marking_speed_ * MB;
intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();
// We try to scan at at least twice the speed that we are allocating.
if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
if (FLAG_trace_gc) {
- PrintF("Speed up marking because marker was not keeping up\n");
+ PrintPID("Speed up marking because marker was not keeping up\n");
}
speed_up = true;
}
@@ -938,15 +882,15 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
if (speed_up) {
if (state_ != MARKING) {
if (FLAG_trace_gc) {
- PrintF("Postponing speeding up marking until marking starts\n");
+ PrintPID("Postponing speeding up marking until marking starts\n");
}
} else {
- allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
- allocation_marking_factor_ = static_cast<int>(
- Min(kMaxAllocationMarkingFactor,
- static_cast<intptr_t>(allocation_marking_factor_ * 1.3)));
+ marking_speed_ += kMarkingSpeedAccellerationInterval;
+ marking_speed_ = static_cast<int>(
+ Min(kMaxMarkingSpeed,
+ static_cast<intptr_t>(marking_speed_ * 1.3)));
if (FLAG_trace_gc) {
- PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
+ PrintPID("Marking speed increased to %d\n", marking_speed_);
}
}
}
@@ -972,8 +916,9 @@ void IncrementalMarking::ResetStepCounters() {
steps_count_since_last_gc_ = 0;
steps_took_since_last_gc_ = 0;
bytes_rescanned_ = 0;
- allocation_marking_factor_ = kInitialAllocationMarkingFactor;
+ marking_speed_ = kInitialMarkingSpeed;
bytes_scanned_ = 0;
+ write_barriers_invoked_since_last_step_ = 0;
}
diff --git a/src/3rdparty/v8/src/incremental-marking.h b/src/3rdparty/v8/src/incremental-marking.h
index 39e8dae..6ae0f59 100644
--- a/src/3rdparty/v8/src/incremental-marking.h
+++ b/src/3rdparty/v8/src/incremental-marking.h
@@ -53,6 +53,8 @@ class IncrementalMarking {
explicit IncrementalMarking(Heap* heap);
+ static void Initialize();
+
void TearDown();
State state() {
@@ -93,21 +95,23 @@ class IncrementalMarking {
// progress in the face of the mutator creating new work for it. We start
// of at a moderate rate of work and gradually increase the speed of the
// incremental marker until it completes.
- // Do some marking every time this much memory has been allocated.
+ // Do some marking every time this much memory has been allocated or that many
+ // heavy (color-checking) write barriers have been invoked.
static const intptr_t kAllocatedThreshold = 65536;
+ static const intptr_t kWriteBarriersInvokedThreshold = 65536;
// Start off by marking this many times more memory than has been allocated.
- static const intptr_t kInitialAllocationMarkingFactor = 1;
+ static const intptr_t kInitialMarkingSpeed = 1;
// But if we are promoting a lot of data we need to mark faster to keep up
// with the data that is entering the old space through promotion.
static const intptr_t kFastMarking = 3;
// After this many steps we increase the marking/allocating factor.
- static const intptr_t kAllocationMarkingFactorSpeedupInterval = 1024;
+ static const intptr_t kMarkingSpeedAccellerationInterval = 1024;
// This is how much we increase the marking/allocating factor by.
- static const intptr_t kAllocationMarkingFactorSpeedup = 2;
- static const intptr_t kMaxAllocationMarkingFactor = 1000;
+ static const intptr_t kMarkingSpeedAccelleration = 2;
+ static const intptr_t kMaxMarkingSpeed = 1000;
void OldSpaceStep(intptr_t allocated) {
- Step(allocated * kFastMarking / kInitialAllocationMarkingFactor,
+ Step(allocated * kFastMarking / kInitialMarkingSpeed,
GC_VIA_STACK_GUARD);
}
@@ -130,6 +134,12 @@ class IncrementalMarking {
Object** slot,
Isolate* isolate);
+ // Record a slot for compaction. Returns false for objects that are
+ // guaranteed to be rescanned or not guaranteed to survive.
+ //
+ // No slots in white objects should be recorded, as some slots are typed and
+ // cannot be interpreted correctly if the underlying object does not survive
+ // the incremental cycle (stays white).
INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value));
INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value));
INLINE(void RecordWriteIntoCode(HeapObject* obj,
@@ -167,16 +177,6 @@ class IncrementalMarking {
return true;
}
- // Marks the object grey and pushes it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for incremental marking only.
- INLINE(bool MarkObjectAndPush(HeapObject* obj));
-
- // Marks the object black without pushing it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for incremental marking only.
- INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
-
inline int steps_count() {
return steps_count_;
}
@@ -213,12 +213,13 @@ class IncrementalMarking {
void NotifyOfHighPromotionRate() {
if (IsMarking()) {
- if (allocation_marking_factor_ < kFastMarking) {
+ if (marking_speed_ < kFastMarking) {
if (FLAG_trace_gc) {
- PrintF("Increasing marking speed to %d due to high promotion rate\n",
- static_cast<int>(kFastMarking));
+ PrintPID("Increasing marking speed to %d "
+ "due to high promotion rate\n",
+ static_cast<int>(kFastMarking));
}
- allocation_marking_factor_ = kFastMarking;
+ marking_speed_ = kFastMarking;
}
}
}
@@ -258,7 +259,11 @@ class IncrementalMarking {
void EnsureMarkingDequeIsCommitted();
- void VisitGlobalContext(Context* ctx, ObjectVisitor* v);
+ INLINE(void ProcessMarkingDeque());
+
+ INLINE(void ProcessMarkingDeque(intptr_t bytes_to_process));
+
+ INLINE(void VisitObject(Map* map, HeapObject* obj, int size));
Heap* heap_;
@@ -268,7 +273,6 @@ class IncrementalMarking {
VirtualMemory* marking_deque_memory_;
bool marking_deque_memory_committed_;
MarkingDeque marking_deque_;
- Marker<IncrementalMarking> marker_;
int steps_count_;
double steps_took_;
@@ -279,9 +283,10 @@ class IncrementalMarking {
double steps_took_since_last_gc_;
int64_t bytes_rescanned_;
bool should_hurry_;
- int allocation_marking_factor_;
+ int marking_speed_;
intptr_t bytes_scanned_;
intptr_t allocated_;
+ intptr_t write_barriers_invoked_since_last_step_;
int no_marking_scope_depth_;
diff --git a/src/3rdparty/v8/src/interface.cc b/src/3rdparty/v8/src/interface.cc
index 7836110..336be82 100644
--- a/src/3rdparty/v8/src/interface.cc
+++ b/src/3rdparty/v8/src/interface.cc
@@ -41,11 +41,13 @@ static bool Match(void* key1, void* key2) {
}
-Interface* Interface::Lookup(Handle<String> name) {
+Interface* Interface::Lookup(Handle<String> name, Zone* zone) {
ASSERT(IsModule());
ZoneHashMap* map = Chase()->exports_;
if (map == NULL) return NULL;
- ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false);
+ ZoneAllocationPolicy allocator(zone);
+ ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false,
+ allocator);
if (p == NULL) return NULL;
ASSERT(*static_cast<String**>(p->key) == *name);
ASSERT(p->value != NULL);
@@ -69,7 +71,7 @@ int Nesting::current_ = 0;
void Interface::DoAdd(
- void* name, uint32_t hash, Interface* interface, bool* ok) {
+ void* name, uint32_t hash, Interface* interface, Zone* zone, bool* ok) {
MakeModule(ok);
if (!*ok) return;
@@ -85,9 +87,13 @@ void Interface::DoAdd(
#endif
ZoneHashMap** map = &Chase()->exports_;
- if (*map == NULL) *map = new ZoneHashMap(Match, 8);
+ ZoneAllocationPolicy allocator(zone);
- ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen());
+ if (*map == NULL)
+ *map = new ZoneHashMap(Match, ZoneHashMap::kDefaultHashMapCapacity,
+ allocator);
+
+ ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen(), allocator);
if (p == NULL) {
// This didn't have name but was frozen already, that's an error.
*ok = false;
@@ -97,7 +103,7 @@ void Interface::DoAdd(
#ifdef DEBUG
Nesting nested;
#endif
- static_cast<Interface*>(p->value)->Unify(interface, ok);
+ static_cast<Interface*>(p->value)->Unify(interface, zone, ok);
}
#ifdef DEBUG
@@ -110,16 +116,24 @@ void Interface::DoAdd(
}
-void Interface::Unify(Interface* that, bool* ok) {
- if (this->forward_) return this->Chase()->Unify(that, ok);
- if (that->forward_) return this->Unify(that->Chase(), ok);
+void Interface::Unify(Interface* that, Zone* zone, bool* ok) {
+ if (this->forward_) return this->Chase()->Unify(that, zone, ok);
+ if (that->forward_) return this->Unify(that->Chase(), zone, ok);
ASSERT(this->forward_ == NULL);
ASSERT(that->forward_ == NULL);
*ok = true;
if (this == that) return;
- if (this->IsValue()) return that->MakeValue(ok);
- if (that->IsValue()) return this->MakeValue(ok);
+ if (this->IsValue()) {
+ that->MakeValue(ok);
+ if (*ok && this->IsConst()) that->MakeConst(ok);
+ return;
+ }
+ if (that->IsValue()) {
+ this->MakeValue(ok);
+ if (*ok && that->IsConst()) this->MakeConst(ok);
+ return;
+ }
#ifdef DEBUG
if (FLAG_print_interface_details) {
@@ -134,9 +148,9 @@ void Interface::Unify(Interface* that, bool* ok) {
// Merge the smaller interface into the larger, for performance.
if (this->exports_ != NULL && (that->exports_ == NULL ||
this->exports_->occupancy() >= that->exports_->occupancy())) {
- this->DoUnify(that, ok);
+ this->DoUnify(that, ok, zone);
} else {
- that->DoUnify(this, ok);
+ that->DoUnify(this, ok, zone);
}
#ifdef DEBUG
@@ -151,7 +165,7 @@ void Interface::Unify(Interface* that, bool* ok) {
}
-void Interface::DoUnify(Interface* that, bool* ok) {
+void Interface::DoUnify(Interface* that, bool* ok, Zone* zone) {
ASSERT(this->forward_ == NULL);
ASSERT(that->forward_ == NULL);
ASSERT(!this->IsValue());
@@ -166,7 +180,7 @@ void Interface::DoUnify(Interface* that, bool* ok) {
ZoneHashMap* map = that->exports_;
if (map != NULL) {
for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
- this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), ok);
+ this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), zone, ok);
if (!*ok) return;
}
}
@@ -208,6 +222,8 @@ void Interface::Print(int n) {
if (IsUnknown()) {
PrintF("unknown\n");
+ } else if (IsConst()) {
+ PrintF("const\n");
} else if (IsValue()) {
PrintF("value\n");
} else if (IsModule()) {
diff --git a/src/3rdparty/v8/src/interface.h b/src/3rdparty/v8/src/interface.h
index 580f082..94ef11b 100644
--- a/src/3rdparty/v8/src/interface.h
+++ b/src/3rdparty/v8/src/interface.h
@@ -36,29 +36,45 @@ namespace internal {
// This class implements the following abstract grammar of interfaces
// (i.e. module types):
-// interface ::= UNDETERMINED | VALUE | MODULE(exports)
+// interface ::= UNDETERMINED | VALUE | CONST | MODULE(exports)
// exports ::= {name : interface, ...}
-// A frozen module type is one that is fully determined. Unification does not
-// allow adding additional exports to frozen interfaces.
-// Otherwise, unifying modules merges their exports.
+// A frozen type is one that is fully determined. Unification does not
+// allow to turn non-const values into const, or adding additional exports to
+// frozen interfaces. Otherwise, unifying modules merges their exports.
// Undetermined types are unification variables that can be unified freely.
+// There is a natural subsort lattice that reflects the increase of knowledge:
+//
+// undetermined
+// // | \\ .
+// value (frozen) module
+// // \\ / \ //
+// const fr.value fr.module
+// \\ /
+// fr.const
+//
+// where the bold lines are the only transitions allowed.
class Interface : public ZoneObject {
public:
// ---------------------------------------------------------------------------
// Factory methods.
+ static Interface* NewUnknown(Zone* zone) {
+ return new(zone) Interface(NONE);
+ }
+
static Interface* NewValue() {
static Interface value_interface(VALUE + FROZEN); // Cached.
return &value_interface;
}
- static Interface* NewUnknown() {
- return new Interface(NONE);
+ static Interface* NewConst() {
+ static Interface value_interface(VALUE + CONST + FROZEN); // Cached.
+ return &value_interface;
}
- static Interface* NewModule() {
- return new Interface(MODULE);
+ static Interface* NewModule(Zone* zone) {
+ return new(zone) Interface(MODULE);
}
// ---------------------------------------------------------------------------
@@ -66,13 +82,13 @@ class Interface : public ZoneObject {
// Add a name to the list of exports. If it already exists, unify with
// interface, otherwise insert unless this is closed.
- void Add(Handle<String> name, Interface* interface, bool* ok) {
- DoAdd(name.location(), name->Hash(), interface, ok);
+ void Add(Handle<String> name, Interface* interface, Zone* zone, bool* ok) {
+ DoAdd(name.location(), name->Hash(), interface, zone, ok);
}
// Unify with another interface. If successful, both interface objects will
// represent the same type, and changes to one are reflected in the other.
- void Unify(Interface* that, bool* ok);
+ void Unify(Interface* that, Zone* zone, bool* ok);
// Determine this interface to be a value interface.
void MakeValue(bool* ok) {
@@ -80,6 +96,12 @@ class Interface : public ZoneObject {
if (*ok) Chase()->flags_ |= VALUE;
}
+ // Determine this interface to be an immutable interface.
+ void MakeConst(bool* ok) {
+ *ok = !IsModule() && (IsConst() || !IsFrozen());
+ if (*ok) Chase()->flags_ |= VALUE + CONST;
+ }
+
// Determine this interface to be a module interface.
void MakeModule(bool* ok) {
*ok = !IsValue();
@@ -107,6 +129,9 @@ class Interface : public ZoneObject {
// Check whether this is a value type.
bool IsValue() { return Chase()->flags_ & VALUE; }
+ // Check whether this is a constant type.
+ bool IsConst() { return Chase()->flags_ & CONST; }
+
// Check whether this is a module type.
bool IsModule() { return Chase()->flags_ & MODULE; }
@@ -116,7 +141,7 @@ class Interface : public ZoneObject {
Handle<JSModule> Instance() { return Chase()->instance_; }
// Look up an exported name. Returns NULL if not (yet) defined.
- Interface* Lookup(Handle<String> name);
+ Interface* Lookup(Handle<String> name, Zone* zone);
// ---------------------------------------------------------------------------
// Iterators.
@@ -161,8 +186,9 @@ class Interface : public ZoneObject {
enum Flags { // All flags are monotonic
NONE = 0,
VALUE = 1, // This type describes a value
- MODULE = 2, // This type describes a module
- FROZEN = 4 // This type is fully determined
+ CONST = 2, // This type describes a constant
+ MODULE = 4, // This type describes a module
+ FROZEN = 8 // This type is fully determined
};
int flags_;
@@ -187,8 +213,9 @@ class Interface : public ZoneObject {
return result;
}
- void DoAdd(void* name, uint32_t hash, Interface* interface, bool* ok);
- void DoUnify(Interface* that, bool* ok);
+ void DoAdd(void* name, uint32_t hash, Interface* interface, Zone* zone,
+ bool* ok);
+ void DoUnify(Interface* that, bool* ok, Zone* zone);
};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/isolate.cc b/src/3rdparty/v8/src/isolate.cc
index 63029ab..3a6099b 100644
--- a/src/3rdparty/v8/src/isolate.cc
+++ b/src/3rdparty/v8/src/isolate.cc
@@ -257,7 +257,7 @@ void Isolate::PreallocatedStorageInit(size_t size) {
void* Isolate::PreallocatedStorageNew(size_t size) {
if (!preallocated_storage_preallocated_) {
- return FreeStoreAllocationPolicy::New(size);
+ return FreeStoreAllocationPolicy().New(size);
}
ASSERT(free_list_.next_ != &free_list_);
ASSERT(free_list_.previous_ != &free_list_);
@@ -478,6 +478,14 @@ void Isolate::Iterate(ObjectVisitor* v) {
Iterate(v, current_t);
}
+void Isolate::IterateDeferredHandles(ObjectVisitor* visitor) {
+ for (DeferredHandles* deferred = deferred_handles_head_;
+ deferred != NULL;
+ deferred = deferred->next_) {
+ deferred->Iterate(visitor);
+ }
+}
+
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
// The ARM simulator has a separate JS stack. We therefore register
@@ -528,6 +536,24 @@ Handle<String> Isolate::StackTraceString() {
}
+void Isolate::PushStackTraceAndDie(unsigned int magic,
+ Object* object,
+ Map* map,
+ unsigned int magic2) {
+ const int kMaxStackTraceSize = 8192;
+ Handle<String> trace = StackTraceString();
+ char buffer[kMaxStackTraceSize];
+ int length = Min(kMaxStackTraceSize - 1, trace->length());
+ String::WriteToFlat(*trace, buffer, 0, length);
+ buffer[length] = '\0';
+ OS::PrintError("Stacktrace (%x-%x) %p %p: %s\n",
+ magic, magic2,
+ static_cast<void*>(object), static_cast<void*>(map),
+ buffer);
+ OS::Abort();
+}
+
+
void Isolate::CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object) {
if (capture_stack_trace_for_uncaught_exceptions_) {
// Capture stack trace for a detailed exception message.
@@ -549,8 +575,6 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
Handle<String> column_key = factory()->LookupAsciiSymbol("column");
Handle<String> line_key = factory()->LookupAsciiSymbol("lineNumber");
Handle<String> script_key = factory()->LookupAsciiSymbol("scriptName");
- Handle<String> name_or_source_url_key =
- factory()->LookupAsciiSymbol("nameOrSourceURL");
Handle<String> script_name_or_source_url_key =
factory()->LookupAsciiSymbol("scriptNameOrSourceURL");
Handle<String> function_key = factory()->LookupAsciiSymbol("functionName");
@@ -610,18 +634,7 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
}
if (options & StackTrace::kScriptNameOrSourceURL) {
- Handle<Object> script_name(script->name(), this);
- Handle<JSValue> script_wrapper = GetScriptWrapper(script);
- Handle<Object> property = GetProperty(script_wrapper,
- name_or_source_url_key);
- ASSERT(property->IsJSFunction());
- Handle<JSFunction> method = Handle<JSFunction>::cast(property);
- bool caught_exception;
- Handle<Object> result = Execution::TryCall(method, script_wrapper, 0,
- NULL, &caught_exception);
- if (caught_exception) {
- result = factory()->undefined_value();
- }
+ Handle<Object> result = GetScriptNameOrSourceURL(script);
CHECK_NOT_EMPTY_HANDLE(this,
JSObject::SetLocalPropertyIgnoreAttributes(
stack_frame, script_name_or_source_url_key,
@@ -744,7 +757,7 @@ void Isolate::SetFailedAccessCheckCallback(
thread_local_top()->failed_access_check_callback_ = callback;
}
-
+
void Isolate::SetUserObjectComparisonCallback(
v8::UserObjectComparisonCallback callback) {
thread_local_top()->user_object_comparison_callback_ = callback;
@@ -788,16 +801,17 @@ static MayAccessDecision MayAccessPreCheck(Isolate* isolate,
if (isolate->bootstrapper()->IsActive()) return YES;
if (receiver->IsJSGlobalProxy()) {
- Object* receiver_context = JSGlobalProxy::cast(receiver)->context();
+ Object* receiver_context = JSGlobalProxy::cast(receiver)->native_context();
if (!receiver_context->IsContext()) return NO;
- // Get the global context of current top context.
- // avoid using Isolate::global_context() because it uses Handle.
- Context* global_context = isolate->context()->global()->global_context();
- if (receiver_context == global_context) return YES;
+ // Get the native context of current top context.
+ // avoid using Isolate::native_context() because it uses Handle.
+ Context* native_context =
+ isolate->context()->global_object()->native_context();
+ if (receiver_context == native_context) return YES;
if (Context::cast(receiver_context)->security_token() ==
- global_context->security_token())
+ native_context->security_token())
return YES;
}
@@ -928,7 +942,7 @@ Failure* Isolate::Throw(Object* exception, MessageLocation* location) {
}
-Failure* Isolate::ReThrow(MaybeObject* exception, MessageLocation* location) {
+Failure* Isolate::ReThrow(MaybeObject* exception) {
bool can_be_caught_externally = false;
bool catchable_by_javascript = is_catchable_by_javascript(exception);
ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
@@ -1118,6 +1132,14 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) {
stack_trace_for_uncaught_exceptions_options_);
}
}
+ // Stringify custom error objects for the message object.
+ if (exception_handle->IsJSObject() && !IsErrorObject(exception_handle)) {
+ bool failed = false;
+ exception_handle = Execution::ToString(exception_handle, &failed);
+ if (failed) {
+ exception_handle = factory()->LookupAsciiSymbol("exception");
+ }
+ }
Handle<Object> message_obj = MessageHandler::MakeMessageObject(
"uncaught_exception",
location,
@@ -1138,8 +1160,18 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) {
// to the console for easier debugging.
int line_number = GetScriptLineNumberSafe(location->script(),
location->start_pos());
- OS::PrintError("Extension or internal compilation error at line %d.\n",
- line_number);
+ if (exception->IsString()) {
+ OS::PrintError(
+ "Extension or internal compilation error: %s in %s at line %d.\n",
+ *String::cast(exception)->ToCString(),
+ *String::cast(location->script()->name())->ToCString(),
+ line_number + 1);
+ } else {
+ OS::PrintError(
+ "Extension or internal compilation error in %s at line %d.\n",
+ *String::cast(location->script()->name())->ToCString(),
+ line_number + 1);
+ }
}
}
@@ -1202,7 +1234,7 @@ void Isolate::ReportPendingMessages() {
PropagatePendingExceptionToExternalTryCatch();
// If the pending exception is OutOfMemoryException set out_of_memory in
- // the global context. Note: We have to mark the global context here
+ // the native context. Note: We have to mark the native context here
// since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
// set it.
HandleScope scope;
@@ -1312,20 +1344,26 @@ bool Isolate::is_out_of_memory() {
}
+Handle<Context> Isolate::native_context() {
+ GlobalObject* global = thread_local_top()->context_->global_object();
+ return Handle<Context>(global->native_context());
+}
+
+
Handle<Context> Isolate::global_context() {
- GlobalObject* global = thread_local_top()->context_->global();
+ GlobalObject* global = thread_local_top()->context_->global_object();
return Handle<Context>(global->global_context());
}
-Handle<Context> Isolate::GetCallingGlobalContext() {
+Handle<Context> Isolate::GetCallingNativeContext() {
JavaScriptFrameIterator it;
#ifdef ENABLE_DEBUGGER_SUPPORT
if (debug_->InDebugger()) {
while (!it.done()) {
JavaScriptFrame* frame = it.frame();
Context* context = Context::cast(frame->context());
- if (context->global_context() == *debug_->debug_context()) {
+ if (context->native_context() == *debug_->debug_context()) {
it.Advance();
} else {
break;
@@ -1336,7 +1374,7 @@ Handle<Context> Isolate::GetCallingGlobalContext() {
if (it.done()) return Handle<Context>::null();
JavaScriptFrame* frame = it.frame();
Context* context = Context::cast(frame->context());
- return Handle<Context>(context->global_context());
+ return Handle<Context>(context->native_context());
}
@@ -1467,6 +1505,7 @@ Isolate::Isolate()
descriptor_lookup_cache_(NULL),
handle_scope_implementer_(NULL),
unicode_cache_(NULL),
+ runtime_zone_(this),
in_use_list_(0),
free_list_(0),
preallocated_storage_preallocated_(false),
@@ -1480,14 +1519,15 @@ Isolate::Isolate()
string_tracker_(NULL),
regexp_stack_(NULL),
date_cache_(NULL),
- context_exit_happened_(false) {
+ context_exit_happened_(false),
+ deferred_handles_head_(NULL),
+ optimizing_compiler_thread_(this) {
TRACE_ISOLATE(constructor);
memset(isolate_addresses_, 0,
sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));
heap_.isolate_ = this;
- zone_.isolate_ = this;
stack_guard_.isolate_ = this;
// ThreadManager is initialized early to support locking an isolate
@@ -1544,6 +1584,11 @@ void Isolate::TearDown() {
thread_data_table_->RemoveAllThreads(this);
}
+ if (serialize_partial_snapshot_cache_ != NULL) {
+ delete[] serialize_partial_snapshot_cache_;
+ serialize_partial_snapshot_cache_ = NULL;
+ }
+
if (!IsDefaultIsolate()) {
delete this;
}
@@ -1557,6 +1602,8 @@ void Isolate::Deinit() {
if (state_ == INITIALIZED) {
TRACE_ISOLATE(deinit);
+ if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Stop();
+
if (FLAG_hydrogen_stats) HStatistics::Instance()->Print();
// We must stop the logger before we tear down other components.
@@ -1592,6 +1639,26 @@ void Isolate::Deinit() {
}
+void Isolate::PushToPartialSnapshotCache(Object* obj) {
+ int length = serialize_partial_snapshot_cache_length();
+ int capacity = serialize_partial_snapshot_cache_capacity();
+
+ if (length >= capacity) {
+ int new_capacity = static_cast<int>((capacity + 10) * 1.2);
+ Object** new_array = new Object*[new_capacity];
+ for (int i = 0; i < length; i++) {
+ new_array[i] = serialize_partial_snapshot_cache()[i];
+ }
+ if (capacity != 0) delete[] serialize_partial_snapshot_cache();
+ set_serialize_partial_snapshot_cache(new_array);
+ set_serialize_partial_snapshot_cache_capacity(new_capacity);
+ }
+
+ serialize_partial_snapshot_cache()[length] = obj;
+ set_serialize_partial_snapshot_cache_length(length + 1);
+}
+
+
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
PerIsolateThreadData* data) {
Thread::SetThreadLocal(isolate_key_, isolate);
@@ -1603,7 +1670,7 @@ Isolate::~Isolate() {
TRACE_ISOLATE(destructor);
// Has to be called while counters_ are still alive.
- zone_.DeleteKeptSegment();
+ runtime_zone_.DeleteKeptSegment();
delete[] assembler_spare_buffer_;
assembler_spare_buffer_ = NULL;
@@ -1740,10 +1807,8 @@ bool Isolate::Init(Deserializer* des) {
ASSERT(Isolate::Current() == this);
TRACE_ISOLATE(init);
-#ifdef DEBUG
// The initialization process does not handle memory exhaustion.
DisallowAllocationFailure disallow_allocation_failure;
-#endif
InitializeLoggingAndCounters();
@@ -1775,7 +1840,7 @@ bool Isolate::Init(Deserializer* des) {
global_handles_ = new GlobalHandles(this);
bootstrapper_ = new Bootstrapper();
handle_scope_implementer_ = new HandleScopeImplementer(this);
- stub_cache_ = new StubCache(this);
+ stub_cache_ = new StubCache(this, runtime_zone());
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
date_cache_ = new DateCache();
@@ -1809,6 +1874,11 @@ bool Isolate::Init(Deserializer* des) {
return false;
}
+ if (create_heap_objects) {
+ // Terminate the cache array with the sentinel so we can iterate.
+ PushToPartialSnapshotCache(heap_.undefined_value());
+ }
+
InitializeThreadLocal();
bootstrapper_->Initialize(create_heap_objects);
@@ -1835,7 +1905,7 @@ bool Isolate::Init(Deserializer* des) {
#endif
// If we are deserializing, read the state into the now-empty heap.
- if (des != NULL) {
+ if (!create_heap_objects) {
des->Deserialize();
}
stub_cache_->Initialize();
@@ -1850,7 +1920,7 @@ bool Isolate::Init(Deserializer* des) {
heap_.SetStackLimits();
// Quiet the heap NaN if needed on target platform.
- if (des != NULL) Assembler::QuietNaN(heap_.nan_value());
+ if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());
deoptimizer_data_ = new DeoptimizerData;
runtime_profiler_ = new RuntimeProfiler(this);
@@ -1858,7 +1928,8 @@ bool Isolate::Init(Deserializer* des) {
// If we are deserializing, log non-function code objects and compiled
// functions found in the snapshot.
- if (des != NULL && (FLAG_log_code || FLAG_ll_prof)) {
+ if (create_heap_objects &&
+ (FLAG_log_code || FLAG_ll_prof || logger_->is_logging_code_events())) {
HandleScope scope;
LOG(this, LogCodeObjects());
LOG(this, LogCompiledFunctions());
@@ -1873,6 +1944,7 @@ bool Isolate::Init(Deserializer* des) {
state_ = INITIALIZED;
time_millis_at_init_ = OS::TimeCurrentMillis();
+ if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();
return true;
}
@@ -1956,6 +2028,36 @@ void Isolate::Exit() {
}
+void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
+ deferred->next_ = deferred_handles_head_;
+ if (deferred_handles_head_ != NULL) {
+ deferred_handles_head_->previous_ = deferred;
+ }
+ deferred_handles_head_ = deferred;
+}
+
+
+void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
+#ifdef DEBUG
+ // In debug mode assert that the linked list is well-formed.
+ DeferredHandles* deferred_iterator = deferred;
+ while (deferred_iterator->previous_ != NULL) {
+ deferred_iterator = deferred_iterator->previous_;
+ }
+ ASSERT(deferred_handles_head_ == deferred_iterator);
+#endif
+ if (deferred_handles_head_ == deferred) {
+ deferred_handles_head_ = deferred_handles_head_->next_;
+ }
+ if (deferred->next_ != NULL) {
+ deferred->next_->previous_ = deferred->previous_;
+ }
+ if (deferred->previous_ != NULL) {
+ deferred->previous_->next_ = deferred->next_;
+ }
+}
+
+
#ifdef DEBUG
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
diff --git a/src/3rdparty/v8/src/isolate.h b/src/3rdparty/v8/src/isolate.h
index a865d99..889a3e1 100644
--- a/src/3rdparty/v8/src/isolate.h
+++ b/src/3rdparty/v8/src/isolate.h
@@ -41,6 +41,7 @@
#include "handles.h"
#include "hashmap.h"
#include "heap.h"
+#include "optimizing-compiler-thread.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
@@ -310,19 +311,20 @@ class ThreadLocalTop BASE_EMBEDDED {
#define ISOLATE_INIT_ARRAY_LIST(V) \
/* SerializerDeserializer state. */ \
- V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
- V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
+ V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
V(int, bad_char_shift_table, kUC16AlphabetSize) \
V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \
V(int, suffix_table, (kBMMaxShift + 1)) \
V(uint32_t, private_random_seed, 2) \
ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
-typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
+typedef List<HeapObject*, PreallocatedStorageAllocationPolicy> DebugObjectCache;
#define ISOLATE_INIT_LIST(V) \
/* SerializerDeserializer state. */ \
V(int, serialize_partial_snapshot_cache_length, 0) \
+ V(int, serialize_partial_snapshot_cache_capacity, 0) \
+ V(Object**, serialize_partial_snapshot_cache, NULL) \
/* Assembler state. */ \
/* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */ \
V(byte*, assembler_spare_buffer, NULL) \
@@ -330,7 +332,7 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL) \
V(v8::Debug::MessageHandler, message_handler, NULL) \
/* To distinguish the function templates, so that we can find them in the */ \
- /* function cache of the global context. */ \
+ /* function cache of the native context. */ \
V(int, next_serial_number, 0) \
V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL) \
V(bool, always_allow_natives_syntax, false) \
@@ -355,6 +357,7 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
V(uint64_t, enabled_cpu_features, 0) \
V(CpuProfiler*, cpu_profiler, NULL) \
V(HeapProfiler*, heap_profiler, NULL) \
+ V(bool, observer_delivery_pending, false) \
ISOLATE_DEBUGGER_INIT_LIST(V)
class Isolate {
@@ -530,6 +533,11 @@ class Isolate {
thread_local_top_.save_context_ = save;
}
+ // Access to the map of "new Object()".
+ Map* empty_object_map() {
+ return context()->native_context()->object_function()->map();
+ }
+
// Access to current thread id.
ThreadId thread_id() { return thread_local_top_.thread_id_; }
void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }
@@ -581,6 +589,20 @@ class Isolate {
MaybeObject** scheduled_exception_address() {
return &thread_local_top_.scheduled_exception_;
}
+
+ Address pending_message_obj_address() {
+ return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
+ }
+
+ Address has_pending_message_address() {
+ return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
+ }
+
+ Address pending_message_script_address() {
+ return reinterpret_cast<Address>(
+ &thread_local_top_.pending_message_script_);
+ }
+
MaybeObject* scheduled_exception() {
ASSERT(has_scheduled_exception());
return thread_local_top_.scheduled_exception_;
@@ -599,6 +621,9 @@ class Isolate {
(exception != heap()->termination_exception());
}
+ // Serializer.
+ void PushToPartialSnapshotCache(Object* obj);
+
// JS execution stack (see frames.h).
static Address c_entry_fp(ThreadLocalTop* thread) {
return thread->c_entry_fp_;
@@ -623,8 +648,8 @@ class Isolate {
// Returns the global object of the current context. It could be
// a builtin object, or a JS global object.
- Handle<GlobalObject> global() {
- return Handle<GlobalObject>(context()->global());
+ Handle<GlobalObject> global_object() {
+ return Handle<GlobalObject>(context()->global_object());
}
// Returns the global proxy object of the current context.
@@ -686,6 +711,10 @@ class Isolate {
void PrintStack(StringStream* accumulator);
void PrintStack();
Handle<String> StackTraceString();
+ NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
+ Object* object,
+ Map* map,
+ unsigned int magic2));
Handle<JSArray> CaptureCurrentStackTrace(
int frame_limit,
StackTrace::StackTraceOptions options);
@@ -716,7 +745,7 @@ class Isolate {
// Re-throw an exception. This involves no error reporting since
// error reporting was handled when the exception was thrown
// originally.
- Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
+ Failure* ReThrow(MaybeObject* exception);
void ScheduleThrow(Object* exception);
void ReportPendingMessages();
Failure* ThrowIllegalOperation();
@@ -748,12 +777,13 @@ class Isolate {
void IterateThread(ThreadVisitor* v, char* t);
- // Returns the current global context.
+ // Returns the current native and global context.
+ Handle<Context> native_context();
Handle<Context> global_context();
- // Returns the global context of the calling JavaScript code. That
- // is, the global context of the top-most JavaScript frame.
- Handle<Context> GetCallingGlobalContext();
+ // Returns the native context of the calling JavaScript code. That
+ // is, the native context of the top-most JavaScript frame.
+ Handle<Context> GetCallingNativeContext();
void RegisterTryCatchHandler(v8::TryCatch* that);
void UnregisterTryCatchHandler(v8::TryCatch* that);
@@ -787,12 +817,12 @@ class Isolate {
ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR
-#define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name) \
+#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
Handle<type> name() { \
- return Handle<type>(context()->global_context()->name()); \
+ return Handle<type>(context()->native_context()->name()); \
}
- GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
-#undef GLOBAL_CONTEXT_FIELD_ACCESSOR
+ NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
+#undef NATIVE_CONTEXT_FIELD_ACCESSOR
Bootstrapper* bootstrapper() { return bootstrapper_; }
Counters* counters() {
@@ -844,7 +874,7 @@ class Isolate {
ASSERT(handle_scope_implementer_);
return handle_scope_implementer_;
}
- Zone* zone() { return &zone_; }
+ Zone* runtime_zone() { return &runtime_zone_; }
UnicodeCache* unicode_cache() {
return unicode_cache_;
@@ -970,10 +1000,7 @@ class Isolate {
Factory* factory() { return reinterpret_cast<Factory*>(this); }
- // SerializerDeserializer state.
- static const int kPartialSnapshotCacheCapacity = 1400;
-
- static const int kJSRegexpStaticOffsetsVectorSize = 50;
+ static const int kJSRegexpStaticOffsetsVectorSize = 128;
Address external_callback() {
return thread_local_top_.external_callback_;
@@ -1040,6 +1067,14 @@ class Isolate {
date_cache_ = date_cache;
}
+ void IterateDeferredHandles(ObjectVisitor* visitor);
+ void LinkDeferredHandles(DeferredHandles* deferred_handles);
+ void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
+
+ OptimizingCompilerThread* optimizing_compiler_thread() {
+ return &optimizing_compiler_thread_;
+ }
+
private:
Isolate();
@@ -1190,7 +1225,7 @@ class Isolate {
v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
HandleScopeImplementer* handle_scope_implementer_;
UnicodeCache* unicode_cache_;
- Zone zone_;
+ Zone runtime_zone_;
PreallocatedStorage in_use_list_;
PreallocatedStorage free_list_;
bool preallocated_storage_preallocated_;
@@ -1263,8 +1298,13 @@ class Isolate {
#undef ISOLATE_FIELD_OFFSET
#endif
+ DeferredHandles* deferred_handles_head_;
+ OptimizingCompilerThread optimizing_compiler_thread_;
+
friend class ExecutionAccess;
+ friend class HandleScopeImplementer;
friend class IsolateInitializer;
+ friend class OptimizingCompilerThread;
friend class ThreadManager;
friend class Simulator;
friend class StackGuard;
@@ -1402,19 +1442,18 @@ class PostponeInterruptsScope BASE_EMBEDDED {
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
#define ISOLATE (v8::internal::Isolate::Current())
-#define ZONE (v8::internal::Isolate::Current()->zone())
#define LOGGER (v8::internal::Isolate::Current()->logger())
-// Tells whether the global context is marked with out of memory.
+// Tells whether the native context is marked with out of memory.
inline bool Context::has_out_of_memory() {
- return global_context()->out_of_memory()->IsTrue();
+ return native_context()->out_of_memory()->IsTrue();
}
-// Mark the global context with out of memory.
+// Mark the native context with out of memory.
inline void Context::mark_out_of_memory() {
- global_context()->set_out_of_memory(HEAP->true_value());
+ native_context()->set_out_of_memory(HEAP->true_value());
}
diff --git a/src/3rdparty/v8/src/json-parser.h b/src/3rdparty/v8/src/json-parser.h
index d22cd0d..6f8c715 100644
--- a/src/3rdparty/v8/src/json-parser.h
+++ b/src/3rdparty/v8/src/json-parser.h
@@ -43,15 +43,15 @@ namespace internal {
template <bool seq_ascii>
class JsonParser BASE_EMBEDDED {
public:
- static Handle<Object> Parse(Handle<String> source) {
- return JsonParser().ParseJson(source);
+ static Handle<Object> Parse(Handle<String> source, Zone* zone) {
+ return JsonParser().ParseJson(source, zone);
}
static const int kEndOfString = -1;
private:
// Parse a string containing a single JSON value.
- Handle<Object> ParseJson(Handle<String> source);
+ Handle<Object> ParseJson(Handle<String> source, Zone* zone);
inline void Advance() {
position_++;
@@ -71,11 +71,11 @@ class JsonParser BASE_EMBEDDED {
inline void AdvanceSkipWhitespace() {
do {
Advance();
- } while (c0_ == '\t' || c0_ == '\r' || c0_ == '\n' || c0_ == ' ');
+ } while (c0_ == ' ' || c0_ == '\t' || c0_ == '\n' || c0_ == '\r');
}
inline void SkipWhitespace() {
- while (c0_ == '\t' || c0_ == '\r' || c0_ == '\n' || c0_ == ' ') {
+ while (c0_ == ' ' || c0_ == '\t' || c0_ == '\n' || c0_ == '\r') {
Advance();
}
}
@@ -149,8 +149,12 @@ class JsonParser BASE_EMBEDDED {
}
inline Isolate* isolate() { return isolate_; }
+ inline Factory* factory() { return factory_; }
+ inline Handle<JSFunction> object_constructor() { return object_constructor_; }
+ inline Zone* zone() const { return zone_; }
static const int kInitialSpecialStringLength = 1024;
+ static const int kPretenureTreshold = 100 * 1024;
private:
@@ -158,17 +162,27 @@ class JsonParser BASE_EMBEDDED {
int source_length_;
Handle<SeqAsciiString> seq_source_;
+ PretenureFlag pretenure_;
Isolate* isolate_;
+ Factory* factory_;
+ Handle<JSFunction> object_constructor_;
uc32 c0_;
int position_;
+ Zone* zone_;
};
template <bool seq_ascii>
-Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
+Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source,
+ Zone* zone) {
isolate_ = source->map()->GetHeap()->isolate();
+ factory_ = isolate_->factory();
+ object_constructor_ = Handle<JSFunction>(
+ isolate()->native_context()->object_function(), isolate());
+ zone_ = zone;
FlattenString(source);
source_ = source;
source_length_ = source_->length();
+ pretenure_ = (source_length_ >= kPretenureTreshold) ? TENURED : NOT_TENURED;
// Optimized fast case where we only have ASCII characters.
if (seq_ascii) {
@@ -181,10 +195,12 @@ Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
AdvanceSkipWhitespace();
Handle<Object> result = ParseJsonValue();
if (result.is_null() || c0_ != kEndOfString) {
- // Parse failed. Current character is the unexpected token.
+ // Some exception (for example stack overflow) is already pending.
+ if (isolate_->has_pending_exception()) return Handle<Object>::null();
+ // Parse failed. Current character is the unexpected token.
const char* message;
- Factory* factory = isolate()->factory();
+ Factory* factory = this->factory();
Handle<JSArray> array;
switch (c0_) {
@@ -233,87 +249,118 @@ Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
// Parse any JSON value.
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJsonValue() {
- switch (c0_) {
- case '"':
- return ParseJsonString();
- case '-':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- return ParseJsonNumber();
- case 'f':
- if (AdvanceGetChar() == 'a' && AdvanceGetChar() == 'l' &&
- AdvanceGetChar() == 's' && AdvanceGetChar() == 'e') {
- AdvanceSkipWhitespace();
- return isolate()->factory()->false_value();
- } else {
- return ReportUnexpectedCharacter();
- }
- case 't':
- if (AdvanceGetChar() == 'r' && AdvanceGetChar() == 'u' &&
- AdvanceGetChar() == 'e') {
- AdvanceSkipWhitespace();
- return isolate()->factory()->true_value();
- } else {
- return ReportUnexpectedCharacter();
- }
- case 'n':
- if (AdvanceGetChar() == 'u' && AdvanceGetChar() == 'l' &&
- AdvanceGetChar() == 'l') {
- AdvanceSkipWhitespace();
- return isolate()->factory()->null_value();
- } else {
- return ReportUnexpectedCharacter();
- }
- case '{':
- return ParseJsonObject();
- case '[':
- return ParseJsonArray();
- default:
- return ReportUnexpectedCharacter();
+ StackLimitCheck stack_check(isolate_);
+ if (stack_check.HasOverflowed()) {
+ isolate_->StackOverflow();
+ return Handle<Object>::null();
+ }
+
+ if (c0_ == '"') return ParseJsonString();
+ if ((c0_ >= '0' && c0_ <= '9') || c0_ == '-') return ParseJsonNumber();
+ if (c0_ == '{') return ParseJsonObject();
+ if (c0_ == '[') return ParseJsonArray();
+ if (c0_ == 'f') {
+ if (AdvanceGetChar() == 'a' && AdvanceGetChar() == 'l' &&
+ AdvanceGetChar() == 's' && AdvanceGetChar() == 'e') {
+ AdvanceSkipWhitespace();
+ return factory()->false_value();
+ }
+ return ReportUnexpectedCharacter();
+ }
+ if (c0_ == 't') {
+ if (AdvanceGetChar() == 'r' && AdvanceGetChar() == 'u' &&
+ AdvanceGetChar() == 'e') {
+ AdvanceSkipWhitespace();
+ return factory()->true_value();
+ }
+ return ReportUnexpectedCharacter();
+ }
+ if (c0_ == 'n') {
+ if (AdvanceGetChar() == 'u' && AdvanceGetChar() == 'l' &&
+ AdvanceGetChar() == 'l') {
+ AdvanceSkipWhitespace();
+ return factory()->null_value();
+ }
+ return ReportUnexpectedCharacter();
}
+ return ReportUnexpectedCharacter();
}
// Parse a JSON object. Position must be right at '{'.
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
- Handle<JSFunction> object_constructor(
- isolate()->global_context()->object_function());
+ Handle<Object> prototype;
Handle<JSObject> json_object =
- isolate()->factory()->NewJSObject(object_constructor);
+ factory()->NewJSObject(object_constructor(), pretenure_);
ASSERT_EQ(c0_, '{');
AdvanceSkipWhitespace();
if (c0_ != '}') {
do {
if (c0_ != '"') return ReportUnexpectedCharacter();
+
+ int start_position = position_;
+ Advance();
+
+ uint32_t index = 0;
+ if (c0_ >= '0' && c0_ <= '9') {
+ // Maybe an array index, try to parse it.
+ if (c0_ == '0') {
+ // With a leading zero, the string has to be "0" only to be an index.
+ Advance();
+ } else {
+ do {
+ int d = c0_ - '0';
+ if (index > 429496729U - ((d > 5) ? 1 : 0)) break;
+ index = (index * 10) + d;
+ Advance();
+ } while (c0_ >= '0' && c0_ <= '9');
+ }
+
+ if (c0_ == '"') {
+ // Successfully parsed index, parse and store element.
+ AdvanceSkipWhitespace();
+
+ if (c0_ != ':') return ReportUnexpectedCharacter();
+ AdvanceSkipWhitespace();
+ Handle<Object> value = ParseJsonValue();
+ if (value.is_null()) return ReportUnexpectedCharacter();
+
+ JSObject::SetOwnElement(json_object, index, value, kNonStrictMode);
+ continue;
+ }
+ // Not an index, fallback to the slow path.
+ }
+
+ position_ = start_position;
+#ifdef DEBUG
+ c0_ = '"';
+#endif
+
Handle<String> key = ParseJsonSymbol();
if (key.is_null() || c0_ != ':') return ReportUnexpectedCharacter();
+
AdvanceSkipWhitespace();
Handle<Object> value = ParseJsonValue();
if (value.is_null()) return ReportUnexpectedCharacter();
- uint32_t index;
- if (key->AsArrayIndex(&index)) {
- JSObject::SetOwnElement(json_object, index, value, kNonStrictMode);
- } else if (key->Equals(isolate()->heap()->Proto_symbol())) {
- SetPrototype(json_object, value);
+ if (key->Equals(isolate()->heap()->Proto_symbol())) {
+ prototype = value;
} else {
- JSObject::SetLocalPropertyIgnoreAttributes(
- json_object, key, value, NONE);
+ if (JSObject::TryTransitionToField(json_object, key)) {
+ int index = json_object->LastAddedFieldIndex();
+ json_object->FastPropertyAtPut(index, *value);
+ } else {
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ json_object, key, value, NONE);
+ }
}
} while (MatchSkipWhiteSpace(','));
if (c0_ != '}') {
return ReportUnexpectedCharacter();
}
+ if (!prototype.is_null()) SetPrototype(json_object, prototype);
}
AdvanceSkipWhitespace();
return json_object;
@@ -322,8 +369,8 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
// Parse a JSON array. Position must be right at '['.
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJsonArray() {
- ZoneScope zone_scope(isolate(), DELETE_ON_EXIT);
- ZoneList<Handle<Object> > elements(4);
+ ZoneScope zone_scope(zone(), DELETE_ON_EXIT);
+ ZoneList<Handle<Object> > elements(4, zone());
ASSERT_EQ(c0_, '[');
AdvanceSkipWhitespace();
@@ -331,7 +378,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonArray() {
do {
Handle<Object> element = ParseJsonValue();
if (element.is_null()) return ReportUnexpectedCharacter();
- elements.Add(element);
+ elements.Add(element, zone());
} while (MatchSkipWhiteSpace(','));
if (c0_ != ']') {
return ReportUnexpectedCharacter();
@@ -340,11 +387,12 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonArray() {
AdvanceSkipWhitespace();
// Allocate a fixed array with all the elements.
Handle<FixedArray> fast_elements =
- isolate()->factory()->NewFixedArray(elements.length());
+ factory()->NewFixedArray(elements.length(), pretenure_);
for (int i = 0, n = elements.length(); i < n; i++) {
fast_elements->set(i, *elements[i]);
}
- return isolate()->factory()->NewJSArrayWithElements(fast_elements);
+ return factory()->NewJSArrayWithElements(
+ fast_elements, FAST_ELEMENTS, pretenure_);
}
@@ -411,7 +459,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonNumber() {
buffer.Dispose();
}
SkipWhitespace();
- return isolate()->factory()->NewNumber(number);
+ return factory()->NewNumber(number, pretenure_);
}
@@ -429,16 +477,22 @@ inline void SeqStringSet(Handle<SeqAsciiString> seq_str, int i, uc32 c) {
}
template <typename StringType>
-inline Handle<StringType> NewRawString(Factory* factory, int length);
+inline Handle<StringType> NewRawString(Factory* factory,
+ int length,
+ PretenureFlag pretenure);
template <>
-inline Handle<SeqTwoByteString> NewRawString(Factory* factory, int length) {
- return factory->NewRawTwoByteString(length, NOT_TENURED);
+inline Handle<SeqTwoByteString> NewRawString(Factory* factory,
+ int length,
+ PretenureFlag pretenure) {
+ return factory->NewRawTwoByteString(length, pretenure);
}
template <>
-inline Handle<SeqAsciiString> NewRawString(Factory* factory, int length) {
- return factory->NewRawAsciiString(length, NOT_TENURED);
+inline Handle<SeqAsciiString> NewRawString(Factory* factory,
+ int length,
+ PretenureFlag pretenure) {
+ return factory->NewRawAsciiString(length, pretenure);
}
@@ -452,8 +506,8 @@ Handle<String> JsonParser<seq_ascii>::SlowScanJsonString(
int count = end - start;
int max_length = count + source_length_ - position_;
int length = Min(max_length, Max(kInitialSpecialStringLength, 2 * count));
- Handle<StringType> seq_str = NewRawString<StringType>(isolate()->factory(),
- length);
+ Handle<StringType> seq_str =
+ NewRawString<StringType>(factory(), length, pretenure_);
// Copy prefix into seq_str.
SinkChar* dest = seq_str->GetChars();
String::WriteToFlat(*prefix, dest, start, end);
@@ -557,8 +611,58 @@ Handle<String> JsonParser<seq_ascii>::ScanJsonString() {
Advance();
if (c0_ == '"') {
AdvanceSkipWhitespace();
- return Handle<String>(isolate()->heap()->empty_string());
+ return factory()->empty_string();
+ }
+
+ if (seq_ascii && is_symbol) {
+ // Fast path for existing symbols. If the the string being parsed is not
+ // a known symbol, contains backslashes or unexpectedly reaches the end of
+ // string, return with an empty handle.
+ uint32_t running_hash = isolate()->heap()->HashSeed();
+ int position = position_;
+ uc32 c0 = c0_;
+ do {
+ if (c0 == '\\') {
+ c0_ = c0;
+ int beg_pos = position_;
+ position_ = position;
+ return SlowScanJsonString<SeqAsciiString, char>(source_,
+ beg_pos,
+ position_);
+ }
+ if (c0 < 0x20) return Handle<String>::null();
+ running_hash = StringHasher::AddCharacterCore(running_hash, c0);
+ position++;
+ if (position >= source_length_) return Handle<String>::null();
+ c0 = seq_source_->SeqAsciiStringGet(position);
+ } while (c0 != '"');
+ int length = position - position_;
+ uint32_t hash = (length <= String::kMaxHashCalcLength)
+ ? StringHasher::GetHashCore(running_hash) : length;
+ Vector<const char> string_vector(
+ seq_source_->GetChars() + position_, length);
+ SymbolTable* symbol_table = isolate()->heap()->symbol_table();
+ uint32_t capacity = symbol_table->Capacity();
+ uint32_t entry = SymbolTable::FirstProbe(hash, capacity);
+ uint32_t count = 1;
+ while (true) {
+ Object* element = symbol_table->KeyAt(entry);
+ if (element == isolate()->heap()->undefined_value()) {
+ // Lookup failure.
+ break;
+ }
+ if (element != isolate()->heap()->the_hole_value() &&
+ String::cast(element)->IsAsciiEqualTo(string_vector)) {
+ // Lookup success, update the current position.
+ position_ = position;
+ // Advance past the last '"'.
+ AdvanceSkipWhitespace();
+ return Handle<String>(String::cast(element), isolate());
+ }
+ entry = SymbolTable::NextProbe(entry, count++, capacity);
+ }
}
+
int beg_pos = position_;
// Fast case for ASCII only without escape characters.
do {
@@ -581,11 +685,11 @@ Handle<String> JsonParser<seq_ascii>::ScanJsonString() {
int length = position_ - beg_pos;
Handle<String> result;
if (seq_ascii && is_symbol) {
- result = isolate()->factory()->LookupAsciiSymbol(seq_source_,
- beg_pos,
- length);
+ result = factory()->LookupAsciiSymbol(seq_source_,
+ beg_pos,
+ length);
} else {
- result = isolate()->factory()->NewRawAsciiString(length);
+ result = factory()->NewRawAsciiString(length, pretenure_);
char* dest = SeqAsciiString::cast(*result)->GetChars();
String::WriteToFlat(*source_, dest, beg_pos, position_);
}
diff --git a/src/3rdparty/v8/src/json-stringifier.h b/src/3rdparty/v8/src/json-stringifier.h
new file mode 100644
index 0000000..cdb724f
--- /dev/null
+++ b/src/3rdparty/v8/src/json-stringifier.h
@@ -0,0 +1,800 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_JSON_STRINGIFIER_H_
+#define V8_JSON_STRINGIFIER_H_
+
+#include "v8.h"
+#include "v8utils.h"
+#include "v8conversions.h"
+
+namespace v8 {
+namespace internal {
+
+class BasicJsonStringifier BASE_EMBEDDED {
+ public:
+ explicit BasicJsonStringifier(Isolate* isolate);
+
+ MaybeObject* Stringify(Handle<Object> object);
+
+ private:
+ static const int kInitialPartLength = 32;
+ static const int kMaxPartLength = 16 * 1024;
+ static const int kPartLengthGrowthFactor = 2;
+
+ enum Result { UNCHANGED, SUCCESS, EXCEPTION, CIRCULAR, STACK_OVERFLOW };
+
+ void Extend();
+
+ void ChangeEncoding();
+
+ void ShrinkCurrentPart();
+
+ template <bool is_ascii, typename Char>
+ INLINE(void Append_(Char c));
+
+ template <bool is_ascii, typename Char>
+ INLINE(void Append_(const Char* chars));
+
+ INLINE(void Append(char c)) {
+ if (is_ascii_) {
+ Append_<true>(c);
+ } else {
+ Append_<false>(c);
+ }
+ }
+
+ INLINE(void Append(const char* chars)) {
+ if (is_ascii_) {
+ Append_<true>(chars);
+ } else {
+ Append_<false>(chars);
+ }
+ }
+
+ Handle<Object> GetProperty(Handle<JSObject> object,
+ Handle<String> key);
+
+ Handle<Object> ApplyToJsonFunction(Handle<Object> object,
+ Handle<Object> key);
+
+ Result SerializeGeneric(Handle<Object> object,
+ Handle<Object> key,
+ bool deferred_comma,
+ bool deferred_key);
+
+ // Entry point to serialize the object.
+ INLINE(Result SerializeObject(Handle<Object> obj)) {
+ return Serialize_<false>(obj, false, factory_->empty_string());
+ }
+
+ // Serialize an array element.
+ // The index may serve as argument for the toJSON function.
+ INLINE(Result SerializeElement(Handle<Object> object, int i)) {
+ return Serialize_<false>(object, false, Handle<Object>(Smi::FromInt(i)));
+ }
+
+ // Serialize a object property.
+ // The key may or may not be serialized depending on the property.
+ // The key may also serve as argument for the toJSON function.
+ INLINE(Result SerializeProperty(Handle<Object> object,
+ bool deferred_comma,
+ Handle<String> deferred_key)) {
+ ASSERT(!deferred_key.is_null());
+ return Serialize_<true>(object, deferred_comma, deferred_key);
+ }
+
+ template <bool deferred_string_key>
+ Result Serialize_(Handle<Object> object, bool comma, Handle<Object> key);
+
+ void SerializeDeferredKey(bool deferred_comma, Handle<Object> deferred_key) {
+ if (deferred_comma) Append(',');
+ SerializeString(Handle<String>::cast(deferred_key));
+ Append(':');
+ }
+
+ Result SerializeSmi(Smi* object);
+
+ Result SerializeDouble(double number);
+ INLINE(Result SerializeHeapNumber(Handle<HeapNumber> object)) {
+ return SerializeDouble(object->value());
+ }
+
+ Result SerializeJSValue(Handle<JSValue> object);
+
+ INLINE(Result SerializeJSArray(Handle<JSArray> object));
+ INLINE(Result SerializeJSObject(Handle<JSObject> object));
+
+ Result SerializeJSArraySlow(Handle<JSArray> object, int length);
+
+ void SerializeString(Handle<String> object);
+
+ template <typename SrcChar, typename DestChar>
+ INLINE(void SerializeStringUnchecked_(const SrcChar* src,
+ DestChar* dest,
+ int length));
+
+ template <bool is_ascii, typename Char>
+ INLINE(void SerializeString_(Vector<const Char> vector,
+ Handle<String> string));
+
+ template <typename Char>
+ INLINE(bool DoNotEscape(Char c));
+
+ template <typename Char>
+ INLINE(Vector<const Char> GetCharVector(Handle<String> string));
+
+ Result StackPush(Handle<Object> object);
+ void StackPop();
+
+ INLINE(Handle<String> accumulator()) {
+ return Handle<String>(String::cast(accumulator_store_->value()), isolate_);
+ }
+
+ INLINE(void set_accumulator(Handle<String> string)) {
+ return accumulator_store_->set_value(*string);
+ }
+
+ Isolate* isolate_;
+ Factory* factory_;
+ // We use a value wrapper for the string accumulator to keep the
+ // (indirect) handle to it in the outermost handle scope.
+ Handle<JSValue> accumulator_store_;
+ Handle<String> current_part_;
+ Handle<String> tojson_symbol_;
+ Handle<JSArray> stack_;
+ int current_index_;
+ int part_length_;
+ bool is_ascii_;
+
+ static const int kJsonEscapeTableEntrySize = 8;
+ static const char* const JsonEscapeTable;
+};
+
+
+// Translation table to escape ASCII characters.
+// Table entries start at a multiple of 8 and are null-terminated.
+const char* const BasicJsonStringifier::JsonEscapeTable =
+ "\\u0000\0 \\u0001\0 \\u0002\0 \\u0003\0 "
+ "\\u0004\0 \\u0005\0 \\u0006\0 \\u0007\0 "
+ "\\b\0 \\t\0 \\n\0 \\u000b\0 "
+ "\\f\0 \\r\0 \\u000e\0 \\u000f\0 "
+ "\\u0010\0 \\u0011\0 \\u0012\0 \\u0013\0 "
+ "\\u0014\0 \\u0015\0 \\u0016\0 \\u0017\0 "
+ "\\u0018\0 \\u0019\0 \\u001a\0 \\u001b\0 "
+ "\\u001c\0 \\u001d\0 \\u001e\0 \\u001f\0 "
+ " \0 !\0 \\\"\0 #\0 "
+ "$\0 %\0 &\0 '\0 "
+ "(\0 )\0 *\0 +\0 "
+ ",\0 -\0 .\0 /\0 "
+ "0\0 1\0 2\0 3\0 "
+ "4\0 5\0 6\0 7\0 "
+ "8\0 9\0 :\0 ;\0 "
+ "<\0 =\0 >\0 ?\0 "
+ "@\0 A\0 B\0 C\0 "
+ "D\0 E\0 F\0 G\0 "
+ "H\0 I\0 J\0 K\0 "
+ "L\0 M\0 N\0 O\0 "
+ "P\0 Q\0 R\0 S\0 "
+ "T\0 U\0 V\0 W\0 "
+ "X\0 Y\0 Z\0 [\0 "
+ "\\\\\0 ]\0 ^\0 _\0 "
+ "`\0 a\0 b\0 c\0 "
+ "d\0 e\0 f\0 g\0 "
+ "h\0 i\0 j\0 k\0 "
+ "l\0 m\0 n\0 o\0 "
+ "p\0 q\0 r\0 s\0 "
+ "t\0 u\0 v\0 w\0 "
+ "x\0 y\0 z\0 {\0 "
+ "|\0 }\0 ~\0 \177\0 ";
+
+
+BasicJsonStringifier::BasicJsonStringifier(Isolate* isolate)
+ : isolate_(isolate), current_index_(0), is_ascii_(true) {
+ factory_ = isolate_->factory();
+ accumulator_store_ = Handle<JSValue>::cast(
+ factory_->ToObject(factory_->empty_string()));
+ part_length_ = kInitialPartLength;
+ current_part_ = factory_->NewRawAsciiString(kInitialPartLength);
+ tojson_symbol_ = factory_->LookupAsciiSymbol("toJSON");
+ stack_ = factory_->NewJSArray(8);
+}
+
+
+MaybeObject* BasicJsonStringifier::Stringify(Handle<Object> object) {
+ switch (SerializeObject(object)) {
+ case UNCHANGED:
+ return isolate_->heap()->undefined_value();
+ case SUCCESS:
+ ShrinkCurrentPart();
+ return *factory_->NewConsString(accumulator(), current_part_);
+ case CIRCULAR:
+ return isolate_->Throw(*factory_->NewTypeError(
+ "circular_structure", HandleVector<Object>(NULL, 0)));
+ case STACK_OVERFLOW:
+ return isolate_->StackOverflow();
+ default:
+ return Failure::Exception();
+ }
+}
+
+
+template <bool is_ascii, typename Char>
+void BasicJsonStringifier::Append_(Char c) {
+ if (is_ascii) {
+ SeqAsciiString::cast(*current_part_)->SeqAsciiStringSet(
+ current_index_++, c);
+ } else {
+ SeqTwoByteString::cast(*current_part_)->SeqTwoByteStringSet(
+ current_index_++, c);
+ }
+ if (current_index_ == part_length_) Extend();
+}
+
+
+template <bool is_ascii, typename Char>
+void BasicJsonStringifier::Append_(const Char* chars) {
+ for ( ; *chars != '\0'; chars++) Append_<is_ascii, Char>(*chars);
+}
+
+
+Handle<Object> BasicJsonStringifier::GetProperty(Handle<JSObject> object,
+ Handle<String> key) {
+ LookupResult lookup(isolate_);
+ object->LocalLookupRealNamedProperty(*key, &lookup);
+ if (!lookup.IsProperty()) return factory_->undefined_value();
+ switch (lookup.type()) {
+ case NORMAL: {
+ Object* value = lookup.holder()->GetNormalizedProperty(&lookup);
+ ASSERT(!value->IsTheHole());
+ return Handle<Object>(value, isolate_);
+ }
+ case FIELD: {
+ Object* value = lookup.holder()->FastPropertyAt(lookup.GetFieldIndex());
+ ASSERT(!value->IsTheHole());
+ return Handle<Object>(value, isolate_);
+ }
+ case CONSTANT_FUNCTION:
+ return Handle<Object>(lookup.GetConstantFunction(), isolate_);
+ default: {
+ PropertyAttributes attr;
+ return Object::GetProperty(object, object, &lookup, key, &attr);
+ }
+ }
+ return Handle<Object>::null();
+}
+
+
+Handle<Object> BasicJsonStringifier::ApplyToJsonFunction(
+ Handle<Object> object, Handle<Object> key) {
+ LookupResult lookup(isolate_);
+ JSObject::cast(*object)->LookupRealNamedProperty(*tojson_symbol_, &lookup);
+ if (!lookup.IsProperty()) return object;
+ PropertyAttributes attr;
+ Handle<Object> fun =
+ Object::GetProperty(object, object, &lookup, tojson_symbol_, &attr);
+ if (!fun->IsJSFunction()) return object;
+
+ // Call toJSON function.
+ if (key->IsSmi()) key = factory_->NumberToString(key);
+ Handle<Object> argv[] = { key };
+ bool has_exception = false;
+ HandleScope scope(isolate_);
+ object = Execution::Call(fun, object, 1, argv, &has_exception);
+ // Return empty handle to signal an exception.
+ if (has_exception) return Handle<Object>::null();
+ return scope.CloseAndEscape(object);
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::StackPush(
+ Handle<Object> object) {
+ StackLimitCheck check(isolate_);
+ if (check.HasOverflowed()) return STACK_OVERFLOW;
+
+ int length = Smi::cast(stack_->length())->value();
+ FixedArray* elements = FixedArray::cast(stack_->elements());
+ for (int i = 0; i < length; i++) {
+ if (elements->get(i) == *object) {
+ return CIRCULAR;
+ }
+ }
+ stack_->EnsureSize(length + 1);
+ FixedArray::cast(stack_->elements())->set(length, *object);
+ stack_->set_length(Smi::FromInt(length + 1));
+ return SUCCESS;
+}
+
+
+void BasicJsonStringifier::StackPop() {
+ int length = Smi::cast(stack_->length())->value();
+ stack_->set_length(Smi::FromInt(length - 1));
+}
+
+
+template <bool deferred_string_key>
+BasicJsonStringifier::Result BasicJsonStringifier::Serialize_(
+ Handle<Object> object, bool comma, Handle<Object> key) {
+ if (object->IsJSObject()) {
+ object = ApplyToJsonFunction(object, key);
+ if (object.is_null()) return EXCEPTION;
+ }
+
+ if (object->IsSmi()) {
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ return SerializeSmi(Smi::cast(*object));
+ }
+
+ switch (HeapObject::cast(*object)->map()->instance_type()) {
+ case HEAP_NUMBER_TYPE:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ return SerializeHeapNumber(Handle<HeapNumber>::cast(object));
+ case ODDBALL_TYPE:
+ switch (Oddball::cast(*object)->kind()) {
+ case Oddball::kFalse:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ Append("false");
+ return SUCCESS;
+ case Oddball::kTrue:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ Append("true");
+ return SUCCESS;
+ case Oddball::kNull:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ Append("null");
+ return SUCCESS;
+ default:
+ return UNCHANGED;
+ }
+ case JS_ARRAY_TYPE:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ return SerializeJSArray(Handle<JSArray>::cast(object));
+ case JS_VALUE_TYPE:
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ return SerializeJSValue(Handle<JSValue>::cast(object));
+ case JS_FUNCTION_TYPE:
+ return UNCHANGED;
+ default:
+ if (object->IsString()) {
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ SerializeString(Handle<String>::cast(object));
+ return SUCCESS;
+ } else if (object->IsJSObject()) {
+ if (deferred_string_key) SerializeDeferredKey(comma, key);
+ return SerializeJSObject(Handle<JSObject>::cast(object));
+ } else {
+ return SerializeGeneric(object, key, comma, deferred_string_key);
+ }
+ }
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeGeneric(
+ Handle<Object> object,
+ Handle<Object> key,
+ bool deferred_comma,
+ bool deferred_key) {
+ Handle<JSObject> builtins(isolate_->native_context()->builtins());
+ Handle<JSFunction> builtin = Handle<JSFunction>::cast(
+ v8::internal::GetProperty(builtins, "JSONSerializeAdapter"));
+
+ Handle<Object> argv[] = { key, object };
+ bool has_exception = false;
+ Handle<Object> result =
+ Execution::Call(builtin, object, 2, argv, &has_exception);
+ if (has_exception) return EXCEPTION;
+ if (result->IsUndefined()) return UNCHANGED;
+ if (deferred_key) {
+ if (key->IsSmi()) key = factory_->NumberToString(key);
+ SerializeDeferredKey(deferred_comma, key);
+ }
+
+ Handle<String> result_string = Handle<String>::cast(result);
+ // Shrink current part, attach it to the accumulator, also attach the result
+ // string to the accumulator, and allocate a new part.
+ ShrinkCurrentPart(); // Shrink.
+ part_length_ = kInitialPartLength; // Allocate conservatively.
+ Extend(); // Attach current part and allocate new part.
+ // Attach result string to the accumulator.
+ set_accumulator(factory_->NewConsString(accumulator(), result_string));
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSValue(
+ Handle<JSValue> object) {
+ bool has_exception = false;
+ String* class_name = object->class_name();
+ if (class_name == isolate_->heap()->String_symbol()) {
+ Handle<Object> value = Execution::ToString(object, &has_exception);
+ if (has_exception) return EXCEPTION;
+ SerializeString(Handle<String>::cast(value));
+ } else if (class_name == isolate_->heap()->Number_symbol()) {
+ Handle<Object> value = Execution::ToNumber(object, &has_exception);
+ if (has_exception) return EXCEPTION;
+ if (value->IsSmi()) return SerializeSmi(Smi::cast(*value));
+ SerializeHeapNumber(Handle<HeapNumber>::cast(value));
+ } else {
+ ASSERT(class_name == isolate_->heap()->Boolean_symbol());
+ Object* value = JSValue::cast(*object)->value();
+ ASSERT(value->IsBoolean());
+ Append(value->IsTrue() ? "true" : "false");
+ }
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeSmi(Smi* object) {
+ static const int kBufferSize = 100;
+ char chars[kBufferSize];
+ Vector<char> buffer(chars, kBufferSize);
+ Append(IntToCString(object->value(), buffer));
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeDouble(
+ double number) {
+ if (isinf(number) || isnan(number)) {
+ Append("null");
+ return SUCCESS;
+ }
+ static const int kBufferSize = 100;
+ char chars[kBufferSize];
+ Vector<char> buffer(chars, kBufferSize);
+ Append(DoubleToCString(number, buffer));
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArray(
+ Handle<JSArray> object) {
+ HandleScope handle_scope(isolate_);
+ Result stack_push = StackPush(object);
+ if (stack_push != SUCCESS) return stack_push;
+ int length = Smi::cast(object->length())->value();
+ Append('[');
+ switch (object->GetElementsKind()) {
+ case FAST_SMI_ELEMENTS: {
+ Handle<FixedArray> elements(
+ FixedArray::cast(object->elements()), isolate_);
+ for (int i = 0; i < length; i++) {
+ if (i > 0) Append(',');
+ SerializeSmi(Smi::cast(elements->get(i)));
+ }
+ break;
+ }
+ case FAST_DOUBLE_ELEMENTS: {
+ Handle<FixedDoubleArray> elements(
+ FixedDoubleArray::cast(object->elements()), isolate_);
+ for (int i = 0; i < length; i++) {
+ if (i > 0) Append(',');
+ SerializeDouble(elements->get_scalar(i));
+ }
+ break;
+ }
+ case FAST_ELEMENTS: {
+ Handle<FixedArray> elements(
+ FixedArray::cast(object->elements()), isolate_);
+ for (int i = 0; i < length; i++) {
+ if (i > 0) Append(',');
+ Result result =
+ SerializeElement(Handle<Object>(elements->get(i), isolate_), i);
+ if (result == SUCCESS) continue;
+ if (result == UNCHANGED) {
+ Append("null");
+ } else {
+ return result;
+ }
+ }
+ break;
+ }
+ // TODO(yangguo): The FAST_HOLEY_* cases could be handled in a faster way.
+ // They resemble the non-holey cases except that a prototype chain lookup
+ // is necessary for holes.
+ default: {
+ Result result = SerializeJSArraySlow(object, length);
+ if (result != SUCCESS) return result;
+ break;
+ }
+ }
+ Append(']');
+ StackPop();
+ current_part_ = handle_scope.CloseAndEscape(current_part_);
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSArraySlow(
+ Handle<JSArray> object, int length) {
+ for (int i = 0; i < length; i++) {
+ if (i > 0) Append(',');
+ Handle<Object> element = Object::GetElement(object, i);
+ if (element->IsUndefined()) {
+ Append("null");
+ } else {
+ Result result = SerializeElement(element, i);
+ if (result == SUCCESS) continue;
+ if (result == UNCHANGED) {
+ Append("null");
+ } else {
+ return result;
+ }
+ }
+ }
+ return SUCCESS;
+}
+
+
+BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSObject(
+ Handle<JSObject> object) {
+ HandleScope handle_scope(isolate_);
+ Result stack_push = StackPush(object);
+ if (stack_push != SUCCESS) return stack_push;
+ if (object->IsJSGlobalProxy()) {
+ object = Handle<JSObject>(
+ JSObject::cast(object->GetPrototype()), isolate_);
+ ASSERT(object->IsGlobalObject());
+ }
+
+ Append('{');
+ bool comma = false;
+
+ if (object->HasFastProperties() &&
+ !object->HasIndexedInterceptor() &&
+ !object->HasNamedInterceptor() &&
+ object->elements()->length() == 0) {
+ Handle<Map> map(object->map());
+ for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
+ Handle<String> key(map->instance_descriptors()->GetKey(i), isolate_);
+ PropertyDetails details = map->instance_descriptors()->GetDetails(i);
+ if (details.IsDontEnum() || details.IsDeleted()) continue;
+ Handle<Object> property;
+ if (details.type() == FIELD && *map == object->map()) {
+ property = Handle<Object>(
+ object->FastPropertyAt(
+ map->instance_descriptors()->GetFieldIndex(i)),
+ isolate_);
+ } else {
+ property = GetProperty(object, key);
+ if (property.is_null()) return EXCEPTION;
+ }
+ Result result = SerializeProperty(property, comma, key);
+ if (!comma && result == SUCCESS) comma = true;
+ if (result >= EXCEPTION) return result;
+ }
+ } else {
+ bool has_exception = false;
+ Handle<FixedArray> contents =
+ GetKeysInFixedArrayFor(object, LOCAL_ONLY, &has_exception);
+ if (has_exception) return EXCEPTION;
+
+ for (int i = 0; i < contents->length(); i++) {
+ Object* key = contents->get(i);
+ Handle<String> key_handle;
+ Handle<Object> property;
+ if (key->IsString()) {
+ key_handle = Handle<String>(String::cast(key), isolate_);
+ property = GetProperty(object, key_handle);
+ } else {
+ ASSERT(key->IsNumber());
+ key_handle = factory_->NumberToString(Handle<Object>(key, isolate_));
+ uint32_t index;
+ if (key->IsSmi()) {
+ property = Object::GetElement(object, Smi::cast(key)->value());
+ } else if (key_handle->AsArrayIndex(&index)) {
+ property = Object::GetElement(object, index);
+ } else {
+ property = GetProperty(object, key_handle);
+ }
+ }
+ if (property.is_null()) return EXCEPTION;
+ Result result = SerializeProperty(property, comma, key_handle);
+ if (!comma && result == SUCCESS) comma = true;
+ if (result >= EXCEPTION) return result;
+ }
+ }
+
+ Append('}');
+ StackPop();
+ current_part_ = handle_scope.CloseAndEscape(current_part_);
+ return SUCCESS;
+}
+
+
+void BasicJsonStringifier::ShrinkCurrentPart() {
+ ASSERT(current_index_ < part_length_);
+ if (current_index_ == 0) {
+ current_part_ = factory_->empty_string();
+ return;
+ }
+
+ int string_size, allocated_string_size;
+ if (is_ascii_) {
+ allocated_string_size = SeqAsciiString::SizeFor(part_length_);
+ string_size = SeqAsciiString::SizeFor(current_index_);
+ } else {
+ allocated_string_size = SeqTwoByteString::SizeFor(part_length_);
+ string_size = SeqTwoByteString::SizeFor(current_index_);
+ }
+
+ int delta = allocated_string_size - string_size;
+ current_part_->set_length(current_index_);
+
+ // String sizes are pointer size aligned, so that we can use filler objects
+ // that are a multiple of pointer size.
+ Address end_of_string = current_part_->address() + string_size;
+ isolate_->heap()->CreateFillerObjectAt(end_of_string, delta);
+ if (Marking::IsBlack(Marking::MarkBitFrom(*current_part_))) {
+ MemoryChunk::IncrementLiveBytesFromMutator(
+ current_part_->address(), -delta);
+ }
+}
+
+
+void BasicJsonStringifier::Extend() {
+ set_accumulator(factory_->NewConsString(accumulator(), current_part_));
+ if (part_length_ <= kMaxPartLength / kPartLengthGrowthFactor) {
+ part_length_ *= kPartLengthGrowthFactor;
+ }
+ if (is_ascii_) {
+ current_part_ = factory_->NewRawAsciiString(part_length_);
+ } else {
+ current_part_ = factory_->NewRawTwoByteString(part_length_);
+ }
+ current_index_ = 0;
+}
+
+
+void BasicJsonStringifier::ChangeEncoding() {
+ ShrinkCurrentPart();
+ set_accumulator(factory_->NewConsString(accumulator(), current_part_));
+ current_part_ = factory_->NewRawTwoByteString(part_length_);
+ current_index_ = 0;
+ is_ascii_ = false;
+}
+
+
+template <typename SrcChar, typename DestChar>
+void BasicJsonStringifier::SerializeStringUnchecked_(const SrcChar* src,
+ DestChar* dest,
+ int length) {
+ dest += current_index_;
+ DestChar* dest_start = dest;
+
+ // Assert that uc16 character is not truncated down to 8 bit.
+ // The <uc16, char> version of this method must not be called.
+ ASSERT(sizeof(*dest) >= sizeof(*src));
+
+ for (int i = 0; i < length; i++) {
+ SrcChar c = src[i];
+ if (DoNotEscape(c)) {
+ *(dest++) = static_cast<DestChar>(c);
+ } else {
+ const char* chars = &JsonEscapeTable[c * kJsonEscapeTableEntrySize];
+ while (*chars != '\0') *(dest++) = *(chars++);
+ }
+ }
+
+ current_index_ += static_cast<int>(dest - dest_start);
+}
+
+
+template <bool is_ascii, typename Char>
+void BasicJsonStringifier::SerializeString_(Vector<const Char> vector,
+ Handle<String> string) {
+ int length = vector.length();
+ Append_<is_ascii, char>('"');
+ // We make a rough estimate to find out if the current string can be
+ // serialized without allocating a new string part. The worst case length of
+ // an escaped character is 6. Shifting the remainin string length right by 3
+ // is a more pessimistic estimate, but faster to calculate.
+
+ if (((part_length_ - current_index_) >> 3) > length) {
+ if (is_ascii) {
+ SerializeStringUnchecked_(
+ vector.start(),
+ SeqAsciiString::cast(*current_part_)->GetChars(),
+ length);
+ } else {
+ SerializeStringUnchecked_(
+ vector.start(),
+ SeqTwoByteString::cast(*current_part_)->GetChars(),
+ length);
+ }
+ } else {
+ String* string_location = *string;
+ for (int i = 0; i < length; i++) {
+ Char c = vector[i];
+ if (DoNotEscape(c)) {
+ Append_<is_ascii, Char>(c);
+ } else {
+ Append_<is_ascii, char>(
+ &JsonEscapeTable[c * kJsonEscapeTableEntrySize]);
+ }
+ // If GC moved the string, we need to refresh the vector.
+ if (*string != string_location) {
+ vector = GetCharVector<Char>(string);
+ string_location = *string;
+ }
+ }
+ }
+
+ Append_<is_ascii, char>('"');
+}
+
+
+template <>
+bool BasicJsonStringifier::DoNotEscape(char c) {
+ return c >= '#' && c <= '~' && c != '\\';
+}
+
+
+template <>
+bool BasicJsonStringifier::DoNotEscape(uc16 c) {
+ return (c >= 0x80) || (c >= '#' && c <= '~' && c != '\\');
+}
+
+
+template <>
+Vector<const char> BasicJsonStringifier::GetCharVector(Handle<String> string) {
+ String::FlatContent flat = string->GetFlatContent();
+ ASSERT(flat.IsAscii());
+ return flat.ToAsciiVector();
+}
+
+
+template <>
+Vector<const uc16> BasicJsonStringifier::GetCharVector(Handle<String> string) {
+ String::FlatContent flat = string->GetFlatContent();
+ ASSERT(flat.IsTwoByte());
+ return flat.ToUC16Vector();
+}
+
+
+void BasicJsonStringifier::SerializeString(Handle<String> object) {
+ FlattenString(object);
+ String::FlatContent flat = object->GetFlatContent();
+ if (is_ascii_) {
+ if (flat.IsAscii()) {
+ SerializeString_<true, char>(flat.ToAsciiVector(), object);
+ } else {
+ ChangeEncoding();
+ SerializeString(object);
+ }
+ } else {
+ if (flat.IsAscii()) {
+ SerializeString_<false, char>(flat.ToAsciiVector(), object);
+ } else {
+ SerializeString_<false, uc16>(flat.ToUC16Vector(), object);
+ }
+ }
+}
+
+} } // namespace v8::internal
+
+#endif // V8_JSON_STRINGIFIER_H_
diff --git a/src/3rdparty/v8/src/json.js b/src/3rdparty/v8/src/json.js
index ccef445..9ab1a31 100644
--- a/src/3rdparty/v8/src/json.js
+++ b/src/3rdparty/v8/src/json.js
@@ -178,140 +178,9 @@ function JSONSerialize(key, holder, replacer, stack, indent, gap) {
}
-function BasicSerializeArray(value, stack, builder) {
- var len = value.length;
- if (len == 0) {
- builder.push("[]");
- return;
- }
- if (!%PushIfAbsent(stack, value)) {
- throw MakeTypeError('circular_structure', $Array());
- }
- builder.push("[");
- var val = value[0];
- if (IS_STRING(val)) {
- // First entry is a string. Remaining entries are likely to be strings too.
- var array_string = %QuoteJSONStringArray(value);
- if (!IS_UNDEFINED(array_string)) {
- // array_string also includes bracket characters so we are done.
- builder[builder.length - 1] = array_string;
- stack.pop();
- return;
- } else {
- builder.push(%QuoteJSONString(val));
- for (var i = 1; i < len; i++) {
- val = value[i];
- if (IS_STRING(val)) {
- builder.push(%QuoteJSONStringComma(val));
- } else {
- builder.push(",");
- var before = builder.length;
- BasicJSONSerialize(i, val, stack, builder);
- if (before == builder.length) builder[before - 1] = ",null";
- }
- }
- }
- } else if (IS_NUMBER(val)) {
- // First entry is a number. Remaining entries are likely to be numbers too.
- builder.push(JSON_NUMBER_TO_STRING(val));
- for (var i = 1; i < len; i++) {
- builder.push(",");
- val = value[i];
- if (IS_NUMBER(val)) {
- builder.push(JSON_NUMBER_TO_STRING(val));
- } else {
- var before = builder.length;
- BasicJSONSerialize(i, val, stack, builder);
- if (before == builder.length) builder[before - 1] = ",null";
- }
- }
- } else {
- var before = builder.length;
- BasicJSONSerialize(0, val, stack, builder);
- if (before == builder.length) builder.push("null");
- for (var i = 1; i < len; i++) {
- builder.push(",");
- before = builder.length;
- BasicJSONSerialize(i, value[i], stack, builder);
- if (before == builder.length) builder[before - 1] = ",null";
- }
- }
- stack.pop();
- builder.push("]");
-}
-
-
-function BasicSerializeObject(value, stack, builder) {
- if (!%PushIfAbsent(stack, value)) {
- throw MakeTypeError('circular_structure', $Array());
- }
- builder.push("{");
- var first = true;
- for (var p in value) {
- if (%HasLocalProperty(value, p)) {
- if (!first) {
- builder.push(%QuoteJSONStringComma(p));
- } else {
- builder.push(%QuoteJSONString(p));
- }
- builder.push(":");
- var before = builder.length;
- BasicJSONSerialize(p, value[p], stack, builder);
- if (before == builder.length) {
- builder.pop();
- builder.pop();
- } else {
- first = false;
- }
- }
- }
- stack.pop();
- builder.push("}");
-}
-
-
-function BasicJSONSerialize(key, value, stack, builder) {
- if (IS_SPEC_OBJECT(value)) {
- var toJSON = value.toJSON;
- if (IS_SPEC_FUNCTION(toJSON)) {
- value = %_CallFunction(value, ToString(key), toJSON);
- }
- }
- if (IS_STRING(value)) {
- builder.push(value !== "" ? %QuoteJSONString(value) : '""');
- } else if (IS_NUMBER(value)) {
- builder.push(JSON_NUMBER_TO_STRING(value));
- } else if (IS_BOOLEAN(value)) {
- builder.push(value ? "true" : "false");
- } else if (IS_NULL(value)) {
- builder.push("null");
- } else if (IS_SPEC_OBJECT(value) && !(typeof value == "function")) {
- // Value is a non-callable object.
- // Unwrap value if necessary
- if (IS_NUMBER_WRAPPER(value)) {
- value = ToNumber(value);
- builder.push(JSON_NUMBER_TO_STRING(value));
- } else if (IS_STRING_WRAPPER(value)) {
- builder.push(%QuoteJSONString(ToString(value)));
- } else if (IS_BOOLEAN_WRAPPER(value)) {
- builder.push(%_ValueOf(value) ? "true" : "false");
- } else if (IS_ARRAY(value)) {
- BasicSerializeArray(value, stack, builder);
- } else {
- BasicSerializeObject(value, stack, builder);
- }
- }
-}
-
-
function JSONStringify(value, replacer, space) {
if (%_ArgumentsLength() == 1) {
- var builder = new InternalArray();
- BasicJSONSerialize('', value, new InternalArray(), builder);
- if (builder.length == 0) return;
- var result = %_FastAsciiArrayJoin(builder, "");
- if (!IS_UNDEFINED(result)) return result;
- return %StringBuilderConcat(builder, builder.length, "");
+ return %BasicJSONStringify(value);
}
if (IS_OBJECT(space)) {
// Unwrap 'space' if it is wrapped
@@ -337,6 +206,7 @@ function JSONStringify(value, replacer, space) {
return JSONSerialize('', {'': value}, replacer, new InternalArray(), "", gap);
}
+
function SetUpJSON() {
%CheckIsBootstrapping();
InstallFunctions($JSON, DONT_ENUM, $Array(
@@ -345,4 +215,12 @@ function SetUpJSON() {
));
}
+
+function JSONSerializeAdapter(key, object) {
+ var holder = {};
+ holder[key] = object;
+ // No need to pass the actual holder since there is no replacer function.
+ return JSONSerialize(key, holder, void 0, new InternalArray(), "", "");
+}
+
SetUpJSON();
diff --git a/src/3rdparty/v8/src/jsregexp.cc b/src/3rdparty/v8/src/jsregexp.cc
index 3455abc..e59170d 100644
--- a/src/3rdparty/v8/src/jsregexp.cc
+++ b/src/3rdparty/v8/src/jsregexp.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -167,7 +167,9 @@ static bool HasFewDifferentCharacters(Handle<String> pattern) {
Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
Handle<String> pattern,
- Handle<String> flag_str) {
+ Handle<String> flag_str,
+ Zone* zone) {
+ ZoneScope zone_scope(zone, DELETE_ON_EXIT);
Isolate* isolate = re->GetIsolate();
JSRegExp::Flags flags = RegExpFlagsFromString(flag_str);
CompilationCache* compilation_cache = isolate->compilation_cache();
@@ -181,12 +183,11 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
return re;
}
pattern = FlattenGetString(pattern);
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
PostponeInterruptsScope postpone(isolate);
RegExpCompileData parse_result;
FlatStringReader reader(isolate, pattern);
if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(),
- &parse_result)) {
+ &parse_result, zone)) {
// Throw an exception if we fail to parse the pattern.
ThrowRegExpException(re,
pattern,
@@ -277,11 +278,12 @@ static void SetAtomLastCapture(FixedArray* array,
}
-Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
- Handle<String> subject,
- int index,
- Handle<JSArray> last_match_info) {
- Isolate* isolate = re->GetIsolate();
+int RegExpImpl::AtomExecRaw(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ int index,
+ int32_t* output,
+ int output_size) {
+ Isolate* isolate = regexp->GetIsolate();
ASSERT(0 <= index);
ASSERT(index <= subject->length());
@@ -289,15 +291,16 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
if (!subject->IsFlat()) FlattenString(subject);
AssertNoAllocation no_heap_allocation; // ensure vectors stay valid
- String* needle = String::cast(re->DataAt(JSRegExp::kAtomPatternIndex));
+ String* needle = String::cast(regexp->DataAt(JSRegExp::kAtomPatternIndex));
int needle_len = needle->length();
ASSERT(needle->IsFlat());
+ ASSERT_LT(0, needle_len);
- if (needle_len != 0) {
- if (index + needle_len > subject->length()) {
- return isolate->factory()->null_value();
- }
+ if (index + needle_len > subject->length()) {
+ return RegExpImpl::RE_FAILURE;
+ }
+ for (int i = 0; i < output_size; i += 2) {
String::FlatContent needle_content = needle->GetFlatContent();
String::FlatContent subject_content = subject->GetFlatContent();
ASSERT(needle_content.IsFlat());
@@ -322,15 +325,36 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
subject_content.ToUC16Vector(),
needle_content.ToUC16Vector(),
index)));
- if (index == -1) return isolate->factory()->null_value();
+ if (index == -1) {
+ return i / 2; // Return number of matches.
+ } else {
+ output[i] = index;
+ output[i+1] = index + needle_len;
+ index += needle_len;
+ }
}
- ASSERT(last_match_info->HasFastElements());
+ return output_size / 2;
+}
- {
- NoHandleAllocation no_handles;
- FixedArray* array = FixedArray::cast(last_match_info->elements());
- SetAtomLastCapture(array, *subject, index, index + needle_len);
- }
+
+Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
+ Handle<String> subject,
+ int index,
+ Handle<JSArray> last_match_info) {
+ Isolate* isolate = re->GetIsolate();
+
+ static const int kNumRegisters = 2;
+ STATIC_ASSERT(kNumRegisters <= Isolate::kJSRegexpStaticOffsetsVectorSize);
+ int32_t* output_registers = isolate->jsregexp_static_offsets_vector();
+
+ int res = AtomExecRaw(re, subject, index, output_registers, kNumRegisters);
+
+ if (res == RegExpImpl::RE_FAILURE) return isolate->factory()->null_value();
+
+ ASSERT_EQ(res, RegExpImpl::RE_SUCCESS);
+ NoHandleAllocation no_handles;
+ FixedArray* array = FixedArray::cast(last_match_info->elements());
+ SetAtomLastCapture(array, *subject, output_registers[0], output_registers[1]);
return last_match_info;
}
@@ -385,7 +409,7 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re,
bool is_ascii) {
// Compile the RegExp.
Isolate* isolate = re->GetIsolate();
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
+ ZoneScope zone_scope(isolate->runtime_zone(), DELETE_ON_EXIT);
PostponeInterruptsScope postpone(isolate);
// If we had a compilation error the last time this is saved at the
// saved code index.
@@ -416,8 +440,10 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re,
if (!pattern->IsFlat()) FlattenString(pattern);
RegExpCompileData compile_data;
FlatStringReader reader(isolate, pattern);
+ Zone* zone = isolate->runtime_zone();
if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(),
- &compile_data)) {
+ &compile_data,
+ zone)) {
// Throw an exception if we fail to parse the pattern.
// THIS SHOULD NOT HAPPEN. We already pre-parsed it successfully once.
ThrowRegExpException(re,
@@ -429,10 +455,12 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re,
RegExpEngine::CompilationResult result =
RegExpEngine::Compile(&compile_data,
flags.is_ignore_case(),
+ flags.is_global(),
flags.is_multiline(),
pattern,
sample_subject,
- is_ascii);
+ is_ascii,
+ zone);
if (result.error_message != NULL) {
// Unable to compile regexp.
Handle<String> error_message =
@@ -506,7 +534,11 @@ int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
#ifdef V8_INTERPRETED_REGEXP
// Byte-code regexp needs space allocated for all its registers.
- return IrregexpNumberOfRegisters(FixedArray::cast(regexp->data()));
+ // The result captures are copied to the start of the registers array
+ // if the match succeeds. This way those registers are not clobbered
+ // when we set the last match info from last successful match.
+ return IrregexpNumberOfRegisters(FixedArray::cast(regexp->data())) +
+ (IrregexpNumberOfCaptures(FixedArray::cast(regexp->data())) + 1) * 2;
#else // V8_INTERPRETED_REGEXP
// Native regexp only needs room to output captures. Registers are handled
// internally.
@@ -515,11 +547,11 @@ int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
}
-RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
- Handle<JSRegExp> regexp,
- Handle<String> subject,
- int index,
- Vector<int> output) {
+int RegExpImpl::IrregexpExecRaw(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ int index,
+ int32_t* output,
+ int output_size) {
Isolate* isolate = regexp->GetIsolate();
Handle<FixedArray> irregexp(FixedArray::cast(regexp->data()), isolate);
@@ -531,15 +563,19 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
bool is_ascii = subject->IsAsciiRepresentationUnderneath();
#ifndef V8_INTERPRETED_REGEXP
- ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
+ ASSERT(output_size >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
do {
EnsureCompiledIrregexp(regexp, subject, is_ascii);
Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
+ // The stack is used to allocate registers for the compiled regexp code.
+ // This means that in case of failure, the output registers array is left
+ // untouched and contains the capture results from the previous successful
+ // match. We can use that to set the last match info lazily.
NativeRegExpMacroAssembler::Result res =
NativeRegExpMacroAssembler::Match(code,
subject,
- output.start(),
- output.length(),
+ output,
+ output_size,
index,
isolate);
if (res != NativeRegExpMacroAssembler::RETRY) {
@@ -566,22 +602,29 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
return RE_EXCEPTION;
#else // V8_INTERPRETED_REGEXP
- ASSERT(output.length() >= IrregexpNumberOfRegisters(*irregexp));
+ ASSERT(output_size >= IrregexpNumberOfRegisters(*irregexp));
// We must have done EnsureCompiledIrregexp, so we can get the number of
// registers.
- int* register_vector = output.start();
int number_of_capture_registers =
(IrregexpNumberOfCaptures(*irregexp) + 1) * 2;
+ int32_t* raw_output = &output[number_of_capture_registers];
+ // We do not touch the actual capture result registers until we know there
+ // has been a match so that we can use those capture results to set the
+ // last match info.
for (int i = number_of_capture_registers - 1; i >= 0; i--) {
- register_vector[i] = -1;
+ raw_output[i] = -1;
}
Handle<ByteArray> byte_codes(IrregexpByteCode(*irregexp, is_ascii), isolate);
IrregexpResult result = IrregexpInterpreter::Match(isolate,
byte_codes,
subject,
- register_vector,
+ raw_output,
index);
+ if (result == RE_SUCCESS) {
+ // Copy capture results to the start of the registers array.
+ memcpy(output, raw_output, number_of_capture_registers * sizeof(int32_t));
+ }
if (result == RE_EXCEPTION) {
ASSERT(!isolate->has_pending_exception());
isolate->StackOverflow();
@@ -591,50 +634,44 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
}
-Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
+Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> regexp,
Handle<String> subject,
int previous_index,
Handle<JSArray> last_match_info) {
- Isolate* isolate = jsregexp->GetIsolate();
- ASSERT_EQ(jsregexp->TypeTag(), JSRegExp::IRREGEXP);
+ Isolate* isolate = regexp->GetIsolate();
+ ASSERT_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
// Prepare space for the return values.
-#ifdef V8_INTERPRETED_REGEXP
-#ifdef DEBUG
+#if defined(V8_INTERPRETED_REGEXP) && defined(DEBUG)
if (FLAG_trace_regexp_bytecodes) {
- String* pattern = jsregexp->Pattern();
+ String* pattern = regexp->Pattern();
PrintF("\n\nRegexp match: /%s/\n\n", *(pattern->ToCString()));
PrintF("\n\nSubject string: '%s'\n\n", *(subject->ToCString()));
}
#endif
-#endif
- int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject);
+ int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
if (required_registers < 0) {
// Compiling failed with an exception.
ASSERT(isolate->has_pending_exception());
return Handle<Object>::null();
}
- OffsetsVector registers(required_registers, isolate);
+ int32_t* output_registers = NULL;
+ if (required_registers > Isolate::kJSRegexpStaticOffsetsVectorSize) {
+ output_registers = NewArray<int32_t>(required_registers);
+ }
+ SmartArrayPointer<int32_t> auto_release(output_registers);
+ if (output_registers == NULL) {
+ output_registers = isolate->jsregexp_static_offsets_vector();
+ }
- IrregexpResult res = RegExpImpl::IrregexpExecOnce(
- jsregexp, subject, previous_index, Vector<int>(registers.vector(),
- registers.length()));
+ int res = RegExpImpl::IrregexpExecRaw(
+ regexp, subject, previous_index, output_registers, required_registers);
if (res == RE_SUCCESS) {
- int capture_register_count =
- (IrregexpNumberOfCaptures(FixedArray::cast(jsregexp->data())) + 1) * 2;
- last_match_info->EnsureSize(capture_register_count + kLastMatchOverhead);
- AssertNoAllocation no_gc;
- int* register_vector = registers.vector();
- FixedArray* array = FixedArray::cast(last_match_info->elements());
- for (int i = 0; i < capture_register_count; i += 2) {
- SetCapture(array, i, register_vector[i]);
- SetCapture(array, i + 1, register_vector[i + 1]);
- }
- SetLastCaptureCount(array, capture_register_count);
- SetLastSubject(array, *subject);
- SetLastInput(array, *subject);
- return last_match_info;
+ int capture_count =
+ IrregexpNumberOfCaptures(FixedArray::cast(regexp->data()));
+ return SetLastMatchInfo(
+ last_match_info, subject, capture_count, output_registers);
}
if (res == RE_EXCEPTION) {
ASSERT(isolate->has_pending_exception());
@@ -645,6 +682,146 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
}
+Handle<JSArray> RegExpImpl::SetLastMatchInfo(Handle<JSArray> last_match_info,
+ Handle<String> subject,
+ int capture_count,
+ int32_t* match) {
+ int capture_register_count = (capture_count + 1) * 2;
+ last_match_info->EnsureSize(capture_register_count + kLastMatchOverhead);
+ AssertNoAllocation no_gc;
+ FixedArray* array = FixedArray::cast(last_match_info->elements());
+ if (match != NULL) {
+ for (int i = 0; i < capture_register_count; i += 2) {
+ SetCapture(array, i, match[i]);
+ SetCapture(array, i + 1, match[i + 1]);
+ }
+ }
+ SetLastCaptureCount(array, capture_register_count);
+ SetLastSubject(array, *subject);
+ SetLastInput(array, *subject);
+ return last_match_info;
+}
+
+
+RegExpImpl::GlobalCache::GlobalCache(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ bool is_global,
+ Isolate* isolate)
+ : register_array_(NULL),
+ register_array_size_(0),
+ regexp_(regexp),
+ subject_(subject) {
+#ifdef V8_INTERPRETED_REGEXP
+ bool interpreted = true;
+#else
+ bool interpreted = false;
+#endif // V8_INTERPRETED_REGEXP
+
+ if (regexp_->TypeTag() == JSRegExp::ATOM) {
+ static const int kAtomRegistersPerMatch = 2;
+ registers_per_match_ = kAtomRegistersPerMatch;
+ // There is no distinction between interpreted and native for atom regexps.
+ interpreted = false;
+ } else {
+ registers_per_match_ = RegExpImpl::IrregexpPrepare(regexp_, subject_);
+ if (registers_per_match_ < 0) {
+ num_matches_ = -1; // Signal exception.
+ return;
+ }
+ }
+
+ if (is_global && !interpreted) {
+ register_array_size_ =
+ Max(registers_per_match_, Isolate::kJSRegexpStaticOffsetsVectorSize);
+ max_matches_ = register_array_size_ / registers_per_match_;
+ } else {
+ // Global loop in interpreted regexp is not implemented. We choose
+ // the size of the offsets vector so that it can only store one match.
+ register_array_size_ = registers_per_match_;
+ max_matches_ = 1;
+ }
+
+ if (register_array_size_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
+ register_array_ = NewArray<int32_t>(register_array_size_);
+ } else {
+ register_array_ = isolate->jsregexp_static_offsets_vector();
+ }
+
+ // Set state so that fetching the results the first time triggers a call
+ // to the compiled regexp.
+ current_match_index_ = max_matches_ - 1;
+ num_matches_ = max_matches_;
+ ASSERT(registers_per_match_ >= 2); // Each match has at least one capture.
+ ASSERT_GE(register_array_size_, registers_per_match_);
+ int32_t* last_match =
+ &register_array_[current_match_index_ * registers_per_match_];
+ last_match[0] = -1;
+ last_match[1] = 0;
+}
+
+
+RegExpImpl::GlobalCache::~GlobalCache() {
+ // Deallocate the register array if we allocated it in the constructor
+ // (as opposed to using the existing jsregexp_static_offsets_vector).
+ if (register_array_size_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
+ DeleteArray(register_array_);
+ }
+}
+
+
+int32_t* RegExpImpl::GlobalCache::FetchNext() {
+ current_match_index_++;
+ if (current_match_index_ >= num_matches_) {
+ // Current batch of results exhausted.
+ // Fail if last batch was not even fully filled.
+ if (num_matches_ < max_matches_) {
+ num_matches_ = 0; // Signal failed match.
+ return NULL;
+ }
+
+ int32_t* last_match =
+ &register_array_[(current_match_index_ - 1) * registers_per_match_];
+ int last_end_index = last_match[1];
+
+ if (regexp_->TypeTag() == JSRegExp::ATOM) {
+ num_matches_ = RegExpImpl::AtomExecRaw(regexp_,
+ subject_,
+ last_end_index,
+ register_array_,
+ register_array_size_);
+ } else {
+ int last_start_index = last_match[0];
+ if (last_start_index == last_end_index) last_end_index++;
+ if (last_end_index > subject_->length()) {
+ num_matches_ = 0; // Signal failed match.
+ return NULL;
+ }
+ num_matches_ = RegExpImpl::IrregexpExecRaw(regexp_,
+ subject_,
+ last_end_index,
+ register_array_,
+ register_array_size_);
+ }
+
+ if (num_matches_ <= 0) return NULL;
+ current_match_index_ = 0;
+ return register_array_;
+ } else {
+ return &register_array_[current_match_index_ * registers_per_match_];
+ }
+}
+
+
+int32_t* RegExpImpl::GlobalCache::LastSuccessfulMatch() {
+ int index = current_match_index_ * registers_per_match_;
+ if (num_matches_ == 0) {
+ // After a failed match we shift back by one result.
+ index -= registers_per_match_;
+ }
+ return &register_array_[index];
+}
+
+
// -------------------------------------------------------------------
// Implementation of the Irregexp regular expression engine.
//
@@ -795,24 +972,24 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
// the event that code generation is requested for an identical trace.
-void RegExpTree::AppendToText(RegExpText* text) {
+void RegExpTree::AppendToText(RegExpText* text, Zone* zone) {
UNREACHABLE();
}
-void RegExpAtom::AppendToText(RegExpText* text) {
- text->AddElement(TextElement::Atom(this));
+void RegExpAtom::AppendToText(RegExpText* text, Zone* zone) {
+ text->AddElement(TextElement::Atom(this), zone);
}
-void RegExpCharacterClass::AppendToText(RegExpText* text) {
- text->AddElement(TextElement::CharClass(this));
+void RegExpCharacterClass::AppendToText(RegExpText* text, Zone* zone) {
+ text->AddElement(TextElement::CharClass(this), zone);
}
-void RegExpText::AppendToText(RegExpText* text) {
+void RegExpText::AppendToText(RegExpText* text, Zone* zone) {
for (int i = 0; i < elements()->length(); i++)
- text->AddElement(elements()->at(i));
+ text->AddElement(elements()->at(i), zone);
}
@@ -843,8 +1020,8 @@ int TextElement::length() {
DispatchTable* ChoiceNode::GetTable(bool ignore_case) {
if (table_ == NULL) {
- table_ = new DispatchTable();
- DispatchTableConstructor cons(table_, ignore_case);
+ table_ = new(zone()) DispatchTable(zone());
+ DispatchTableConstructor cons(table_, ignore_case, zone());
cons.BuildTable(this);
}
return table_;
@@ -900,7 +1077,8 @@ class FrequencyCollator {
class RegExpCompiler {
public:
- RegExpCompiler(int capture_count, bool ignore_case, bool is_ascii);
+ RegExpCompiler(int capture_count, bool ignore_case, bool is_ascii,
+ Zone* zone);
int AllocateRegister() {
if (next_register_ >= RegExpMacroAssembler::kMaxRegister) {
@@ -940,6 +1118,8 @@ class RegExpCompiler {
current_expansion_factor_ = value;
}
+ Zone* zone() const { return zone_; }
+
static const int kNoRegister = -1;
private:
@@ -953,6 +1133,7 @@ class RegExpCompiler {
bool reg_exp_too_big_;
int current_expansion_factor_;
FrequencyCollator frequency_collator_;
+ Zone* zone_;
};
@@ -974,7 +1155,8 @@ static RegExpEngine::CompilationResult IrregexpRegExpTooBig() {
// Attempts to compile the regexp using an Irregexp code generator. Returns
// a fixed array or a null handle depending on whether it succeeded.
-RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii)
+RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii,
+ Zone* zone)
: next_register_(2 * (capture_count + 1)),
work_list_(NULL),
recursion_depth_(0),
@@ -982,8 +1164,9 @@ RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii)
ascii_(ascii),
reg_exp_too_big_(false),
current_expansion_factor_(1),
- frequency_collator_() {
- accept_ = new EndNode(EndNode::ACCEPT);
+ frequency_collator_(),
+ zone_(zone) {
+ accept_ = new(zone) EndNode(EndNode::ACCEPT, zone);
ASSERT(next_register_ - 1 <= RegExpMacroAssembler::kMaxRegister);
}
@@ -1079,7 +1262,8 @@ bool Trace::GetStoredPosition(int reg, int* cp_offset) {
}
-int Trace::FindAffectedRegisters(OutSet* affected_registers) {
+int Trace::FindAffectedRegisters(OutSet* affected_registers,
+ Zone* zone) {
int max_register = RegExpCompiler::kNoRegister;
for (DeferredAction* action = actions_;
action != NULL;
@@ -1087,10 +1271,10 @@ int Trace::FindAffectedRegisters(OutSet* affected_registers) {
if (action->type() == ActionNode::CLEAR_CAPTURES) {
Interval range = static_cast<DeferredClearCaptures*>(action)->range();
for (int i = range.from(); i <= range.to(); i++)
- affected_registers->Set(i);
+ affected_registers->Set(i, zone);
if (range.to() > max_register) max_register = range.to();
} else {
- affected_registers->Set(action->reg());
+ affected_registers->Set(action->reg(), zone);
if (action->reg() > max_register) max_register = action->reg();
}
}
@@ -1119,7 +1303,8 @@ void Trace::PerformDeferredActions(RegExpMacroAssembler* assembler,
int max_register,
OutSet& affected_registers,
OutSet* registers_to_pop,
- OutSet* registers_to_clear) {
+ OutSet* registers_to_clear,
+ Zone* zone) {
// The "+1" is to avoid a push_limit of zero if stack_limit_slack() is 1.
const int push_limit = (assembler->stack_limit_slack() + 1) / 2;
@@ -1225,9 +1410,9 @@ void Trace::PerformDeferredActions(RegExpMacroAssembler* assembler,
}
assembler->PushRegister(reg, stack_check);
- registers_to_pop->Set(reg);
+ registers_to_pop->Set(reg, zone);
} else if (undo_action == CLEAR) {
- registers_to_clear->Set(reg);
+ registers_to_clear->Set(reg, zone);
}
// Perform the chronologically last action (or accumulated increment)
// for the register.
@@ -1273,14 +1458,16 @@ void Trace::Flush(RegExpCompiler* compiler, RegExpNode* successor) {
assembler->PushCurrentPosition();
}
- int max_register = FindAffectedRegisters(&affected_registers);
+ int max_register = FindAffectedRegisters(&affected_registers,
+ compiler->zone());
OutSet registers_to_pop;
OutSet registers_to_clear;
PerformDeferredActions(assembler,
max_register,
affected_registers,
&registers_to_pop,
- &registers_to_clear);
+ &registers_to_clear,
+ compiler->zone());
if (cp_offset_ != 0) {
assembler->AdvanceCurrentPosition(cp_offset_);
}
@@ -1357,17 +1544,18 @@ void EndNode::Emit(RegExpCompiler* compiler, Trace* trace) {
}
-void GuardedAlternative::AddGuard(Guard* guard) {
+void GuardedAlternative::AddGuard(Guard* guard, Zone* zone) {
if (guards_ == NULL)
- guards_ = new ZoneList<Guard*>(1);
- guards_->Add(guard);
+ guards_ = new(zone) ZoneList<Guard*>(1, zone);
+ guards_->Add(guard, zone);
}
ActionNode* ActionNode::SetRegister(int reg,
int val,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(SET_REGISTER, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(SET_REGISTER, on_success);
result->data_.u_store_register.reg = reg;
result->data_.u_store_register.value = val;
return result;
@@ -1375,7 +1563,8 @@ ActionNode* ActionNode::SetRegister(int reg,
ActionNode* ActionNode::IncrementRegister(int reg, RegExpNode* on_success) {
- ActionNode* result = new ActionNode(INCREMENT_REGISTER, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(INCREMENT_REGISTER, on_success);
result->data_.u_increment_register.reg = reg;
return result;
}
@@ -1384,7 +1573,8 @@ ActionNode* ActionNode::IncrementRegister(int reg, RegExpNode* on_success) {
ActionNode* ActionNode::StorePosition(int reg,
bool is_capture,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(STORE_POSITION, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(STORE_POSITION, on_success);
result->data_.u_position_register.reg = reg;
result->data_.u_position_register.is_capture = is_capture;
return result;
@@ -1393,7 +1583,8 @@ ActionNode* ActionNode::StorePosition(int reg,
ActionNode* ActionNode::ClearCaptures(Interval range,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(CLEAR_CAPTURES, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(CLEAR_CAPTURES, on_success);
result->data_.u_clear_captures.range_from = range.from();
result->data_.u_clear_captures.range_to = range.to();
return result;
@@ -1403,7 +1594,8 @@ ActionNode* ActionNode::ClearCaptures(Interval range,
ActionNode* ActionNode::BeginSubmatch(int stack_reg,
int position_reg,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(BEGIN_SUBMATCH, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(BEGIN_SUBMATCH, on_success);
result->data_.u_submatch.stack_pointer_register = stack_reg;
result->data_.u_submatch.current_position_register = position_reg;
return result;
@@ -1415,7 +1607,8 @@ ActionNode* ActionNode::PositiveSubmatchSuccess(int stack_reg,
int clear_register_count,
int clear_register_from,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(POSITIVE_SUBMATCH_SUCCESS, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(POSITIVE_SUBMATCH_SUCCESS, on_success);
result->data_.u_submatch.stack_pointer_register = stack_reg;
result->data_.u_submatch.current_position_register = position_reg;
result->data_.u_submatch.clear_register_count = clear_register_count;
@@ -1428,7 +1621,8 @@ ActionNode* ActionNode::EmptyMatchCheck(int start_register,
int repetition_register,
int repetition_limit,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(EMPTY_MATCH_CHECK, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(EMPTY_MATCH_CHECK, on_success);
result->data_.u_empty_match_check.start_register = start_register;
result->data_.u_empty_match_check.repetition_register = repetition_register;
result->data_.u_empty_match_check.repetition_limit = repetition_limit;
@@ -2008,8 +2202,9 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
Label* on_failure,
int cp_offset,
bool check_offset,
- bool preloaded) {
- ZoneList<CharacterRange>* ranges = cc->ranges();
+ bool preloaded,
+ Zone* zone) {
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone);
if (!CharacterRange::IsCanonical(ranges)) {
CharacterRange::Canonicalize(ranges);
}
@@ -2068,7 +2263,7 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
macro_assembler->LoadCurrentCharacter(cp_offset, on_failure, check_offset);
}
- if (cc->is_standard() &&
+ if (cc->is_standard(zone) &&
macro_assembler->CheckSpecialCharacterClass(cc->standard_type(),
on_failure)) {
return;
@@ -2081,7 +2276,8 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
// entry at zero which goes to the failure label, but if there
// was already one there we fall through for success on that entry.
// Subsequent entries have alternating meaning (success/failure).
- ZoneList<int>* range_boundaries = new ZoneList<int>(last_valid_range);
+ ZoneList<int>* range_boundaries =
+ new(zone) ZoneList<int>(last_valid_range, zone);
bool zeroth_entry_is_failure = !cc->is_negated();
@@ -2091,9 +2287,9 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
ASSERT_EQ(i, 0);
zeroth_entry_is_failure = !zeroth_entry_is_failure;
} else {
- range_boundaries->Add(range.from());
+ range_boundaries->Add(range.from(), zone);
}
- range_boundaries->Add(range.to() + 1);
+ range_boundaries->Add(range.to() + 1, zone);
}
int end_index = range_boundaries->length() - 1;
if (range_boundaries->at(end_index) > max_char) {
@@ -2174,12 +2370,15 @@ int ActionNode::EatsAtLeast(int still_to_find,
void ActionNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
BoyerMooreLookahead* bm,
bool not_at_start) {
if (type_ == BEGIN_SUBMATCH) {
bm->SetRest(offset);
} else if (type_ != POSITIVE_SUBMATCH_SUCCESS) {
- on_success()->FillInBMInfo(offset, bm, not_at_start);
+ on_success()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
}
SaveBMInfo(bm, not_at_start, offset);
}
@@ -2201,11 +2400,15 @@ int AssertionNode::EatsAtLeast(int still_to_find,
}
-void AssertionNode::FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+void AssertionNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
// Match the behaviour of EatsAtLeast on this node.
if (type() == AT_START && not_at_start) return;
- on_success()->FillInBMInfo(offset, bm, not_at_start);
+ on_success()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
SaveBMInfo(bm, not_at_start, offset);
}
@@ -2484,7 +2687,7 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
QuickCheckDetails::Position* pos =
details->positions(characters_filled_in);
RegExpCharacterClass* tree = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = tree->ranges();
+ ZoneList<CharacterRange>* ranges = tree->ranges(zone());
if (tree->is_negated()) {
// A quick check uses multi-character mask and compare. There is no
// useful way to incorporate a negative char class into this scheme
@@ -2669,7 +2872,7 @@ RegExpNode* TextNode::FilterASCII(int depth) {
} else {
ASSERT(elm.type == TextElement::CHAR_CLASS);
RegExpCharacterClass* cc = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = cc->ranges();
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone());
if (!CharacterRange::IsCanonical(ranges)) {
CharacterRange::Canonicalize(ranges);
}
@@ -2716,6 +2919,15 @@ RegExpNode* ChoiceNode::FilterASCII(int depth) {
if (info()->visited) return this;
VisitMarker marker(info());
int choice_count = alternatives_->length();
+
+ for (int i = 0; i < choice_count; i++) {
+ GuardedAlternative alternative = alternatives_->at(i);
+ if (alternative.guards() != NULL && alternative.guards()->length() != 0) {
+ set_replacement(this);
+ return this;
+ }
+ }
+
int surviving = 0;
RegExpNode* survivor = NULL;
for (int i = 0; i < choice_count; i++) {
@@ -2737,13 +2949,13 @@ RegExpNode* ChoiceNode::FilterASCII(int depth) {
// Only some of the nodes survived the filtering. We need to rebuild the
// alternatives list.
ZoneList<GuardedAlternative>* new_alternatives =
- new ZoneList<GuardedAlternative>(surviving);
+ new(zone()) ZoneList<GuardedAlternative>(surviving, zone());
for (int i = 0; i < choice_count; i++) {
RegExpNode* replacement =
alternatives_->at(i).node()->FilterASCII(depth - 1);
if (replacement != NULL) {
alternatives_->at(i).set_node(replacement);
- new_alternatives->Add(alternatives_->at(i));
+ new_alternatives->Add(alternatives_->at(i), zone());
}
}
alternatives_ = new_alternatives;
@@ -2786,14 +2998,20 @@ void LoopChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
}
-void LoopChoiceNode::FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
- if (body_can_be_zero_length_) {
+void LoopChoiceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ if (body_can_be_zero_length_ ||
+ recursion_depth > RegExpCompiler::kMaxRecursion ||
+ budget <= 0) {
bm->SetRest(offset);
SaveBMInfo(bm, not_at_start, offset);
return;
}
- ChoiceNode::FillInBMInfo(offset, bm, not_at_start);
+ ChoiceNode::FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
SaveBMInfo(bm, not_at_start, offset);
}
@@ -2894,8 +3112,8 @@ void AssertionNode::EmitBoundaryCheck(RegExpCompiler* compiler, Trace* trace) {
EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
if (eats_at_least >= 1) {
BoyerMooreLookahead* bm =
- new BoyerMooreLookahead(eats_at_least, compiler);
- FillInBMInfo(0, bm, not_at_start);
+ new(zone()) BoyerMooreLookahead(eats_at_least, compiler, zone());
+ FillInBMInfo(0, 0, kFillInBMBudget, bm, not_at_start);
if (bm->at(0)->is_non_word()) next_is_word_character = Trace::FALSE;
if (bm->at(0)->is_word()) next_is_word_character = Trace::TRUE;
}
@@ -3121,7 +3339,8 @@ void TextNode::TextEmitPass(RegExpCompiler* compiler,
backtrack,
cp_offset,
*checked_up_to < cp_offset,
- preloaded);
+ preloaded,
+ zone());
UpdateBoundsCheck(cp_offset, checked_up_to);
}
}
@@ -3242,11 +3461,11 @@ void TextNode::MakeCaseIndependent(bool is_ascii) {
RegExpCharacterClass* cc = elm.data.u_char_class;
// None of the standard character classes is different in the case
// independent case and it slows us down if we don't know that.
- if (cc->is_standard()) continue;
- ZoneList<CharacterRange>* ranges = cc->ranges();
+ if (cc->is_standard(zone())) continue;
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone());
int range_count = ranges->length();
for (int j = 0; j < range_count; j++) {
- ranges->at(j).AddCaseEquivalents(ranges, is_ascii);
+ ranges->at(j).AddCaseEquivalents(ranges, is_ascii, zone());
}
}
}
@@ -3269,7 +3488,7 @@ RegExpNode* TextNode::GetSuccessorOfOmnivorousTextNode(
TextElement elm = elms_->at(0);
if (elm.type != TextElement::CHAR_CLASS) return NULL;
RegExpCharacterClass* node = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = node->ranges();
+ ZoneList<CharacterRange>* ranges = node->ranges(zone());
if (!CharacterRange::IsCanonical(ranges)) {
CharacterRange::Canonicalize(ranges);
}
@@ -3391,13 +3610,13 @@ class AlternativeGeneration: public Malloced {
// size then it is on the stack, otherwise the excess is on the heap.
class AlternativeGenerationList {
public:
- explicit AlternativeGenerationList(int count)
- : alt_gens_(count) {
+ AlternativeGenerationList(int count, Zone* zone)
+ : alt_gens_(count, zone) {
for (int i = 0; i < count && i < kAFew; i++) {
- alt_gens_.Add(a_few_alt_gens_ + i);
+ alt_gens_.Add(a_few_alt_gens_ + i, zone);
}
for (int i = kAFew; i < count; i++) {
- alt_gens_.Add(new AlternativeGeneration());
+ alt_gens_.Add(new AlternativeGeneration(), zone);
}
}
~AlternativeGenerationList() {
@@ -3475,7 +3694,7 @@ void BoyerMoorePositionInfo::SetAll() {
BoyerMooreLookahead::BoyerMooreLookahead(
- int length, RegExpCompiler* compiler)
+ int length, RegExpCompiler* compiler, Zone* zone)
: length_(length),
compiler_(compiler) {
if (compiler->ascii()) {
@@ -3483,9 +3702,9 @@ BoyerMooreLookahead::BoyerMooreLookahead(
} else {
max_char_ = String::kMaxUtf16CodeUnit;
}
- bitmaps_ = new ZoneList<BoyerMoorePositionInfo*>(length);
+ bitmaps_ = new(zone) ZoneList<BoyerMoorePositionInfo*>(length, zone);
for (int i = 0; i < length; i++) {
- bitmaps_->Add(new BoyerMoorePositionInfo());
+ bitmaps_->Add(new(zone) BoyerMoorePositionInfo(zone), zone);
}
}
@@ -3831,9 +4050,11 @@ void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
if (eats_at_least >= 1) {
BoyerMooreLookahead* bm =
- new BoyerMooreLookahead(eats_at_least, compiler);
+ new(zone()) BoyerMooreLookahead(eats_at_least,
+ compiler,
+ zone());
GuardedAlternative alt0 = alternatives_->at(0);
- alt0.node()->FillInBMInfo(0, bm, not_at_start);
+ alt0.node()->FillInBMInfo(0, 0, kFillInBMBudget, bm, not_at_start);
skip_was_emitted = bm->EmitSkipInstructions(macro_assembler);
}
} else {
@@ -3853,7 +4074,7 @@ void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
(current_trace->characters_preloaded() == preload_characters);
bool preload_has_checked_bounds = preload_is_current;
- AlternativeGenerationList alt_gens(choice_count);
+ AlternativeGenerationList alt_gens(choice_count, zone());
// For now we just call all choices one after the other. The idea ultimately
// is to use the Dispatch table to try only the relevant ones.
@@ -4333,6 +4554,7 @@ void DotPrinter::VisitChoice(ChoiceNode* that) {
void DotPrinter::VisitText(TextNode* that) {
+ Zone* zone = that->zone();
stream()->Add(" n%p [label=\"", that);
for (int i = 0; i < that->elements()->length(); i++) {
if (i > 0) stream()->Add(" ");
@@ -4347,8 +4569,8 @@ void DotPrinter::VisitText(TextNode* that) {
stream()->Add("[");
if (node->is_negated())
stream()->Add("^");
- for (int j = 0; j < node->ranges()->length(); j++) {
- CharacterRange range = node->ranges()->at(j);
+ for (int j = 0; j < node->ranges(zone)->length(); j++) {
+ CharacterRange range = node->ranges(zone)->at(j);
stream()->Add("%k-%k", range.from(), range.to());
}
stream()->Add("]");
@@ -4506,15 +4728,16 @@ void RegExpEngine::DotPrint(const char* label,
RegExpNode* RegExpAtom::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- ZoneList<TextElement>* elms = new ZoneList<TextElement>(1);
- elms->Add(TextElement::Atom(this));
- return new TextNode(elms, on_success);
+ ZoneList<TextElement>* elms =
+ new(compiler->zone()) ZoneList<TextElement>(1, compiler->zone());
+ elms->Add(TextElement::Atom(this), compiler->zone());
+ return new(compiler->zone()) TextNode(elms, on_success);
}
RegExpNode* RegExpText::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new TextNode(elements(), on_success);
+ return new(compiler->zone()) TextNode(elements(), on_success);
}
@@ -4568,7 +4791,7 @@ static bool CompareRanges(ZoneList<CharacterRange>* ranges,
}
-bool RegExpCharacterClass::is_standard() {
+bool RegExpCharacterClass::is_standard(Zone* zone) {
// TODO(lrn): Remove need for this function, by not throwing away information
// along the way.
if (is_negated_) {
@@ -4577,31 +4800,31 @@ bool RegExpCharacterClass::is_standard() {
if (set_.is_standard()) {
return true;
}
- if (CompareRanges(set_.ranges(), kSpaceRanges, kSpaceRangeCount)) {
+ if (CompareRanges(set_.ranges(zone), kSpaceRanges, kSpaceRangeCount)) {
set_.set_standard_set_type('s');
return true;
}
- if (CompareInverseRanges(set_.ranges(), kSpaceRanges, kSpaceRangeCount)) {
+ if (CompareInverseRanges(set_.ranges(zone), kSpaceRanges, kSpaceRangeCount)) {
set_.set_standard_set_type('S');
return true;
}
- if (CompareInverseRanges(set_.ranges(),
+ if (CompareInverseRanges(set_.ranges(zone),
kLineTerminatorRanges,
kLineTerminatorRangeCount)) {
set_.set_standard_set_type('.');
return true;
}
- if (CompareRanges(set_.ranges(),
+ if (CompareRanges(set_.ranges(zone),
kLineTerminatorRanges,
kLineTerminatorRangeCount)) {
set_.set_standard_set_type('n');
return true;
}
- if (CompareRanges(set_.ranges(), kWordRanges, kWordRangeCount)) {
+ if (CompareRanges(set_.ranges(zone), kWordRanges, kWordRangeCount)) {
set_.set_standard_set_type('w');
return true;
}
- if (CompareInverseRanges(set_.ranges(), kWordRanges, kWordRangeCount)) {
+ if (CompareInverseRanges(set_.ranges(zone), kWordRanges, kWordRangeCount)) {
set_.set_standard_set_type('W');
return true;
}
@@ -4611,7 +4834,7 @@ bool RegExpCharacterClass::is_standard() {
RegExpNode* RegExpCharacterClass::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new TextNode(this, on_success);
+ return new(compiler->zone()) TextNode(this, on_success);
}
@@ -4619,7 +4842,8 @@ RegExpNode* RegExpDisjunction::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
ZoneList<RegExpTree*>* alternatives = this->alternatives();
int length = alternatives->length();
- ChoiceNode* result = new ChoiceNode(length);
+ ChoiceNode* result =
+ new(compiler->zone()) ChoiceNode(length, compiler->zone());
for (int i = 0; i < length; i++) {
GuardedAlternative alternative(alternatives->at(i)->ToNode(compiler,
on_success));
@@ -4712,6 +4936,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
int body_start_reg = RegExpCompiler::kNoRegister;
Interval capture_registers = body->CaptureRegisters();
bool needs_capture_clearing = !capture_registers.is_empty();
+ Zone* zone = compiler->zone();
+
if (body_can_be_empty) {
body_start_reg = compiler->AllocateRegister();
} else if (FLAG_regexp_optimization && !needs_capture_clearing) {
@@ -4742,7 +4968,7 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
// Unroll the optional matches up to max.
RegExpNode* answer = on_success;
for (int i = 0; i < max; i++) {
- ChoiceNode* alternation = new ChoiceNode(2);
+ ChoiceNode* alternation = new(zone) ChoiceNode(2, zone);
if (is_greedy) {
alternation->AddAlternative(
GuardedAlternative(body->ToNode(compiler, answer)));
@@ -4765,7 +4991,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
int reg_ctr = needs_counter
? compiler->AllocateRegister()
: RegExpCompiler::kNoRegister;
- LoopChoiceNode* center = new LoopChoiceNode(body->min_match() == 0);
+ LoopChoiceNode* center = new(zone) LoopChoiceNode(body->min_match() == 0,
+ zone);
if (not_at_start) center->set_not_at_start();
RegExpNode* loop_return = needs_counter
? static_cast<RegExpNode*>(ActionNode::IncrementRegister(reg_ctr, center))
@@ -4790,13 +5017,14 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
}
GuardedAlternative body_alt(body_node);
if (has_max) {
- Guard* body_guard = new Guard(reg_ctr, Guard::LT, max);
- body_alt.AddGuard(body_guard);
+ Guard* body_guard =
+ new(zone) Guard(reg_ctr, Guard::LT, max);
+ body_alt.AddGuard(body_guard, zone);
}
GuardedAlternative rest_alt(on_success);
if (has_min) {
- Guard* rest_guard = new Guard(reg_ctr, Guard::GEQ, min);
- rest_alt.AddGuard(rest_guard);
+ Guard* rest_guard = new(compiler->zone()) Guard(reg_ctr, Guard::GEQ, min);
+ rest_alt.AddGuard(rest_guard, zone);
}
if (is_greedy) {
center->AddLoopAlternative(body_alt);
@@ -4816,6 +5044,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
NodeInfo info;
+ Zone* zone = compiler->zone();
+
switch (type()) {
case START_OF_LINE:
return AssertionNode::AfterNewline(on_success);
@@ -4834,13 +5064,13 @@ RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
int stack_pointer_register = compiler->AllocateRegister();
int position_register = compiler->AllocateRegister();
// The ChoiceNode to distinguish between a newline and end-of-input.
- ChoiceNode* result = new ChoiceNode(2);
+ ChoiceNode* result = new(zone) ChoiceNode(2, zone);
// Create a newline atom.
ZoneList<CharacterRange>* newline_ranges =
- new ZoneList<CharacterRange>(3);
- CharacterRange::AddClassEscape('n', newline_ranges);
- RegExpCharacterClass* newline_atom = new RegExpCharacterClass('n');
- TextNode* newline_matcher = new TextNode(
+ new(zone) ZoneList<CharacterRange>(3, zone);
+ CharacterRange::AddClassEscape('n', newline_ranges, zone);
+ RegExpCharacterClass* newline_atom = new(zone) RegExpCharacterClass('n');
+ TextNode* newline_matcher = new(zone) TextNode(
newline_atom,
ActionNode::PositiveSubmatchSuccess(stack_pointer_register,
position_register,
@@ -4868,9 +5098,10 @@ RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
RegExpNode* RegExpBackReference::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new BackReferenceNode(RegExpCapture::StartRegister(index()),
- RegExpCapture::EndRegister(index()),
- on_success);
+ return new(compiler->zone())
+ BackReferenceNode(RegExpCapture::StartRegister(index()),
+ RegExpCapture::EndRegister(index()),
+ on_success);
}
@@ -4915,16 +5146,20 @@ RegExpNode* RegExpLookahead::ToNode(RegExpCompiler* compiler,
// for a negative lookahead. The NegativeLookaheadChoiceNode is a special
// ChoiceNode that knows to ignore the first exit when calculating quick
// checks.
+ Zone* zone = compiler->zone();
+
GuardedAlternative body_alt(
body()->ToNode(
compiler,
- success = new NegativeSubmatchSuccess(stack_pointer_register,
- position_register,
- register_count,
- register_start)));
+ success = new(zone) NegativeSubmatchSuccess(stack_pointer_register,
+ position_register,
+ register_count,
+ register_start,
+ zone)));
ChoiceNode* choice_node =
- new NegativeLookaheadChoiceNode(body_alt,
- GuardedAlternative(on_success));
+ new(zone) NegativeLookaheadChoiceNode(body_alt,
+ GuardedAlternative(on_success),
+ zone);
return ActionNode::BeginSubmatch(stack_pointer_register,
position_register,
choice_node);
@@ -4963,19 +5198,21 @@ RegExpNode* RegExpAlternative::ToNode(RegExpCompiler* compiler,
static void AddClass(const int* elmv,
int elmc,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
elmc--;
ASSERT(elmv[elmc] == 0x10000);
for (int i = 0; i < elmc; i += 2) {
ASSERT(elmv[i] < elmv[i + 1]);
- ranges->Add(CharacterRange(elmv[i], elmv[i + 1] - 1));
+ ranges->Add(CharacterRange(elmv[i], elmv[i + 1] - 1), zone);
}
}
static void AddClassNegated(const int *elmv,
int elmc,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
elmc--;
ASSERT(elmv[elmc] == 0x10000);
ASSERT(elmv[0] != 0x0000);
@@ -4984,51 +5221,54 @@ static void AddClassNegated(const int *elmv,
for (int i = 0; i < elmc; i += 2) {
ASSERT(last <= elmv[i] - 1);
ASSERT(elmv[i] < elmv[i + 1]);
- ranges->Add(CharacterRange(last, elmv[i] - 1));
+ ranges->Add(CharacterRange(last, elmv[i] - 1), zone);
last = elmv[i + 1];
}
- ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit));
+ ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit), zone);
}
void CharacterRange::AddClassEscape(uc16 type,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
switch (type) {
case 's':
- AddClass(kSpaceRanges, kSpaceRangeCount, ranges);
+ AddClass(kSpaceRanges, kSpaceRangeCount, ranges, zone);
break;
case 'S':
- AddClassNegated(kSpaceRanges, kSpaceRangeCount, ranges);
+ AddClassNegated(kSpaceRanges, kSpaceRangeCount, ranges, zone);
break;
case 'w':
- AddClass(kWordRanges, kWordRangeCount, ranges);
+ AddClass(kWordRanges, kWordRangeCount, ranges, zone);
break;
case 'W':
- AddClassNegated(kWordRanges, kWordRangeCount, ranges);
+ AddClassNegated(kWordRanges, kWordRangeCount, ranges, zone);
break;
case 'd':
- AddClass(kDigitRanges, kDigitRangeCount, ranges);
+ AddClass(kDigitRanges, kDigitRangeCount, ranges, zone);
break;
case 'D':
- AddClassNegated(kDigitRanges, kDigitRangeCount, ranges);
+ AddClassNegated(kDigitRanges, kDigitRangeCount, ranges, zone);
break;
case '.':
AddClassNegated(kLineTerminatorRanges,
kLineTerminatorRangeCount,
- ranges);
+ ranges,
+ zone);
break;
// This is not a character range as defined by the spec but a
// convenient shorthand for a character class that matches any
// character.
case '*':
- ranges->Add(CharacterRange::Everything());
+ ranges->Add(CharacterRange::Everything(), zone);
break;
// This is the set of characters matched by the $ and ^ symbols
// in multiline mode.
case 'n':
AddClass(kLineTerminatorRanges,
kLineTerminatorRangeCount,
- ranges);
+ ranges,
+ zone);
break;
default:
UNREACHABLE();
@@ -5044,9 +5284,11 @@ Vector<const int> CharacterRange::GetWordBounds() {
class CharacterRangeSplitter {
public:
CharacterRangeSplitter(ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded)
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone)
: included_(included),
- excluded_(excluded) { }
+ excluded_(excluded),
+ zone_(zone) { }
void Call(uc16 from, DispatchTable::Entry entry);
static const int kInBase = 0;
@@ -5055,6 +5297,7 @@ class CharacterRangeSplitter {
private:
ZoneList<CharacterRange>** included_;
ZoneList<CharacterRange>** excluded_;
+ Zone* zone_;
};
@@ -5063,31 +5306,33 @@ void CharacterRangeSplitter::Call(uc16 from, DispatchTable::Entry entry) {
ZoneList<CharacterRange>** target = entry.out_set()->Get(kInOverlay)
? included_
: excluded_;
- if (*target == NULL) *target = new ZoneList<CharacterRange>(2);
- (*target)->Add(CharacterRange(entry.from(), entry.to()));
+ if (*target == NULL) *target = new(zone_) ZoneList<CharacterRange>(2, zone_);
+ (*target)->Add(CharacterRange(entry.from(), entry.to()), zone_);
}
void CharacterRange::Split(ZoneList<CharacterRange>* base,
Vector<const int> overlay,
ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded) {
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone) {
ASSERT_EQ(NULL, *included);
ASSERT_EQ(NULL, *excluded);
- DispatchTable table;
+ DispatchTable table(zone);
for (int i = 0; i < base->length(); i++)
- table.AddRange(base->at(i), CharacterRangeSplitter::kInBase);
+ table.AddRange(base->at(i), CharacterRangeSplitter::kInBase, zone);
for (int i = 0; i < overlay.length(); i += 2) {
table.AddRange(CharacterRange(overlay[i], overlay[i + 1] - 1),
- CharacterRangeSplitter::kInOverlay);
+ CharacterRangeSplitter::kInOverlay, zone);
}
- CharacterRangeSplitter callback(included, excluded);
+ CharacterRangeSplitter callback(included, excluded, zone);
table.ForEach(&callback);
}
void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
- bool is_ascii) {
+ bool is_ascii,
+ Zone* zone) {
Isolate* isolate = Isolate::Current();
uc16 bottom = from();
uc16 top = to();
@@ -5102,7 +5347,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
for (int i = 0; i < length; i++) {
uc32 chr = chars[i];
if (chr != bottom) {
- ranges->Add(CharacterRange::Singleton(chars[i]));
+ ranges->Add(CharacterRange::Singleton(chars[i]), zone);
}
}
} else {
@@ -5142,7 +5387,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
uc16 range_from = c - (block_end - pos);
uc16 range_to = c - (block_end - end);
if (!(bottom <= range_from && range_to <= top)) {
- ranges->Add(CharacterRange(range_from, range_to));
+ ranges->Add(CharacterRange(range_from, range_to), zone);
}
}
pos = end + 1;
@@ -5165,10 +5410,10 @@ bool CharacterRange::IsCanonical(ZoneList<CharacterRange>* ranges) {
}
-ZoneList<CharacterRange>* CharacterSet::ranges() {
+ZoneList<CharacterRange>* CharacterSet::ranges(Zone* zone) {
if (ranges_ == NULL) {
- ranges_ = new ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(standard_set_type_, ranges_);
+ ranges_ = new(zone) ZoneList<CharacterRange>(2, zone);
+ CharacterRange::AddClassEscape(standard_set_type_, ranges_, zone);
}
return ranges_;
}
@@ -5297,7 +5542,8 @@ void CharacterRange::Canonicalize(ZoneList<CharacterRange>* character_ranges) {
void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
- ZoneList<CharacterRange>* negated_ranges) {
+ ZoneList<CharacterRange>* negated_ranges,
+ Zone* zone) {
ASSERT(CharacterRange::IsCanonical(ranges));
ASSERT_EQ(0, negated_ranges->length());
int range_count = ranges->length();
@@ -5309,12 +5555,13 @@ void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
}
while (i < range_count) {
CharacterRange range = ranges->at(i);
- negated_ranges->Add(CharacterRange(from + 1, range.from() - 1));
+ negated_ranges->Add(CharacterRange(from + 1, range.from() - 1), zone);
from = range.to();
i++;
}
if (from < String::kMaxUtf16CodeUnit) {
- negated_ranges->Add(CharacterRange(from + 1, String::kMaxUtf16CodeUnit));
+ negated_ranges->Add(CharacterRange(from + 1, String::kMaxUtf16CodeUnit),
+ zone);
}
}
@@ -5323,33 +5570,33 @@ void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
// Splay tree
-OutSet* OutSet::Extend(unsigned value) {
+OutSet* OutSet::Extend(unsigned value, Zone* zone) {
if (Get(value))
return this;
- if (successors() != NULL) {
- for (int i = 0; i < successors()->length(); i++) {
- OutSet* successor = successors()->at(i);
+ if (successors(zone) != NULL) {
+ for (int i = 0; i < successors(zone)->length(); i++) {
+ OutSet* successor = successors(zone)->at(i);
if (successor->Get(value))
return successor;
}
} else {
- successors_ = new ZoneList<OutSet*>(2);
+ successors_ = new(zone) ZoneList<OutSet*>(2, zone);
}
- OutSet* result = new OutSet(first_, remaining_);
- result->Set(value);
- successors()->Add(result);
+ OutSet* result = new(zone) OutSet(first_, remaining_);
+ result->Set(value, zone);
+ successors(zone)->Add(result, zone);
return result;
}
-void OutSet::Set(unsigned value) {
+void OutSet::Set(unsigned value, Zone *zone) {
if (value < kFirstLimit) {
first_ |= (1 << value);
} else {
if (remaining_ == NULL)
- remaining_ = new ZoneList<unsigned>(1);
+ remaining_ = new(zone) ZoneList<unsigned>(1, zone);
if (remaining_->is_empty() || !remaining_->Contains(value))
- remaining_->Add(value);
+ remaining_->Add(value, zone);
}
}
@@ -5368,13 +5615,15 @@ bool OutSet::Get(unsigned value) {
const uc16 DispatchTable::Config::kNoKey = unibrow::Utf8::kBadChar;
-void DispatchTable::AddRange(CharacterRange full_range, int value) {
+void DispatchTable::AddRange(CharacterRange full_range, int value,
+ Zone* zone) {
CharacterRange current = full_range;
if (tree()->is_empty()) {
// If this is the first range we just insert into the table.
ZoneSplayTree<Config>::Locator loc;
ASSERT_RESULT(tree()->Insert(current.from(), &loc));
- loc.set_value(Entry(current.from(), current.to(), empty()->Extend(value)));
+ loc.set_value(Entry(current.from(), current.to(),
+ empty()->Extend(value, zone)));
return;
}
// First see if there is a range to the left of this one that
@@ -5417,7 +5666,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
ASSERT_RESULT(tree()->Insert(current.from(), &ins));
ins.set_value(Entry(current.from(),
entry->from() - 1,
- empty()->Extend(value)));
+ empty()->Extend(value, zone)));
current.set_from(entry->from());
}
ASSERT_EQ(current.from(), entry->from());
@@ -5435,7 +5684,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
// The overlapping range is now completely contained by the range
// we're adding so we can just update it and move the start point
// of the range we're adding just past it.
- entry->AddValue(value);
+ entry->AddValue(value, zone);
// Bail out if the last interval ended at 0xFFFF since otherwise
// adding 1 will wrap around to 0.
if (entry->to() == String::kMaxUtf16CodeUnit)
@@ -5448,7 +5697,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
ASSERT_RESULT(tree()->Insert(current.from(), &ins));
ins.set_value(Entry(current.from(),
current.to(),
- empty()->Extend(value)));
+ empty()->Extend(value, zone)));
break;
}
}
@@ -5572,8 +5821,11 @@ void Analysis::VisitAssertion(AssertionNode* that) {
}
-void BackReferenceNode::FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+void BackReferenceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
// Working out the set of characters that a backreference can match is too
// hard, so we just say that any character can match.
bm->SetRest(offset);
@@ -5585,9 +5837,13 @@ STATIC_ASSERT(BoyerMoorePositionInfo::kMapSize ==
RegExpMacroAssembler::kTableSize);
-void ChoiceNode::FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+void ChoiceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
ZoneList<GuardedAlternative>* alts = alternatives();
+ budget = (budget - 1) / alts->length();
for (int i = 0; i < alts->length(); i++) {
GuardedAlternative& alt = alts->at(i);
if (alt.guards() != NULL && alt.guards()->length() != 0) {
@@ -5595,14 +5851,18 @@ void ChoiceNode::FillInBMInfo(
SaveBMInfo(bm, not_at_start, offset);
return;
}
- alt.node()->FillInBMInfo(offset, bm, not_at_start);
+ alt.node()->FillInBMInfo(
+ offset, recursion_depth + 1, budget, bm, not_at_start);
}
SaveBMInfo(bm, not_at_start, offset);
}
-void TextNode::FillInBMInfo(
- int initial_offset, BoyerMooreLookahead* bm, bool not_at_start) {
+void TextNode::FillInBMInfo(int initial_offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
if (initial_offset >= bm->length()) return;
int offset = initial_offset;
int max_char = bm->max_char();
@@ -5637,7 +5897,7 @@ void TextNode::FillInBMInfo(
} else {
ASSERT(text.type == TextElement::CHAR_CLASS);
RegExpCharacterClass* char_class = text.data.u_char_class;
- ZoneList<CharacterRange>* ranges = char_class->ranges();
+ ZoneList<CharacterRange>* ranges = char_class->ranges(zone());
if (char_class->is_negated()) {
bm->SetAll(offset);
} else {
@@ -5656,6 +5916,8 @@ void TextNode::FillInBMInfo(
return;
}
on_success()->FillInBMInfo(offset,
+ recursion_depth + 1,
+ budget - 1,
bm,
true); // Not at start after a text node.
if (initial_offset == 0) set_bm_info(not_at_start, bm);
@@ -5755,7 +6017,7 @@ void DispatchTableConstructor::VisitText(TextNode* that) {
}
case TextElement::CHAR_CLASS: {
RegExpCharacterClass* tree = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = tree->ranges();
+ ZoneList<CharacterRange>* ranges = tree->ranges(that->zone());
if (tree->is_negated()) {
AddInverse(ranges);
} else {
@@ -5780,14 +6042,16 @@ void DispatchTableConstructor::VisitAction(ActionNode* that) {
RegExpEngine::CompilationResult RegExpEngine::Compile(
RegExpCompileData* data,
bool ignore_case,
+ bool is_global,
bool is_multiline,
Handle<String> pattern,
Handle<String> sample_subject,
- bool is_ascii) {
+ bool is_ascii,
+ Zone* zone) {
if ((data->capture_count + 1) * 2 - 1 > RegExpMacroAssembler::kMaxRegister) {
return IrregexpRegExpTooBig();
}
- RegExpCompiler compiler(data->capture_count, ignore_case, is_ascii);
+ RegExpCompiler compiler(data->capture_count, ignore_case, is_ascii, zone);
// Sample some characters from the middle of the string.
static const int kSampleSize = 128;
@@ -5817,7 +6081,7 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(
RegExpQuantifier::ToNode(0,
RegExpTree::kInfinity,
false,
- new RegExpCharacterClass('*'),
+ new(zone) RegExpCharacterClass('*'),
&compiler,
captured_body,
data->contains_anchor);
@@ -5825,10 +6089,10 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(
if (data->contains_anchor) {
// Unroll loop once, to take care of the case that might start
// at the start of input.
- ChoiceNode* first_step_node = new ChoiceNode(2);
+ ChoiceNode* first_step_node = new(zone) ChoiceNode(2, zone);
first_step_node->AddAlternative(GuardedAlternative(captured_body));
first_step_node->AddAlternative(GuardedAlternative(
- new TextNode(new RegExpCharacterClass('*'), loop_node)));
+ new(zone) TextNode(new(zone) RegExpCharacterClass('*'), loop_node)));
node = first_step_node;
} else {
node = loop_node;
@@ -5841,7 +6105,7 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(
if (node != NULL) node = node->FilterASCII(RegExpCompiler::kMaxRecursion);
}
- if (node == NULL) node = new EndNode(EndNode::BACKTRACK);
+ if (node == NULL) node = new(zone) EndNode(EndNode::BACKTRACK, zone);
data->node = node;
Analysis analysis(ignore_case, is_ascii);
analysis.EnsureAnalyzed(node);
@@ -5859,19 +6123,23 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(
: NativeRegExpMacroAssembler::UC16;
#if V8_TARGET_ARCH_IA32
- RegExpMacroAssemblerIA32 macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerIA32 macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_X64
- RegExpMacroAssemblerX64 macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerX64 macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_ARM
- RegExpMacroAssemblerARM macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerARM macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_MIPS
- RegExpMacroAssemblerMIPS macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerMIPS macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#endif
#else // V8_INTERPRETED_REGEXP
// Interpreted regexp implementation.
EmbeddedVector<byte, 1024> codes;
- RegExpMacroAssemblerIrregexp macro_assembler(codes);
+ RegExpMacroAssemblerIrregexp macro_assembler(codes, zone);
#endif // V8_INTERPRETED_REGEXP
// Inserted here, instead of in Assembler, because it depends on information
@@ -5883,6 +6151,13 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(
macro_assembler.SetCurrentPositionFromEnd(max_length);
}
+ if (is_global) {
+ macro_assembler.set_global_mode(
+ (data->tree->min_match() > 0)
+ ? RegExpMacroAssembler::GLOBAL_NO_ZERO_LENGTH_CHECK
+ : RegExpMacroAssembler::GLOBAL);
+ }
+
return compiler.Assemble(&macro_assembler,
node,
data->capture_count,
diff --git a/src/3rdparty/v8/src/jsregexp.h b/src/3rdparty/v8/src/jsregexp.h
index 20313ca..96825ce 100644
--- a/src/3rdparty/v8/src/jsregexp.h
+++ b/src/3rdparty/v8/src/jsregexp.h
@@ -71,7 +71,8 @@ class RegExpImpl {
// Returns false if compilation fails.
static Handle<Object> Compile(Handle<JSRegExp> re,
Handle<String> pattern,
- Handle<String> flags);
+ Handle<String> flags,
+ Zone* zone);
// See ECMA-262 section 15.10.6.2.
// This function calls the garbage collector if necessary.
@@ -92,6 +93,14 @@ class RegExpImpl {
JSRegExp::Flags flags,
Handle<String> match_pattern);
+
+ static int AtomExecRaw(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ int index,
+ int32_t* output,
+ int output_size);
+
+
static Handle<Object> AtomExec(Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
@@ -104,31 +113,71 @@ class RegExpImpl {
// This ensures that the regexp is compiled for the subject, and that
// the subject is flat.
// Returns the number of integer spaces required by IrregexpExecOnce
- // as its "registers" argument. If the regexp cannot be compiled,
+ // as its "registers" argument. If the regexp cannot be compiled,
// an exception is set as pending, and this function returns negative.
static int IrregexpPrepare(Handle<JSRegExp> regexp,
Handle<String> subject);
- // Execute a regular expression once on the subject, starting from
- // character "index".
- // If successful, returns RE_SUCCESS and set the capture positions
- // in the first registers.
+ // Execute a regular expression on the subject, starting from index.
+ // If matching succeeds, return the number of matches. This can be larger
+ // than one in the case of global regular expressions.
+ // The captures and subcaptures are stored into the registers vector.
// If matching fails, returns RE_FAILURE.
// If execution fails, sets a pending exception and returns RE_EXCEPTION.
- static IrregexpResult IrregexpExecOnce(Handle<JSRegExp> regexp,
- Handle<String> subject,
- int index,
- Vector<int> registers);
+ static int IrregexpExecRaw(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ int index,
+ int32_t* output,
+ int output_size);
// Execute an Irregexp bytecode pattern.
// On a successful match, the result is a JSArray containing
- // captured positions. On a failure, the result is the null value.
+ // captured positions. On a failure, the result is the null value.
// Returns an empty handle in case of an exception.
static Handle<Object> IrregexpExec(Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
Handle<JSArray> lastMatchInfo);
+ // Set last match info. If match is NULL, then setting captures is omitted.
+ static Handle<JSArray> SetLastMatchInfo(Handle<JSArray> last_match_info,
+ Handle<String> subject,
+ int capture_count,
+ int32_t* match);
+
+
+ class GlobalCache {
+ public:
+ GlobalCache(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ bool is_global,
+ Isolate* isolate);
+
+ ~GlobalCache();
+
+ // Fetch the next entry in the cache for global regexp match results.
+ // This does not set the last match info. Upon failure, NULL is returned.
+ // The cause can be checked with Result(). The previous
+ // result is still in available in memory when a failure happens.
+ int32_t* FetchNext();
+
+ int32_t* LastSuccessfulMatch();
+
+ inline bool HasException() { return num_matches_ < 0; }
+
+ private:
+ int num_matches_;
+ int max_matches_;
+ int current_match_index_;
+ int registers_per_match_;
+ // Pointer to the last set of captures.
+ int32_t* register_array_;
+ int register_array_size_;
+ Handle<JSRegExp> regexp_;
+ Handle<String> subject_;
+ };
+
+
// Array index in the lastMatchInfo array.
static const int kLastCaptureCount = 0;
static const int kLastSubject = 1;
@@ -188,30 +237,10 @@ class RegExpImpl {
static const int kRegWxpCompiledLimit = 1 * MB;
private:
- static String* last_ascii_string_;
- static String* two_byte_cached_string_;
-
static bool CompileIrregexp(
Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii);
static inline bool EnsureCompiledIrregexp(
Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii);
-
-
- // Set the subject cache. The previous string buffer is not deleted, so the
- // caller should ensure that it doesn't leak.
- static void SetSubjectCache(String* subject,
- char* utf8_subject,
- int uft8_length,
- int character_position,
- int utf8_position);
-
- // A one element cache of the last utf8_subject string and its length. The
- // subject JS String object is cached in the heap. We also cache a
- // translation between position and utf8 position.
- static char* utf8_subject_cache_;
- static int utf8_length_cache_;
- static int utf8_position_;
- static int character_position_;
};
@@ -233,7 +262,8 @@ class CharacterRange {
// For compatibility with the CHECK_OK macro
CharacterRange(void* null) { ASSERT_EQ(NULL, null); } //NOLINT
CharacterRange(uc16 from, uc16 to) : from_(from), to_(to) { }
- static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges);
+ static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges,
+ Zone* zone);
static Vector<const int> GetWordBounds();
static inline CharacterRange Singleton(uc16 value) {
return CharacterRange(value, value);
@@ -253,11 +283,13 @@ class CharacterRange {
bool is_valid() { return from_ <= to_; }
bool IsEverything(uc16 max) { return from_ == 0 && to_ >= max; }
bool IsSingleton() { return (from_ == to_); }
- void AddCaseEquivalents(ZoneList<CharacterRange>* ranges, bool is_ascii);
+ void AddCaseEquivalents(ZoneList<CharacterRange>* ranges, bool is_ascii,
+ Zone* zone);
static void Split(ZoneList<CharacterRange>* base,
Vector<const int> overlay,
ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded);
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone);
// Whether a range list is in canonical form: Ranges ordered by from value,
// and ranges non-overlapping and non-adjacent.
static bool IsCanonical(ZoneList<CharacterRange>* ranges);
@@ -268,7 +300,8 @@ class CharacterRange {
static void Canonicalize(ZoneList<CharacterRange>* ranges);
// Negate the contents of a character range in canonical form.
static void Negate(ZoneList<CharacterRange>* src,
- ZoneList<CharacterRange>* dst);
+ ZoneList<CharacterRange>* dst,
+ Zone* zone);
static const int kStartMarker = (1 << 24);
static const int kPayloadMask = (1 << 24) - 1;
@@ -283,7 +316,7 @@ class CharacterRange {
class OutSet: public ZoneObject {
public:
OutSet() : first_(0), remaining_(NULL), successors_(NULL) { }
- OutSet* Extend(unsigned value);
+ OutSet* Extend(unsigned value, Zone* zone);
bool Get(unsigned value);
static const unsigned kFirstLimit = 32;
@@ -291,12 +324,12 @@ class OutSet: public ZoneObject {
// Destructively set a value in this set. In most cases you want
// to use Extend instead to ensure that only one instance exists
// that contains the same values.
- void Set(unsigned value);
+ void Set(unsigned value, Zone* zone);
// The successors are a list of sets that contain the same values
// as this set and the one more value that is not present in this
// set.
- ZoneList<OutSet*>* successors() { return successors_; }
+ ZoneList<OutSet*>* successors(Zone* zone) { return successors_; }
OutSet(uint32_t first, ZoneList<unsigned>* remaining)
: first_(first), remaining_(remaining), successors_(NULL) { }
@@ -311,6 +344,8 @@ class OutSet: public ZoneObject {
// Used for mapping character ranges to choices.
class DispatchTable : public ZoneObject {
public:
+ explicit DispatchTable(Zone* zone) : tree_(zone) { }
+
class Entry {
public:
Entry() : from_(0), to_(0), out_set_(NULL) { }
@@ -319,7 +354,9 @@ class DispatchTable : public ZoneObject {
uc16 from() { return from_; }
uc16 to() { return to_; }
void set_to(uc16 value) { to_ = value; }
- void AddValue(int value) { out_set_ = out_set_->Extend(value); }
+ void AddValue(int value, Zone* zone) {
+ out_set_ = out_set_->Extend(value, zone);
+ }
OutSet* out_set() { return out_set_; }
private:
uc16 from_;
@@ -343,12 +380,14 @@ class DispatchTable : public ZoneObject {
}
};
- void AddRange(CharacterRange range, int value);
+ void AddRange(CharacterRange range, int value, Zone* zone);
OutSet* Get(uc16 value);
void Dump();
template <typename Callback>
- void ForEach(Callback* callback) { return tree()->ForEach(callback); }
+ void ForEach(Callback* callback) {
+ return tree()->ForEach(callback);
+ }
private:
// There can't be a static empty set since it allocates its
@@ -528,7 +567,8 @@ extern int kUninitializedRegExpNodePlaceHolder;
class RegExpNode: public ZoneObject {
public:
- RegExpNode() : replacement_(NULL), trace_count_(0) {
+ explicit RegExpNode(Zone* zone)
+ : replacement_(NULL), trace_count_(0), zone_(zone) {
bm_info_[0] = bm_info_[1] = NULL;
}
virtual ~RegExpNode();
@@ -574,9 +614,14 @@ class RegExpNode: public ZoneObject {
// Collects information on the possible code units (mod 128) that can match if
// we look forward. This is used for a Boyer-Moore-like string searching
// implementation. TODO(erikcorry): This should share more code with
- // EatsAtLeast, GetQuickCheckDetails.
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ // EatsAtLeast, GetQuickCheckDetails. The budget argument is used to limit
+ // the number of nodes we are willing to look at in order to create this data.
+ static const int kFillInBMBudget = 200;
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
UNREACHABLE();
}
@@ -617,6 +662,8 @@ class RegExpNode: public ZoneObject {
return bm_info_[not_at_start ? 1 : 0];
}
+ Zone* zone() const { return zone_; }
+
protected:
enum LimitResult { DONE, CONTINUE };
RegExpNode* replacement_;
@@ -638,6 +685,8 @@ class RegExpNode: public ZoneObject {
// deferred operations in the current trace and generating a goto.
int trace_count_;
BoyerMooreLookahead* bm_info_[2];
+
+ Zone* zone_;
};
@@ -671,13 +720,17 @@ class Interval {
class SeqRegExpNode: public RegExpNode {
public:
explicit SeqRegExpNode(RegExpNode* on_success)
- : on_success_(on_success) { }
+ : RegExpNode(on_success->zone()), on_success_(on_success) { }
RegExpNode* on_success() { return on_success_; }
void set_on_success(RegExpNode* node) { on_success_ = node; }
virtual RegExpNode* FilterASCII(int depth);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
- on_success_->FillInBMInfo(offset, bm, not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ on_success_->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
if (offset == 0) set_bm_info(not_at_start, bm);
}
@@ -730,8 +783,11 @@ class ActionNode: public SeqRegExpNode {
return on_success()->GetQuickCheckDetails(
details, compiler, filled_in, not_at_start);
}
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
Type type() { return type_; }
// TODO(erikcorry): We should allow some action nodes in greedy loops.
virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
@@ -782,8 +838,8 @@ class TextNode: public SeqRegExpNode {
TextNode(RegExpCharacterClass* that,
RegExpNode* on_success)
: SeqRegExpNode(on_success),
- elms_(new ZoneList<TextElement>(1)) {
- elms_->Add(TextElement::CharClass(that));
+ elms_(new(zone()) ZoneList<TextElement>(1, zone())) {
+ elms_->Add(TextElement::CharClass(that), zone());
}
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -799,8 +855,11 @@ class TextNode: public SeqRegExpNode {
virtual int GreedyLoopTextLength();
virtual RegExpNode* GetSuccessorOfOmnivorousTextNode(
RegExpCompiler* compiler);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
void CalculateOffsets();
virtual RegExpNode* FilterASCII(int depth);
@@ -836,19 +895,19 @@ class AssertionNode: public SeqRegExpNode {
AFTER_NEWLINE
};
static AssertionNode* AtEnd(RegExpNode* on_success) {
- return new AssertionNode(AT_END, on_success);
+ return new(on_success->zone()) AssertionNode(AT_END, on_success);
}
static AssertionNode* AtStart(RegExpNode* on_success) {
- return new AssertionNode(AT_START, on_success);
+ return new(on_success->zone()) AssertionNode(AT_START, on_success);
}
static AssertionNode* AtBoundary(RegExpNode* on_success) {
- return new AssertionNode(AT_BOUNDARY, on_success);
+ return new(on_success->zone()) AssertionNode(AT_BOUNDARY, on_success);
}
static AssertionNode* AtNonBoundary(RegExpNode* on_success) {
- return new AssertionNode(AT_NON_BOUNDARY, on_success);
+ return new(on_success->zone()) AssertionNode(AT_NON_BOUNDARY, on_success);
}
static AssertionNode* AfterNewline(RegExpNode* on_success) {
- return new AssertionNode(AFTER_NEWLINE, on_success);
+ return new(on_success->zone()) AssertionNode(AFTER_NEWLINE, on_success);
}
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -859,8 +918,11 @@ class AssertionNode: public SeqRegExpNode {
RegExpCompiler* compiler,
int filled_in,
bool not_at_start);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
AssertionNodeType type() { return type_; }
void set_type(AssertionNodeType type) { type_ = type; }
@@ -897,8 +959,11 @@ class BackReferenceNode: public SeqRegExpNode {
bool not_at_start) {
return;
}
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
private:
int start_reg_;
@@ -909,7 +974,8 @@ class BackReferenceNode: public SeqRegExpNode {
class EndNode: public RegExpNode {
public:
enum Action { ACCEPT, BACKTRACK, NEGATIVE_SUBMATCH_SUCCESS };
- explicit EndNode(Action action) : action_(action) { }
+ explicit EndNode(Action action, Zone* zone)
+ : RegExpNode(zone), action_(action) { }
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
virtual int EatsAtLeast(int still_to_find,
@@ -922,8 +988,11 @@ class EndNode: public RegExpNode {
// Returning 0 from EatsAtLeast should ensure we never get here.
UNREACHABLE();
}
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
// Returning 0 from EatsAtLeast should ensure we never get here.
UNREACHABLE();
}
@@ -938,8 +1007,9 @@ class NegativeSubmatchSuccess: public EndNode {
NegativeSubmatchSuccess(int stack_pointer_reg,
int position_reg,
int clear_capture_count,
- int clear_capture_start)
- : EndNode(NEGATIVE_SUBMATCH_SUCCESS),
+ int clear_capture_start,
+ Zone* zone)
+ : EndNode(NEGATIVE_SUBMATCH_SUCCESS, zone),
stack_pointer_register_(stack_pointer_reg),
current_position_register_(position_reg),
clear_capture_count_(clear_capture_count),
@@ -975,7 +1045,7 @@ class Guard: public ZoneObject {
class GuardedAlternative {
public:
explicit GuardedAlternative(RegExpNode* node) : node_(node), guards_(NULL) { }
- void AddGuard(Guard* guard);
+ void AddGuard(Guard* guard, Zone* zone);
RegExpNode* node() { return node_; }
void set_node(RegExpNode* node) { node_ = node; }
ZoneList<Guard*>* guards() { return guards_; }
@@ -991,13 +1061,17 @@ class AlternativeGeneration;
class ChoiceNode: public RegExpNode {
public:
- explicit ChoiceNode(int expected_size)
- : alternatives_(new ZoneList<GuardedAlternative>(expected_size)),
+ explicit ChoiceNode(int expected_size, Zone* zone)
+ : RegExpNode(zone),
+ alternatives_(new(zone)
+ ZoneList<GuardedAlternative>(expected_size, zone)),
table_(NULL),
not_at_start_(false),
being_calculated_(false) { }
virtual void Accept(NodeVisitor* visitor);
- void AddAlternative(GuardedAlternative node) { alternatives()->Add(node); }
+ void AddAlternative(GuardedAlternative node) {
+ alternatives()->Add(node, zone());
+ }
ZoneList<GuardedAlternative>* alternatives() { return alternatives_; }
DispatchTable* GetTable(bool ignore_case);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -1012,8 +1086,11 @@ class ChoiceNode: public RegExpNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
bool being_calculated() { return being_calculated_; }
bool not_at_start() { return not_at_start_; }
@@ -1050,8 +1127,9 @@ class ChoiceNode: public RegExpNode {
class NegativeLookaheadChoiceNode: public ChoiceNode {
public:
explicit NegativeLookaheadChoiceNode(GuardedAlternative this_must_fail,
- GuardedAlternative then_do_this)
- : ChoiceNode(2) {
+ GuardedAlternative then_do_this,
+ Zone* zone)
+ : ChoiceNode(2, zone) {
AddAlternative(this_must_fail);
AddAlternative(then_do_this);
}
@@ -1062,9 +1140,13 @@ class NegativeLookaheadChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start) {
- alternatives_->at(1).node()->FillInBMInfo(offset, bm, not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ alternatives_->at(1).node()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
if (offset == 0) set_bm_info(not_at_start, bm);
}
// For a negative lookahead we don't emit the quick check for the
@@ -1079,8 +1161,8 @@ class NegativeLookaheadChoiceNode: public ChoiceNode {
class LoopChoiceNode: public ChoiceNode {
public:
- explicit LoopChoiceNode(bool body_can_be_zero_length)
- : ChoiceNode(2),
+ explicit LoopChoiceNode(bool body_can_be_zero_length, Zone* zone)
+ : ChoiceNode(2, zone),
loop_node_(NULL),
continue_node_(NULL),
body_can_be_zero_length_(body_can_be_zero_length) { }
@@ -1094,8 +1176,11 @@ class LoopChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
- virtual void FillInBMInfo(
- int offset, BoyerMooreLookahead* bm, bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
RegExpNode* loop_node() { return loop_node_; }
RegExpNode* continue_node() { return continue_node_; }
bool body_can_be_zero_length() { return body_can_be_zero_length_; }
@@ -1162,15 +1247,15 @@ ContainedInLattice AddRange(ContainedInLattice a,
class BoyerMoorePositionInfo : public ZoneObject {
public:
- BoyerMoorePositionInfo()
- : map_(new ZoneList<bool>(kMapSize)),
+ explicit BoyerMoorePositionInfo(Zone* zone)
+ : map_(new(zone) ZoneList<bool>(kMapSize, zone)),
map_count_(0),
w_(kNotYet),
s_(kNotYet),
d_(kNotYet),
surrogate_(kNotYet) {
for (int i = 0; i < kMapSize; i++) {
- map_->Add(false);
+ map_->Add(false, zone);
}
}
@@ -1199,7 +1284,7 @@ class BoyerMoorePositionInfo : public ZoneObject {
class BoyerMooreLookahead : public ZoneObject {
public:
- BoyerMooreLookahead(int length, RegExpCompiler* compiler);
+ BoyerMooreLookahead(int length, RegExpCompiler* compiler, Zone* zone);
int length() { return length_; }
int max_char() { return max_char_; }
@@ -1401,12 +1486,13 @@ class Trace {
void AdvanceCurrentPositionInTrace(int by, RegExpCompiler* compiler);
private:
- int FindAffectedRegisters(OutSet* affected_registers);
+ int FindAffectedRegisters(OutSet* affected_registers, Zone* zone);
void PerformDeferredActions(RegExpMacroAssembler* macro,
- int max_register,
- OutSet& affected_registers,
- OutSet* registers_to_pop,
- OutSet* registers_to_clear);
+ int max_register,
+ OutSet& affected_registers,
+ OutSet* registers_to_pop,
+ OutSet* registers_to_clear,
+ Zone* zone);
void RestoreAffectedRegisters(RegExpMacroAssembler* macro,
int max_register,
OutSet& registers_to_pop,
@@ -1439,15 +1525,17 @@ FOR_EACH_NODE_TYPE(DECLARE_VISIT)
// dispatch table of a choice node.
class DispatchTableConstructor: public NodeVisitor {
public:
- DispatchTableConstructor(DispatchTable* table, bool ignore_case)
+ DispatchTableConstructor(DispatchTable* table, bool ignore_case,
+ Zone* zone)
: table_(table),
choice_index_(-1),
- ignore_case_(ignore_case) { }
+ ignore_case_(ignore_case),
+ zone_(zone) { }
void BuildTable(ChoiceNode* node);
void AddRange(CharacterRange range) {
- table()->AddRange(range, choice_index_);
+ table()->AddRange(range, choice_index_, zone_);
}
void AddInverse(ZoneList<CharacterRange>* ranges);
@@ -1464,6 +1552,7 @@ FOR_EACH_NODE_TYPE(DECLARE_VISIT)
DispatchTable* table_;
int choice_index_;
bool ignore_case_;
+ Zone* zone_;
};
@@ -1545,48 +1634,16 @@ class RegExpEngine: public AllStatic {
static CompilationResult Compile(RegExpCompileData* input,
bool ignore_case,
+ bool global,
bool multiline,
Handle<String> pattern,
Handle<String> sample_subject,
- bool is_ascii);
+ bool is_ascii, Zone* zone);
static void DotPrint(const char* label, RegExpNode* node, bool ignore_case);
};
-class OffsetsVector {
- public:
- inline OffsetsVector(int num_registers, Isolate* isolate)
- : offsets_vector_length_(num_registers) {
- if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
- vector_ = NewArray<int>(offsets_vector_length_);
- } else {
- vector_ = isolate->jsregexp_static_offsets_vector();
- }
- }
- inline ~OffsetsVector() {
- if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
- DeleteArray(vector_);
- vector_ = NULL;
- }
- }
- inline int* vector() { return vector_; }
- inline int length() { return offsets_vector_length_; }
-
- static const int kStaticOffsetsVectorSize = 50;
-
- private:
- static Address static_offsets_vector_address(Isolate* isolate) {
- return reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector());
- }
-
- int* vector_;
- int offsets_vector_length_;
-
- friend class ExternalReference;
-};
-
-
} } // namespace v8::internal
#endif // V8_JSREGEXP_H_
diff --git a/src/3rdparty/v8/src/list-inl.h b/src/3rdparty/v8/src/list-inl.h
index 6cf3bad..60a033d 100644
--- a/src/3rdparty/v8/src/list-inl.h
+++ b/src/3rdparty/v8/src/list-inl.h
@@ -35,25 +35,25 @@ namespace internal {
template<typename T, class P>
-void List<T, P>::Add(const T& element) {
+void List<T, P>::Add(const T& element, P alloc) {
if (length_ < capacity_) {
data_[length_++] = element;
} else {
- List<T, P>::ResizeAdd(element);
+ List<T, P>::ResizeAdd(element, alloc);
}
}
template<typename T, class P>
-void List<T, P>::AddAll(const List<T, P>& other) {
- AddAll(other.ToVector());
+void List<T, P>::AddAll(const List<T, P>& other, P alloc) {
+ AddAll(other.ToVector(), alloc);
}
template<typename T, class P>
-void List<T, P>::AddAll(const Vector<T>& other) {
+void List<T, P>::AddAll(const Vector<T>& other, P alloc) {
int result_length = length_ + other.length();
- if (capacity_ < result_length) Resize(result_length);
+ if (capacity_ < result_length) Resize(result_length, alloc);
for (int i = 0; i < other.length(); i++) {
data_[length_ + i] = other.at(i);
}
@@ -64,13 +64,13 @@ void List<T, P>::AddAll(const Vector<T>& other) {
// Use two layers of inlining so that the non-inlined function can
// use the same implementation as the inlined version.
template<typename T, class P>
-void List<T, P>::ResizeAdd(const T& element) {
- ResizeAddInternal(element);
+void List<T, P>::ResizeAdd(const T& element, P alloc) {
+ ResizeAddInternal(element, alloc);
}
template<typename T, class P>
-void List<T, P>::ResizeAddInternal(const T& element) {
+void List<T, P>::ResizeAddInternal(const T& element, P alloc) {
ASSERT(length_ >= capacity_);
// Grow the list capacity by 100%, but make sure to let it grow
// even when the capacity is zero (possible initial case).
@@ -78,14 +78,14 @@ void List<T, P>::ResizeAddInternal(const T& element) {
// Since the element reference could be an element of the list, copy
// it out of the old backing storage before resizing.
T temp = element;
- Resize(new_capacity);
+ Resize(new_capacity, alloc);
data_[length_++] = temp;
}
template<typename T, class P>
-void List<T, P>::Resize(int new_capacity) {
- T* new_data = List<T, P>::NewData(new_capacity);
+void List<T, P>::Resize(int new_capacity, P alloc) {
+ T* new_data = NewData(new_capacity, alloc);
memcpy(new_data, data_, capacity_ * sizeof(T));
List<T, P>::DeleteData(data_);
data_ = new_data;
@@ -94,17 +94,17 @@ void List<T, P>::Resize(int new_capacity) {
template<typename T, class P>
-Vector<T> List<T, P>::AddBlock(T value, int count) {
+Vector<T> List<T, P>::AddBlock(T value, int count, P alloc) {
int start = length_;
- for (int i = 0; i < count; i++) Add(value);
+ for (int i = 0; i < count; i++) Add(value, alloc);
return Vector<T>(&data_[start], count);
}
template<typename T, class P>
-void List<T, P>::InsertAt(int index, const T& elm) {
+void List<T, P>::InsertAt(int index, const T& elm, P alloc) {
ASSERT(index >= 0 && index <= length_);
- Add(elm);
+ Add(elm, alloc);
for (int i = length_ - 1; i > index; --i) {
data_[i] = data_[i - 1];
}
@@ -137,9 +137,9 @@ bool List<T, P>::RemoveElement(const T& elm) {
template<typename T, class P>
-void List<T, P>::Allocate(int length) {
+void List<T, P>::Allocate(int length, P allocator) {
DeleteData(data_);
- Initialize(length);
+ Initialize(length, allocator);
length_ = length;
}
@@ -147,7 +147,11 @@ void List<T, P>::Allocate(int length) {
template<typename T, class P>
void List<T, P>::Clear() {
DeleteData(data_);
- Initialize(0);
+ // We don't call Initialize(0) since that requires passing a Zone,
+ // which we don't really need.
+ data_ = NULL;
+ capacity_ = 0;
+ length_ = 0;
}
@@ -207,9 +211,9 @@ void List<T, P>::Sort() {
template<typename T, class P>
-void List<T, P>::Initialize(int capacity) {
+void List<T, P>::Initialize(int capacity, P allocator) {
ASSERT(capacity >= 0);
- data_ = (capacity > 0) ? NewData(capacity) : NULL;
+ data_ = (capacity > 0) ? NewData(capacity, allocator) : NULL;
capacity_ = capacity;
length_ = 0;
}
diff --git a/src/3rdparty/v8/src/list.h b/src/3rdparty/v8/src/list.h
index 7350c0d..7fd4f5c 100644
--- a/src/3rdparty/v8/src/list.h
+++ b/src/3rdparty/v8/src/list.h
@@ -45,12 +45,18 @@ namespace internal {
// the C free store or the zone; see zone.h.
// Forward defined as
-// template <typename T, class P = FreeStoreAllocationPolicy> class List;
-template <typename T, class P>
+// template <typename T,
+// class AllocationPolicy = FreeStoreAllocationPolicy> class List;
+template <typename T, class AllocationPolicy>
class List {
public:
- List() { Initialize(0); }
- INLINE(explicit List(int capacity)) { Initialize(capacity); }
+ explicit List(AllocationPolicy allocator = AllocationPolicy()) {
+ Initialize(0, allocator);
+ }
+ INLINE(explicit List(int capacity,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ Initialize(capacity, allocator);
+ }
INLINE(~List()) { DeleteData(data_); }
// Deallocates memory used by the list and leaves the list in a consistent
@@ -60,10 +66,18 @@ class List {
Initialize(0);
}
- INLINE(void* operator new(size_t size)) {
- return P::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ return allocator.New(static_cast<int>(size));
+ }
+ INLINE(void operator delete(void* p)) {
+ AllocationPolicy::Delete(p);
+ }
+
+ // Please the MSVC compiler. We should never have to execute this.
+ INLINE(void operator delete(void* p, AllocationPolicy allocator)) {
+ UNREACHABLE();
}
- INLINE(void operator delete(void* p, size_t)) { return P::Delete(p); }
// Returns a reference to the element at index i. This reference is
// not safe to use after operations that can change the list's
@@ -87,21 +101,25 @@ class List {
// Adds a copy of the given 'element' to the end of the list,
// expanding the list if necessary.
- void Add(const T& element);
+ void Add(const T& element, AllocationPolicy allocator = AllocationPolicy());
// Add all the elements from the argument list to this list.
- void AddAll(const List<T, P>& other);
+ void AddAll(const List<T, AllocationPolicy>& other,
+ AllocationPolicy allocator = AllocationPolicy());
// Add all the elements from the vector to this list.
- void AddAll(const Vector<T>& other);
+ void AddAll(const Vector<T>& other,
+ AllocationPolicy allocator = AllocationPolicy());
// Inserts the element at the specific index.
- void InsertAt(int index, const T& element);
+ void InsertAt(int index, const T& element,
+ AllocationPolicy allocator = AllocationPolicy());
// Added 'count' elements with the value 'value' and returns a
// vector that allows access to the elements. The vector is valid
// until the next change is made to this list.
- Vector<T> AddBlock(T value, int count);
+ Vector<T> AddBlock(T value, int count,
+ AllocationPolicy allocator = AllocationPolicy());
// Removes the i'th element without deleting it even if T is a
// pointer type; moves all elements above i "down". Returns the
@@ -118,7 +136,8 @@ class List {
INLINE(T RemoveLast()) { return Remove(length_ - 1); }
// Deletes current list contents and allocates space for 'length' elements.
- INLINE(void Allocate(int length));
+ INLINE(void Allocate(int length,
+ AllocationPolicy allocator = AllocationPolicy()));
// Clears the list by setting the length to zero. Even if T is a
// pointer type, clearing the list doesn't delete the entries.
@@ -142,26 +161,31 @@ class List {
void Sort(int (*cmp)(const T* x, const T* y));
void Sort();
- INLINE(void Initialize(int capacity));
+ INLINE(void Initialize(int capacity,
+ AllocationPolicy allocator = AllocationPolicy()));
private:
T* data_;
int capacity_;
int length_;
- INLINE(T* NewData(int n)) { return static_cast<T*>(P::New(n * sizeof(T))); }
- INLINE(void DeleteData(T* data)) { P::Delete(data); }
+ INLINE(T* NewData(int n, AllocationPolicy allocator)) {
+ return static_cast<T*>(allocator.New(n * sizeof(T)));
+ }
+ INLINE(void DeleteData(T* data)) {
+ AllocationPolicy::Delete(data);
+ }
// Increase the capacity of a full list, and add an element.
// List must be full already.
- void ResizeAdd(const T& element);
+ void ResizeAdd(const T& element, AllocationPolicy allocator);
// Inlined implementation of ResizeAdd, shared by inlined and
// non-inlined versions of ResizeAdd.
- void ResizeAddInternal(const T& element);
+ void ResizeAddInternal(const T& element, AllocationPolicy allocator);
// Resize the list.
- void Resize(int new_capacity);
+ void Resize(int new_capacity, AllocationPolicy allocator);
DISALLOW_COPY_AND_ASSIGN(List);
};
diff --git a/src/3rdparty/v8/src/lithium-allocator.cc b/src/3rdparty/v8/src/lithium-allocator.cc
index 9534f9e..91a9811 100644
--- a/src/3rdparty/v8/src/lithium-allocator.cc
+++ b/src/3rdparty/v8/src/lithium-allocator.cc
@@ -230,9 +230,9 @@ LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
if (HasRegisterAssigned()) {
ASSERT(!IsSpilled());
if (IsDouble()) {
- op = LDoubleRegister::Create(assigned_register());
+ op = LDoubleRegister::Create(assigned_register(), zone);
} else {
- op = LRegister::Create(assigned_register());
+ op = LRegister::Create(assigned_register(), zone);
}
} else if (IsSpilled()) {
ASSERT(!HasRegisterAssigned());
@@ -533,14 +533,14 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
LAllocator::LAllocator(int num_values, HGraph* graph)
: zone_(graph->zone()),
chunk_(NULL),
- live_in_sets_(graph->blocks()->length()),
- live_ranges_(num_values * 2),
+ live_in_sets_(graph->blocks()->length(), zone_),
+ live_ranges_(num_values * 2, zone_),
fixed_live_ranges_(NULL),
fixed_double_live_ranges_(NULL),
- unhandled_live_ranges_(num_values * 2),
- active_live_ranges_(8),
- inactive_live_ranges_(8),
- reusable_slots_(8),
+ unhandled_live_ranges_(num_values * 2, zone_),
+ active_live_ranges_(8, zone_),
+ inactive_live_ranges_(8, zone_),
+ reusable_slots_(8, zone_),
next_virtual_register_(num_values),
first_artificial_register_(num_values),
mode_(GENERAL_REGISTERS),
@@ -553,8 +553,8 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
void LAllocator::InitializeLivenessAnalysis() {
// Initialize the live_in sets for each block to NULL.
int block_count = graph_->blocks()->length();
- live_in_sets_.Initialize(block_count);
- live_in_sets_.AddBlock(NULL, block_count);
+ live_in_sets_.Initialize(block_count, zone());
+ live_in_sets_.AddBlock(NULL, block_count, zone());
}
@@ -630,7 +630,7 @@ LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
TraceAlloc("Fixed reg is tagged at %d\n", pos);
LInstruction* instr = InstructionAt(pos);
if (instr->HasPointerMap()) {
- instr->pointer_map()->RecordPointer(operand);
+ instr->pointer_map()->RecordPointer(operand, zone());
}
}
return operand;
@@ -665,7 +665,7 @@ LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
LiveRange* LAllocator::LiveRangeFor(int index) {
if (index >= live_ranges_.length()) {
- live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
+ live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
}
LiveRange* result = live_ranges_[index];
if (result == NULL) {
@@ -746,7 +746,7 @@ void LAllocator::AddConstraintsGapMove(int index,
LOperand* from,
LOperand* to) {
LGap* gap = GapAt(index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
if (from->IsUnallocated()) {
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
for (int i = 0; i < move_operands->length(); ++i) {
@@ -755,13 +755,13 @@ void LAllocator::AddConstraintsGapMove(int index,
if (cur_to->IsUnallocated()) {
if (LUnallocated::cast(cur_to)->virtual_register() ==
LUnallocated::cast(from)->virtual_register()) {
- move->AddMove(cur.source(), to);
+ move->AddMove(cur.source(), to, zone());
return;
}
}
}
}
- move->AddMove(from, to);
+ move->AddMove(from, to, zone());
}
@@ -800,7 +800,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
LiveRange* range = LiveRangeFor(first_output->virtual_register());
bool assigned = false;
if (first_output->HasFixedPolicy()) {
- LUnallocated* output_copy = first_output->CopyUnconstrained();
+ LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
bool is_tagged = HasTaggedValue(first_output->virtual_register());
AllocateFixed(first_output, gap_index, is_tagged);
@@ -821,8 +821,8 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
// Thus it should be inserted to a lifetime position corresponding to
// the instruction end.
LGap* gap = GapAt(gap_index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
- move->AddMove(first_output, range->GetSpillOperand());
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
+ move->AddMove(first_output, range->GetSpillOperand(), zone());
}
}
@@ -831,7 +831,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
for (UseIterator it(second); !it.Done(); it.Advance()) {
LUnallocated* cur_input = LUnallocated::cast(it.Current());
if (cur_input->HasFixedPolicy()) {
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
bool is_tagged = HasTaggedValue(cur_input->virtual_register());
AllocateFixed(cur_input, gap_index + 1, is_tagged);
AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -840,7 +840,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
// of the instruction.
ASSERT(!cur_input->IsUsedAtStart());
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
cur_input->set_virtual_register(GetVirtualRegister());
if (!AllocationOk()) return;
@@ -864,7 +864,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
int output_vreg = second_output->virtual_register();
int input_vreg = cur_input->virtual_register();
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
cur_input->set_virtual_register(second_output->virtual_register());
AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -872,7 +872,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
int index = gap_index + 1;
LInstruction* instr = InstructionAt(index);
if (instr->HasPointerMap()) {
- instr->pointer_map()->RecordPointer(input_copy);
+ instr->pointer_map()->RecordPointer(input_copy, zone());
}
} else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
// The input is assumed to immediately have a tagged representation,
@@ -901,7 +901,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
if (IsGapAt(index)) {
// We have a gap at this position.
LGap* gap = GapAt(index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
for (int i = 0; i < move_operands->length(); ++i) {
LMoveOperands* cur = &move_operands->at(i);
@@ -1046,17 +1046,17 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
InstructionAt(cur_block->last_instruction_index());
if (branch->HasPointerMap()) {
if (phi->representation().IsTagged()) {
- branch->pointer_map()->RecordPointer(phi_operand);
+ branch->pointer_map()->RecordPointer(phi_operand, zone());
} else if (!phi->representation().IsDouble()) {
- branch->pointer_map()->RecordUntagged(phi_operand);
+ branch->pointer_map()->RecordUntagged(phi_operand, zone());
}
}
}
LiveRange* live_range = LiveRangeFor(phi->id());
LLabel* label = chunk_->GetLabel(phi->block()->block_id());
- label->GetOrCreateParallelMove(LGap::START)->
- AddMove(phi_operand, live_range->GetSpillOperand());
+ label->GetOrCreateParallelMove(LGap::START, zone())->
+ AddMove(phi_operand, live_range->GetSpillOperand(), zone());
live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
}
}
@@ -1064,7 +1064,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
bool LAllocator::Allocate(LChunk* chunk) {
ASSERT(chunk_ == NULL);
- chunk_ = chunk;
+ chunk_ = static_cast<LPlatformChunk*>(chunk);
MeetRegisterConstraints();
if (!AllocationOk()) return false;
ResolvePhis();
@@ -1151,14 +1151,15 @@ void LAllocator::ResolveControlFlow(LiveRange* range,
LInstruction* branch = InstructionAt(pred->last_instruction_index());
if (branch->HasPointerMap()) {
if (HasTaggedValue(range->id())) {
- branch->pointer_map()->RecordPointer(cur_op);
+ branch->pointer_map()->RecordPointer(cur_op, zone());
} else if (!cur_op->IsDoubleStackSlot() &&
!cur_op->IsDoubleRegister()) {
branch->pointer_map()->RemovePointer(cur_op);
}
}
}
- gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
+ gap->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(pred_op, cur_op, zone());
}
}
}
@@ -1169,11 +1170,11 @@ LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
if (IsGapAt(index)) {
LGap* gap = GapAt(index);
return gap->GetOrCreateParallelMove(
- pos.IsInstructionStart() ? LGap::START : LGap::END);
+ pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
}
int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
return GapAt(gap_pos)->GetOrCreateParallelMove(
- (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
+ (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
}
@@ -1205,7 +1206,7 @@ void LAllocator::ConnectRanges() {
LParallelMove* move = GetConnectingParallelMove(pos);
LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
- move->AddMove(prev_operand, cur_operand);
+ move->AddMove(prev_operand, cur_operand, zone());
}
}
}
@@ -1270,7 +1271,7 @@ void LAllocator::BuildLiveRanges() {
LOperand* hint = NULL;
LOperand* phi_operand = NULL;
LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
for (int j = 0; j < move->move_operands()->length(); ++j) {
LOperand* to = move->move_operands()->at(j).destination();
if (to->IsUnallocated() &&
@@ -1421,7 +1422,7 @@ void LAllocator::PopulatePointerMaps() {
safe_point >= range->spill_start_index()) {
TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
range->id(), range->spill_start_index(), safe_point);
- map->RecordPointer(range->GetSpillOperand());
+ map->RecordPointer(range->GetSpillOperand(), zone());
}
if (!cur->IsSpilled()) {
@@ -1430,7 +1431,7 @@ void LAllocator::PopulatePointerMaps() {
cur->id(), cur->Start().Value(), safe_point);
LOperand* operand = cur->CreateAssignedOperand(zone_);
ASSERT(!operand->IsStackSlot());
- map->RecordPointer(operand);
+ map->RecordPointer(operand, zone());
}
}
}
@@ -1632,13 +1633,13 @@ RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
void LAllocator::AddToActive(LiveRange* range) {
TraceAlloc("Add live range %d to active\n", range->id());
- active_live_ranges_.Add(range);
+ active_live_ranges_.Add(range, zone());
}
void LAllocator::AddToInactive(LiveRange* range) {
TraceAlloc("Add live range %d to inactive\n", range->id());
- inactive_live_ranges_.Add(range);
+ inactive_live_ranges_.Add(range, zone());
}
@@ -1649,13 +1650,13 @@ void LAllocator::AddToUnhandledSorted(LiveRange* range) {
LiveRange* cur_range = unhandled_live_ranges_.at(i);
if (range->ShouldBeAllocatedBefore(cur_range)) {
TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
- unhandled_live_ranges_.InsertAt(i + 1, range);
+ unhandled_live_ranges_.InsertAt(i + 1, range, zone());
ASSERT(UnhandledIsSorted());
return;
}
}
TraceAlloc("Add live range %d to unhandled at start\n", range->id());
- unhandled_live_ranges_.InsertAt(0, range);
+ unhandled_live_ranges_.InsertAt(0, range, zone());
ASSERT(UnhandledIsSorted());
}
@@ -1664,7 +1665,7 @@ void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
if (range == NULL || range->IsEmpty()) return;
ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
- unhandled_live_ranges_.Add(range);
+ unhandled_live_ranges_.Add(range, zone());
}
@@ -1705,7 +1706,7 @@ void LAllocator::FreeSpillSlot(LiveRange* range) {
int index = range->TopLevel()->GetSpillOperand()->index();
if (index >= 0) {
- reusable_slots_.Add(range);
+ reusable_slots_.Add(range, zone());
}
}
@@ -1733,7 +1734,7 @@ void LAllocator::ActiveToHandled(LiveRange* range) {
void LAllocator::ActiveToInactive(LiveRange* range) {
ASSERT(active_live_ranges_.Contains(range));
active_live_ranges_.RemoveElement(range);
- inactive_live_ranges_.Add(range);
+ inactive_live_ranges_.Add(range, zone());
TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}
@@ -1749,7 +1750,7 @@ void LAllocator::InactiveToHandled(LiveRange* range) {
void LAllocator::InactiveToActive(LiveRange* range) {
ASSERT(inactive_live_ranges_.Contains(range));
inactive_live_ranges_.RemoveElement(range);
- active_live_ranges_.Add(range);
+ active_live_ranges_.Add(range, zone());
TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}
diff --git a/src/3rdparty/v8/src/lithium-allocator.h b/src/3rdparty/v8/src/lithium-allocator.h
index f5ab055..5b05263 100644
--- a/src/3rdparty/v8/src/lithium-allocator.h
+++ b/src/3rdparty/v8/src/lithium-allocator.h
@@ -48,7 +48,7 @@ class BitVector;
class StringStream;
class LArgument;
-class LChunk;
+class LPlatformChunk;
class LOperand;
class LUnallocated;
class LConstantOperand;
@@ -455,8 +455,9 @@ class LAllocator BASE_EMBEDDED {
return &fixed_double_live_ranges_;
}
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
int GetVirtualRegister() {
if (next_virtual_register_ > LUnallocated::kMaxVirtualRegisters) {
@@ -597,7 +598,7 @@ class LAllocator BASE_EMBEDDED {
Zone* zone_;
- LChunk* chunk_;
+ LPlatformChunk* chunk_;
// During liveness analysis keep a mapping from block id to live_in sets
// for blocks already analyzed.
diff --git a/src/3rdparty/v8/src/lithium.cc b/src/3rdparty/v8/src/lithium.cc
index c41cce8..eb2198d 100644
--- a/src/3rdparty/v8/src/lithium.cc
+++ b/src/3rdparty/v8/src/lithium.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,6 +27,23 @@
#include "v8.h"
#include "lithium.h"
+#include "scopes.h"
+
+#if V8_TARGET_ARCH_IA32
+#include "ia32/lithium-ia32.h"
+#include "ia32/lithium-codegen-ia32.h"
+#elif V8_TARGET_ARCH_X64
+#include "x64/lithium-x64.h"
+#include "x64/lithium-codegen-x64.h"
+#elif V8_TARGET_ARCH_ARM
+#include "arm/lithium-arm.h"
+#include "arm/lithium-codegen-arm.h"
+#elif V8_TARGET_ARCH_MIPS
+#include "mips/lithium-mips.h"
+#include "mips/lithium-codegen-mips.h"
+#else
+#error "Unknown architecture."
+#endif
namespace v8 {
namespace internal {
@@ -156,7 +173,7 @@ void LParallelMove::PrintDataTo(StringStream* stream) const {
void LEnvironment::PrintTo(StringStream* stream) {
- stream->Add("[id=%d|", ast_id());
+ stream->Add("[id=%d|", ast_id().ToInt());
stream->Add("[parameters=%d|", parameter_count());
stream->Add("[arguments_stack_height=%d|", arguments_stack_height());
for (int i = 0; i < values_.length(); ++i) {
@@ -171,11 +188,11 @@ void LEnvironment::PrintTo(StringStream* stream) {
}
-void LPointerMap::RecordPointer(LOperand* op) {
+void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
- pointer_operands_.Add(op);
+ pointer_operands_.Add(op, zone);
}
@@ -192,11 +209,11 @@ void LPointerMap::RemovePointer(LOperand* op) {
}
-void LPointerMap::RecordUntagged(LOperand* op) {
+void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
- untagged_operands_.Add(op);
+ untagged_operands_.Add(op, zone);
}
@@ -225,9 +242,12 @@ int ElementsKindToShiftSize(ElementsKind elements_kind) {
return 2;
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
return 3;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
return kPointerSizeLog2;
@@ -237,4 +257,183 @@ int ElementsKindToShiftSize(ElementsKind elements_kind) {
}
+LLabel* LChunk::GetLabel(int block_id) const {
+ HBasicBlock* block = graph_->blocks()->at(block_id);
+ int first_instruction = block->first_instruction_index();
+ return LLabel::cast(instructions_[first_instruction]);
+}
+
+
+int LChunk::LookupDestination(int block_id) const {
+ LLabel* cur = GetLabel(block_id);
+ while (cur->replacement() != NULL) {
+ cur = cur->replacement();
+ }
+ return cur->block_id();
+}
+
+Label* LChunk::GetAssemblyLabel(int block_id) const {
+ LLabel* label = GetLabel(block_id);
+ ASSERT(!label->HasReplacement());
+ return label->label();
+}
+
+void LChunk::MarkEmptyBlocks() {
+ HPhase phase("L_Mark empty blocks", this);
+ for (int i = 0; i < graph()->blocks()->length(); ++i) {
+ HBasicBlock* block = graph()->blocks()->at(i);
+ int first = block->first_instruction_index();
+ int last = block->last_instruction_index();
+ LInstruction* first_instr = instructions()->at(first);
+ LInstruction* last_instr = instructions()->at(last);
+
+ LLabel* label = LLabel::cast(first_instr);
+ if (last_instr->IsGoto()) {
+ LGoto* goto_instr = LGoto::cast(last_instr);
+ if (label->IsRedundant() &&
+ !label->is_loop_header()) {
+ bool can_eliminate = true;
+ for (int i = first + 1; i < last && can_eliminate; ++i) {
+ LInstruction* cur = instructions()->at(i);
+ if (cur->IsGap()) {
+ LGap* gap = LGap::cast(cur);
+ if (!gap->IsRedundant()) {
+ can_eliminate = false;
+ }
+ } else {
+ can_eliminate = false;
+ }
+ }
+
+ if (can_eliminate) {
+ label->set_replacement(GetLabel(goto_instr->block_id()));
+ }
+ }
+ }
+ }
+}
+
+
+void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
+ int index = -1;
+ if (instr->IsControl()) {
+ instructions_.Add(gap, zone());
+ index = instructions_.length();
+ instructions_.Add(instr, zone());
+ } else {
+ index = instructions_.length();
+ instructions_.Add(instr, zone());
+ instructions_.Add(gap, zone());
+ }
+ if (instr->HasPointerMap()) {
+ pointer_maps_.Add(instr->pointer_map(), zone());
+ instr->pointer_map()->set_lithium_position(index);
+ }
+}
+
+
+LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
+ return LConstantOperand::Create(constant->id(), zone());
+}
+
+
+int LChunk::GetParameterStackSlot(int index) const {
+ // The receiver is at index 0, the first parameter at index 1, so we
+ // shift all parameter indexes down by the number of parameters, and
+ // make sure they end up negative so they are distinguishable from
+ // spill slots.
+ int result = index - info()->scope()->num_parameters() - 1;
+ ASSERT(result < 0);
+ return result;
+}
+
+
+// A parameter relative to ebp in the arguments stub.
+int LChunk::ParameterAt(int index) {
+ ASSERT(-1 <= index); // -1 is the receiver.
+ return (1 + info()->scope()->num_parameters() - index) *
+ kPointerSize;
+}
+
+
+LGap* LChunk::GetGapAt(int index) const {
+ return LGap::cast(instructions_[index]);
+}
+
+
+bool LChunk::IsGapAt(int index) const {
+ return instructions_[index]->IsGap();
+}
+
+
+int LChunk::NearestGapPos(int index) const {
+ while (!IsGapAt(index)) index--;
+ return index;
+}
+
+
+void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
+ GetGapAt(index)->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(from, to, zone());
+}
+
+
+HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
+ return HConstant::cast(graph_->LookupValue(operand->index()));
+}
+
+
+Representation LChunk::LookupLiteralRepresentation(
+ LConstantOperand* operand) const {
+ return graph_->LookupValue(operand->index())->representation();
+}
+
+
+LChunk* LChunk::NewChunk(HGraph* graph) {
+ NoHandleAllocation no_handles;
+ AssertNoAllocation no_gc;
+
+ int values = graph->GetMaximumValueID();
+ CompilationInfo* info = graph->info();
+ if (values > LUnallocated::kMaxVirtualRegisters) {
+ info->set_bailout_reason("not enough virtual registers for values");
+ return NULL;
+ }
+ LAllocator allocator(values, graph);
+ LChunkBuilder builder(info, graph, &allocator);
+ LChunk* chunk = builder.Build();
+ if (chunk == NULL) return NULL;
+
+ if (!allocator.Allocate(chunk)) {
+ info->set_bailout_reason("not enough virtual registers (regalloc)");
+ return NULL;
+ }
+
+ return chunk;
+}
+
+
+Handle<Code> LChunk::Codegen() {
+ MacroAssembler assembler(info()->isolate(), NULL, 0);
+ LCodeGen generator(this, &assembler, info());
+
+ MarkEmptyBlocks();
+
+ if (generator.GenerateCode()) {
+ if (FLAG_trace_codegen) {
+ PrintF("Crankshaft Compiler - ");
+ }
+ CodeGenerator::MakeCodePrologue(info());
+ Code::Flags flags = Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
+ Handle<Code> code =
+ CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
+ generator.FinishCode(code);
+ CodeGenerator::PrintCode(code, info());
+ return code;
+ }
+ return Handle<Code>::null();
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/lithium.h b/src/3rdparty/v8/src/lithium.h
index 2ccbf56..b4eb2bb 100644
--- a/src/3rdparty/v8/src/lithium.h
+++ b/src/3rdparty/v8/src/lithium.h
@@ -133,13 +133,15 @@ class LUnallocated: public LOperand {
// index in the upper bits.
static const int kPolicyWidth = 3;
static const int kLifetimeWidth = 1;
- static const int kVirtualRegisterWidth = 18;
+ static const int kVirtualRegisterWidth = 15;
static const int kPolicyShift = kKindFieldWidth;
static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
static const int kFixedIndexShift =
kVirtualRegisterShift + kVirtualRegisterWidth;
+ static const int kFixedIndexWidth = 32 - kFixedIndexShift;
+ STATIC_ASSERT(kFixedIndexWidth > 5);
class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
@@ -154,8 +156,8 @@ class LUnallocated: public LOperand {
};
static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
- static const int kMaxFixedIndex = 63;
- static const int kMinFixedIndex = -64;
+ static const int kMaxFixedIndex = (1 << kFixedIndexWidth) - 1;
+ static const int kMinFixedIndex = -(1 << kFixedIndexWidth);
bool HasAnyPolicy() const {
return policy() == ANY;
@@ -187,8 +189,8 @@ class LUnallocated: public LOperand {
value_ = VirtualRegisterField::update(value_, id);
}
- LUnallocated* CopyUnconstrained() {
- LUnallocated* result = new LUnallocated(ANY);
+ LUnallocated* CopyUnconstrained(Zone* zone) {
+ LUnallocated* result = new(zone) LUnallocated(ANY);
result->set_virtual_register(virtual_register());
return result;
}
@@ -260,10 +262,10 @@ class LMoveOperands BASE_EMBEDDED {
class LConstantOperand: public LOperand {
public:
- static LConstantOperand* Create(int index) {
+ static LConstantOperand* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LConstantOperand(index);
+ return new(zone) LConstantOperand(index);
}
static LConstantOperand* cast(LOperand* op) {
@@ -296,10 +298,10 @@ class LArgument: public LOperand {
class LStackSlot: public LOperand {
public:
- static LStackSlot* Create(int index) {
+ static LStackSlot* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LStackSlot(index);
+ return new(zone) LStackSlot(index);
}
static LStackSlot* cast(LOperand* op) {
@@ -321,10 +323,10 @@ class LStackSlot: public LOperand {
class LDoubleStackSlot: public LOperand {
public:
- static LDoubleStackSlot* Create(int index) {
+ static LDoubleStackSlot* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LDoubleStackSlot(index);
+ return new(zone) LDoubleStackSlot(index);
}
static LDoubleStackSlot* cast(LOperand* op) {
@@ -346,10 +348,10 @@ class LDoubleStackSlot: public LOperand {
class LRegister: public LOperand {
public:
- static LRegister* Create(int index) {
+ static LRegister* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LRegister(index);
+ return new(zone) LRegister(index);
}
static LRegister* cast(LOperand* op) {
@@ -371,10 +373,10 @@ class LRegister: public LOperand {
class LDoubleRegister: public LOperand {
public:
- static LDoubleRegister* Create(int index) {
+ static LDoubleRegister* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LDoubleRegister(index);
+ return new(zone) LDoubleRegister(index);
}
static LDoubleRegister* cast(LOperand* op) {
@@ -396,10 +398,10 @@ class LDoubleRegister: public LOperand {
class LParallelMove : public ZoneObject {
public:
- LParallelMove() : move_operands_(4) { }
+ explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
- void AddMove(LOperand* from, LOperand* to) {
- move_operands_.Add(LMoveOperands(from, to));
+ void AddMove(LOperand* from, LOperand* to, Zone* zone) {
+ move_operands_.Add(LMoveOperands(from, to), zone);
}
bool IsRedundant() const;
@@ -417,9 +419,9 @@ class LParallelMove : public ZoneObject {
class LPointerMap: public ZoneObject {
public:
- explicit LPointerMap(int position)
- : pointer_operands_(8),
- untagged_operands_(0),
+ explicit LPointerMap(int position, Zone* zone)
+ : pointer_operands_(8, zone),
+ untagged_operands_(0, zone),
position_(position),
lithium_position_(-1) { }
@@ -438,9 +440,9 @@ class LPointerMap: public ZoneObject {
lithium_position_ = pos;
}
- void RecordPointer(LOperand* op);
+ void RecordPointer(LOperand* op, Zone* zone);
void RemovePointer(LOperand* op);
- void RecordUntagged(LOperand* op);
+ void RecordUntagged(LOperand* op, Zone* zone);
void PrintTo(StringStream* stream);
private:
@@ -455,11 +457,13 @@ class LEnvironment: public ZoneObject {
public:
LEnvironment(Handle<JSFunction> closure,
FrameType frame_type,
- int ast_id,
+ BailoutId ast_id,
int parameter_count,
int argument_count,
int value_count,
- LEnvironment* outer)
+ LEnvironment* outer,
+ HEnterInlined* entry,
+ Zone* zone)
: closure_(closure),
frame_type_(frame_type),
arguments_stack_height_(argument_count),
@@ -468,18 +472,21 @@ class LEnvironment: public ZoneObject {
ast_id_(ast_id),
parameter_count_(parameter_count),
pc_offset_(-1),
- values_(value_count),
- is_tagged_(value_count, closure->GetHeap()->isolate()->zone()),
+ values_(value_count, zone),
+ is_tagged_(value_count, zone),
+ is_uint32_(value_count, zone),
spilled_registers_(NULL),
spilled_double_registers_(NULL),
- outer_(outer) { }
+ outer_(outer),
+ entry_(entry),
+ zone_(zone) { }
Handle<JSFunction> closure() const { return closure_; }
FrameType frame_type() const { return frame_type_; }
int arguments_stack_height() const { return arguments_stack_height_; }
int deoptimization_index() const { return deoptimization_index_; }
int translation_index() const { return translation_index_; }
- int ast_id() const { return ast_id_; }
+ BailoutId ast_id() const { return ast_id_; }
int parameter_count() const { return parameter_count_; }
int pc_offset() const { return pc_offset_; }
LOperand** spilled_registers() const { return spilled_registers_; }
@@ -488,18 +495,30 @@ class LEnvironment: public ZoneObject {
}
const ZoneList<LOperand*>* values() const { return &values_; }
LEnvironment* outer() const { return outer_; }
+ HEnterInlined* entry() { return entry_; }
- void AddValue(LOperand* operand, Representation representation) {
- values_.Add(operand);
+ void AddValue(LOperand* operand,
+ Representation representation,
+ bool is_uint32) {
+ values_.Add(operand, zone());
if (representation.IsTagged()) {
+ ASSERT(!is_uint32);
is_tagged_.Add(values_.length() - 1);
}
+
+ if (is_uint32) {
+ is_uint32_.Add(values_.length() - 1);
+ }
}
bool HasTaggedValueAt(int index) const {
return is_tagged_.Contains(index);
}
+ bool HasUint32ValueAt(int index) const {
+ return is_uint32_.Contains(index);
+ }
+
void Register(int deoptimization_index,
int translation_index,
int pc_offset) {
@@ -520,17 +539,20 @@ class LEnvironment: public ZoneObject {
void PrintTo(StringStream* stream);
+ Zone* zone() const { return zone_; }
+
private:
Handle<JSFunction> closure_;
FrameType frame_type_;
int arguments_stack_height_;
int deoptimization_index_;
int translation_index_;
- int ast_id_;
+ BailoutId ast_id_;
int parameter_count_;
int pc_offset_;
ZoneList<LOperand*> values_;
BitVector is_tagged_;
+ BitVector is_uint32_;
// Allocation index indexed arrays of spill slot operands for registers
// that are also in spill slots at an OSR entry. NULL for environments
@@ -539,6 +561,9 @@ class LEnvironment: public ZoneObject {
LOperand** spilled_double_registers_;
LEnvironment* outer_;
+ HEnterInlined* entry_;
+
+ Zone* zone_;
};
@@ -616,6 +641,69 @@ class DeepIterator BASE_EMBEDDED {
};
+class LPlatformChunk;
+class LGap;
+class LLabel;
+
+// Superclass providing data and behavior common to all the
+// arch-specific LPlatformChunk classes.
+class LChunk: public ZoneObject {
+ public:
+ static LChunk* NewChunk(HGraph* graph);
+
+ void AddInstruction(LInstruction* instruction, HBasicBlock* block);
+ LConstantOperand* DefineConstantOperand(HConstant* constant);
+ HConstant* LookupConstant(LConstantOperand* operand) const;
+ Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
+
+ int ParameterAt(int index);
+ int GetParameterStackSlot(int index) const;
+ int spill_slot_count() const { return spill_slot_count_; }
+ CompilationInfo* info() const { return info_; }
+ HGraph* graph() const { return graph_; }
+ const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
+ void AddGapMove(int index, LOperand* from, LOperand* to);
+ LGap* GetGapAt(int index) const;
+ bool IsGapAt(int index) const;
+ int NearestGapPos(int index) const;
+ void MarkEmptyBlocks();
+ const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
+ LLabel* GetLabel(int block_id) const;
+ int LookupDestination(int block_id) const;
+ Label* GetAssemblyLabel(int block_id) const;
+
+ const ZoneList<Handle<JSFunction> >* inlined_closures() const {
+ return &inlined_closures_;
+ }
+
+ void AddInlinedClosure(Handle<JSFunction> closure) {
+ inlined_closures_.Add(closure, zone());
+ }
+
+ Zone* zone() const { return info_->zone(); }
+
+ Handle<Code> Codegen();
+
+ protected:
+ LChunk(CompilationInfo* info, HGraph* graph)
+ : spill_slot_count_(0),
+ info_(info),
+ graph_(graph),
+ instructions_(32, graph->zone()),
+ pointer_maps_(8, graph->zone()),
+ inlined_closures_(1, graph->zone()) { }
+
+ int spill_slot_count_;
+
+ private:
+ CompilationInfo* info_;
+ HGraph* const graph_;
+ ZoneList<LInstruction*> instructions_;
+ ZoneList<LPointerMap*> pointer_maps_;
+ ZoneList<Handle<JSFunction> > inlined_closures_;
+};
+
+
int ElementsKindToShiftSize(ElementsKind elements_kind);
diff --git a/src/3rdparty/v8/src/liveedit-debugger.js b/src/3rdparty/v8/src/liveedit-debugger.js
index 4463c93..cfcdb81 100644
--- a/src/3rdparty/v8/src/liveedit-debugger.js
+++ b/src/3rdparty/v8/src/liveedit-debugger.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -47,6 +47,8 @@ Debug.LiveEdit = new function() {
// Forward declaration for minifier.
var FunctionStatus;
+ var NEEDS_STEP_IN_PROPERTY_NAME = "stack_update_needs_step_in";
+
// Applies the change to the script.
// The change is in form of list of chunks encoded in a single array as
// a series of triplets (pos1_start, pos1_end, pos2_end)
@@ -161,7 +163,7 @@ Debug.LiveEdit = new function() {
// Our current implementation requires client to manually issue "step in"
// command for correct stack state.
- preview_description.stack_update_needs_step_in =
+ preview_description[NEEDS_STEP_IN_PROPERTY_NAME] =
preview_description.stack_modified;
// Start with breakpoints. Convert their line/column positions and
@@ -1078,6 +1080,18 @@ Debug.LiveEdit = new function() {
return ProcessOldNode(old_code_tree);
}
+ // Restarts call frame and returns value similar to what LiveEdit returns.
+ function RestartFrame(frame_mirror) {
+ var result = frame_mirror.restart();
+ if (IS_STRING(result)) {
+ throw new Failure("Failed to restart frame: " + result);
+ }
+ var result = {};
+ result[NEEDS_STEP_IN_PROPERTY_NAME] = true;
+ return result;
+ }
+ // Function is public.
+ this.RestartFrame = RestartFrame;
// Functions are public for tests.
this.TestApi = {
diff --git a/src/3rdparty/v8/src/liveedit.cc b/src/3rdparty/v8/src/liveedit.cc
index 22b8250..dc7d4b1 100644
--- a/src/3rdparty/v8/src/liveedit.cc
+++ b/src/3rdparty/v8/src/liveedit.cc
@@ -601,7 +601,7 @@ static void CompileScriptForTracker(Isolate* isolate, Handle<Script> script) {
PostponeInterruptsScope postpone(isolate);
// Build AST.
- CompilationInfo info(script);
+ CompilationInfoWithZone info(script);
info.MarkAsGlobal();
// Parse and don't allow skipping lazy functions.
if (ParserApi::Parse(&info, kNoParsingFlags)) {
@@ -635,6 +635,21 @@ static Handle<JSValue> WrapInJSValue(Handle<Object> object) {
}
+static Handle<SharedFunctionInfo> UnwrapSharedFunctionInfoFromJSValue(
+ Handle<JSValue> jsValue) {
+ Object* shared = jsValue->value();
+ CHECK(shared->IsSharedFunctionInfo());
+ return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(shared));
+}
+
+
+static int GetArrayLength(Handle<JSArray> array) {
+ Object* length = array->length();
+ CHECK(length->IsSmi());
+ return Smi::cast(length)->value();
+}
+
+
// Simple helper class that creates more or less typed structures over
// JSArray object. This is an adhoc method of passing structures from C++
// to JavaScript.
@@ -670,6 +685,7 @@ class JSArrayBasedStruct {
}
int GetSmiValueField(int field_position) {
Object* res = GetField(field_position);
+ CHECK(res->IsSmi());
return Smi::cast(res)->value();
}
@@ -714,14 +730,17 @@ class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> {
return this->GetSmiValueField(kParentIndexOffset_);
}
Handle<Code> GetFunctionCode() {
- Handle<Object> raw_result = UnwrapJSValue(Handle<JSValue>(
- JSValue::cast(this->GetField(kCodeOffset_))));
+ Object* element = this->GetField(kCodeOffset_);
+ CHECK(element->IsJSValue());
+ Handle<JSValue> value_wrapper(JSValue::cast(element));
+ Handle<Object> raw_result = UnwrapJSValue(value_wrapper);
+ CHECK(raw_result->IsCode());
return Handle<Code>::cast(raw_result);
}
Handle<Object> GetCodeScopeInfo() {
- Handle<Object> raw_result = UnwrapJSValue(Handle<JSValue>(
- JSValue::cast(this->GetField(kCodeScopeInfoOffset_))));
- return raw_result;
+ Object* element = this->GetField(kCodeScopeInfoOffset_);
+ CHECK(element->IsJSValue());
+ return UnwrapJSValue(Handle<JSValue>(JSValue::cast(element)));
}
int GetStartPosition() {
return this->GetSmiValueField(kStartPositionOffset_);
@@ -771,9 +790,9 @@ class SharedInfoWrapper : public JSArrayBasedStruct<SharedInfoWrapper> {
}
Handle<SharedFunctionInfo> GetInfo() {
Object* element = this->GetField(kSharedInfoOffset_);
+ CHECK(element->IsJSValue());
Handle<JSValue> value_wrapper(JSValue::cast(element));
- Handle<Object> raw_result = UnwrapJSValue(value_wrapper);
- return Handle<SharedFunctionInfo>::cast(raw_result);
+ return UnwrapSharedFunctionInfoFromJSValue(value_wrapper);
}
private:
@@ -825,7 +844,8 @@ class FunctionInfoListener {
// Saves full information about a function: its code, its scope info
// and a SharedFunctionInfo object.
- void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope) {
+ void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope,
+ Zone* zone) {
if (!shared->IsSharedFunctionInfo()) {
return;
}
@@ -836,14 +856,14 @@ class FunctionInfoListener {
Handle<Object>(shared->scope_info()));
info.SetSharedFunctionInfo(shared);
- Handle<Object> scope_info_list(SerializeFunctionScope(scope));
+ Handle<Object> scope_info_list(SerializeFunctionScope(scope, zone));
info.SetOuterScopeInfo(scope_info_list);
}
Handle<JSArray> GetResult() { return result_; }
private:
- Object* SerializeFunctionScope(Scope* scope) {
+ Object* SerializeFunctionScope(Scope* scope, Zone* zone) {
HandleScope handle_scope;
Handle<JSArray> scope_info_list = FACTORY->NewJSArray(10);
@@ -857,8 +877,8 @@ class FunctionInfoListener {
return HEAP->undefined_value();
}
do {
- ZoneList<Variable*> stack_list(outer_scope->StackLocalCount());
- ZoneList<Variable*> context_list(outer_scope->ContextLocalCount());
+ ZoneList<Variable*> stack_list(outer_scope->StackLocalCount(), zone);
+ ZoneList<Variable*> context_list(outer_scope->ContextLocalCount(), zone);
outer_scope->CollectStackAndContextLocals(&stack_list, &context_list);
context_list.Sort(&Variable::CompareIndex);
@@ -893,7 +913,6 @@ class FunctionInfoListener {
JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script,
Handle<String> source) {
Isolate* isolate = Isolate::Current();
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
FunctionInfoListener listener;
Handle<Object> original_source = Handle<Object>(script->source());
@@ -909,7 +928,7 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script,
void LiveEdit::WrapSharedFunctionInfos(Handle<JSArray> array) {
HandleScope scope;
- int len = Smi::cast(array->length())->value();
+ int len = GetArrayLength(array);
for (int i = 0; i < len; i++) {
Handle<SharedFunctionInfo> info(
SharedFunctionInfo::cast(array->GetElementNoExceptionThrown(i)));
@@ -922,33 +941,35 @@ void LiveEdit::WrapSharedFunctionInfos(Handle<JSArray> array) {
}
-// Visitor that collects all references to a particular code object,
-// including "CODE_TARGET" references in other code objects.
-// It works in context of ZoneScope.
-class ReferenceCollectorVisitor : public ObjectVisitor {
+// Visitor that finds all references to a particular code object,
+// including "CODE_TARGET" references in other code objects and replaces
+// them on the fly.
+class ReplacingVisitor : public ObjectVisitor {
public:
- explicit ReferenceCollectorVisitor(Code* original)
- : original_(original), rvalues_(10), reloc_infos_(10), code_entries_(10) {
+ explicit ReplacingVisitor(Code* original, Code* substitution)
+ : original_(original), substitution_(substitution) {
}
virtual void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
if (*p == original_) {
- rvalues_.Add(p);
+ *p = substitution_;
}
}
}
virtual void VisitCodeEntry(Address entry) {
if (Code::GetObjectFromEntryAddress(entry) == original_) {
- code_entries_.Add(entry);
+ Address substitution_entry = substitution_->instruction_start();
+ Memory::Address_at(entry) = substitution_entry;
}
}
virtual void VisitCodeTarget(RelocInfo* rinfo) {
if (RelocInfo::IsCodeTarget(rinfo->rmode()) &&
Code::GetCodeFromTargetAddress(rinfo->target_address()) == original_) {
- reloc_infos_.Add(*rinfo);
+ Address substitution_entry = substitution_->instruction_start();
+ rinfo->set_target_address(substitution_entry);
}
}
@@ -956,56 +977,40 @@ class ReferenceCollectorVisitor : public ObjectVisitor {
VisitCodeTarget(rinfo);
}
- // Post-visiting method that iterates over all collected references and
- // modifies them.
- void Replace(Code* substitution) {
- for (int i = 0; i < rvalues_.length(); i++) {
- *(rvalues_[i]) = substitution;
- }
- Address substitution_entry = substitution->instruction_start();
- for (int i = 0; i < reloc_infos_.length(); i++) {
- reloc_infos_[i].set_target_address(substitution_entry);
- }
- for (int i = 0; i < code_entries_.length(); i++) {
- Address entry = code_entries_[i];
- Memory::Address_at(entry) = substitution_entry;
- }
- }
-
private:
Code* original_;
- ZoneList<Object**> rvalues_;
- ZoneList<RelocInfo> reloc_infos_;
- ZoneList<Address> code_entries_;
+ Code* substitution_;
};
// Finds all references to original and replaces them with substitution.
-static void ReplaceCodeObject(Code* original, Code* substitution) {
- ASSERT(!HEAP->InNewSpace(substitution));
+static void ReplaceCodeObject(Handle<Code> original,
+ Handle<Code> substitution) {
+ // Perform a full GC in order to ensure that we are not in the middle of an
+ // incremental marking phase when we are replacing the code object.
+ // Since we are not in an incremental marking phase we can write pointers
+ // to code objects (that are never in new space) without worrying about
+ // write barriers.
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "liveedit.cc ReplaceCodeObject");
+
+ ASSERT(!HEAP->InNewSpace(*substitution));
- HeapIterator iterator;
AssertNoAllocation no_allocations_please;
- // A zone scope for ReferenceCollectorVisitor.
- ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
-
- ReferenceCollectorVisitor visitor(original);
+ ReplacingVisitor visitor(*original, *substitution);
// Iterate over all roots. Stack frames may have pointer into original code,
// so temporary replace the pointers with offset numbers
// in prologue/epilogue.
- {
- HEAP->IterateStrongRoots(&visitor, VISIT_ALL);
- }
+ HEAP->IterateRoots(&visitor, VISIT_ALL);
// Now iterate over all pointers of all objects, including code_target
// implicit pointers.
+ HeapIterator iterator;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
obj->Iterate(&visitor);
}
-
- visitor.Replace(substitution);
}
@@ -1089,8 +1094,8 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
if (IsJSFunctionCode(shared_info->code())) {
Handle<Code> code = compile_info_wrapper.GetFunctionCode();
- ReplaceCodeObject(shared_info->code(), *code);
- Handle<Object> code_scope_info = compile_info_wrapper.GetCodeScopeInfo();
+ ReplaceCodeObject(Handle<Code>(shared_info->code()), code);
+ Handle<Object> code_scope_info = compile_info_wrapper.GetCodeScopeInfo();
if (code_scope_info->IsFixedArray()) {
shared_info->set_scope_info(ScopeInfo::cast(*code_scope_info));
}
@@ -1140,7 +1145,8 @@ MaybeObject* LiveEdit::FunctionSourceUpdated(
void LiveEdit::SetFunctionScript(Handle<JSValue> function_wrapper,
Handle<Object> script_handle) {
Handle<SharedFunctionInfo> shared_info =
- Handle<SharedFunctionInfo>::cast(UnwrapJSValue(function_wrapper));
+ UnwrapSharedFunctionInfoFromJSValue(function_wrapper);
+ CHECK(script_handle->IsScript() || script_handle->IsUndefined());
shared_info->set_script(*script_handle);
Isolate::Current()->compilation_cache()->Remove(shared_info);
@@ -1159,19 +1165,22 @@ void LiveEdit::SetFunctionScript(Handle<JSValue> function_wrapper,
static int TranslatePosition(int original_position,
Handle<JSArray> position_change_array) {
int position_diff = 0;
- int array_len = Smi::cast(position_change_array->length())->value();
+ int array_len = GetArrayLength(position_change_array);
// TODO(635): binary search may be used here
for (int i = 0; i < array_len; i += 3) {
Object* element = position_change_array->GetElementNoExceptionThrown(i);
+ CHECK(element->IsSmi());
int chunk_start = Smi::cast(element)->value();
if (original_position < chunk_start) {
break;
}
element = position_change_array->GetElementNoExceptionThrown(i + 1);
+ CHECK(element->IsSmi());
int chunk_end = Smi::cast(element)->value();
// Position mustn't be inside a chunk.
ASSERT(original_position >= chunk_end);
element = position_change_array->GetElementNoExceptionThrown(i + 2);
+ CHECK(element->IsSmi());
int chunk_changed_end = Smi::cast(element)->value();
position_diff = chunk_changed_end - chunk_end;
}
@@ -1278,7 +1287,9 @@ static Handle<Code> PatchPositionsInCode(
continue;
}
}
- buffer_writer.Write(it.rinfo());
+ if (RelocInfo::IsRealRelocMode(rinfo->rmode())) {
+ buffer_writer.Write(it.rinfo());
+ }
}
}
@@ -1300,7 +1311,6 @@ static Handle<Code> PatchPositionsInCode(
MaybeObject* LiveEdit::PatchFunctionPositions(
Handle<JSArray> shared_info_array, Handle<JSArray> position_change_array) {
-
if (!SharedInfoWrapper::IsInstance(shared_info_array)) {
return Isolate::Current()->ThrowIllegalOperation();
}
@@ -1332,7 +1342,7 @@ MaybeObject* LiveEdit::PatchFunctionPositions(
// on stack (it is safe to substitute the code object on stack, because
// we only change the structure of rinfo and leave instructions
// untouched).
- ReplaceCodeObject(info->code(), *patched_code);
+ ReplaceCodeObject(Handle<Code>(info->code()), patched_code);
}
}
@@ -1390,11 +1400,11 @@ void LiveEdit::ReplaceRefToNestedFunction(
Handle<JSValue> subst_function_wrapper) {
Handle<SharedFunctionInfo> parent_shared =
- Handle<SharedFunctionInfo>::cast(UnwrapJSValue(parent_function_wrapper));
+ UnwrapSharedFunctionInfoFromJSValue(parent_function_wrapper);
Handle<SharedFunctionInfo> orig_shared =
- Handle<SharedFunctionInfo>::cast(UnwrapJSValue(orig_function_wrapper));
+ UnwrapSharedFunctionInfoFromJSValue(orig_function_wrapper);
Handle<SharedFunctionInfo> subst_shared =
- Handle<SharedFunctionInfo>::cast(UnwrapJSValue(subst_function_wrapper));
+ UnwrapSharedFunctionInfoFromJSValue(subst_function_wrapper);
for (RelocIterator it(parent_shared->code()); !it.done(); it.next()) {
if (it.rinfo()->rmode() == RelocInfo::EMBEDDED_OBJECT) {
@@ -1417,12 +1427,13 @@ static bool CheckActivation(Handle<JSArray> shared_info_array,
Handle<JSFunction> function(
JSFunction::cast(JavaScriptFrame::cast(frame)->function()));
- int len = Smi::cast(shared_info_array->length())->value();
+ int len = GetArrayLength(shared_info_array);
for (int i = 0; i < len; i++) {
- JSValue* wrapper =
- JSValue::cast(shared_info_array->GetElementNoExceptionThrown(i));
- Handle<SharedFunctionInfo> shared(
- SharedFunctionInfo::cast(wrapper->value()));
+ Object* element = shared_info_array->GetElementNoExceptionThrown(i);
+ CHECK(element->IsJSValue());
+ Handle<JSValue> jsvalue(JSValue::cast(element));
+ Handle<SharedFunctionInfo> shared =
+ UnwrapSharedFunctionInfoFromJSValue(jsvalue);
if (function->shared() == *shared || IsInlined(*function, *shared)) {
SetElementNonStrict(result, i, Handle<Smi>(Smi::FromInt(status)));
@@ -1491,7 +1502,9 @@ static const char* DropFrames(Vector<StackFrame*> frames,
isolate->builtins()->builtin(
Builtins::kFrameDropper_LiveEdit)) {
// OK, we can drop our own code.
- *mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
+ pre_top_frame = frames[top_frame_index - 2];
+ top_frame = frames[top_frame_index - 1];
+ *mode = Debug::CURRENTLY_SET_MODE;
frame_has_padding = false;
} else if (pre_top_frame_code ==
isolate->builtins()->builtin(Builtins::kReturn_DebugBreak)) {
@@ -1506,6 +1519,15 @@ static const char* DropFrames(Vector<StackFrame*> frames,
// Here the stub is CEntry, it's not debug-only and can't be padded.
// If anyone would complain, a proxy padded stub could be added.
frame_has_padding = false;
+ } else if (pre_top_frame->type() == StackFrame::ARGUMENTS_ADAPTOR) {
+ // This must be adaptor that remain from the frame dropping that
+ // is still on stack. A frame dropper frame must be above it.
+ ASSERT(frames[top_frame_index - 2]->LookupCode() ==
+ isolate->builtins()->builtin(Builtins::kFrameDropper_LiveEdit));
+ pre_top_frame = frames[top_frame_index - 3];
+ top_frame = frames[top_frame_index - 2];
+ *mode = Debug::CURRENTLY_SET_MODE;
+ frame_has_padding = false;
} else {
return "Unknown structure of stack above changing function";
}
@@ -1589,16 +1611,36 @@ static bool IsDropableFrame(StackFrame* frame) {
return !frame->is_exit();
}
-// Fills result array with statuses of functions. Modifies the stack
-// removing all listed function if possible and if do_drop is true.
-static const char* DropActivationsInActiveThread(
- Handle<JSArray> shared_info_array, Handle<JSArray> result, bool do_drop) {
+
+// Describes a set of call frames that execute any of listed functions.
+// Finding no such frames does not mean error.
+class MultipleFunctionTarget {
+ public:
+ MultipleFunctionTarget(Handle<JSArray> shared_info_array,
+ Handle<JSArray> result)
+ : m_shared_info_array(shared_info_array),
+ m_result(result) {}
+ bool MatchActivation(StackFrame* frame,
+ LiveEdit::FunctionPatchabilityStatus status) {
+ return CheckActivation(m_shared_info_array, m_result, frame, status);
+ }
+ const char* GetNotFoundMessage() {
+ return NULL;
+ }
+ private:
+ Handle<JSArray> m_shared_info_array;
+ Handle<JSArray> m_result;
+};
+
+// Drops all call frame matched by target and all frames above them.
+template<typename TARGET>
+static const char* DropActivationsInActiveThreadImpl(
+ TARGET& target, bool do_drop, Zone* zone) {
Isolate* isolate = Isolate::Current();
Debug* debug = isolate->debug();
- ZoneScope scope(isolate, DELETE_ON_EXIT);
- Vector<StackFrame*> frames = CreateStackMap();
+ ZoneScope scope(zone, DELETE_ON_EXIT);
+ Vector<StackFrame*> frames = CreateStackMap(zone);
- int array_len = Smi::cast(shared_info_array->length())->value();
int top_frame_index = -1;
int frame_index = 0;
@@ -1608,8 +1650,8 @@ static const char* DropActivationsInActiveThread(
top_frame_index = frame_index;
break;
}
- if (CheckActivation(shared_info_array, result, frame,
- LiveEdit::FUNCTION_BLOCKED_UNDER_NATIVE_CODE)) {
+ if (target.MatchActivation(
+ frame, LiveEdit::FUNCTION_BLOCKED_UNDER_NATIVE_CODE)) {
// We are still above break_frame. It is not a target frame,
// it is a problem.
return "Debugger mark-up on stack is not found";
@@ -1618,7 +1660,7 @@ static const char* DropActivationsInActiveThread(
if (top_frame_index == -1) {
// We haven't found break frame, but no function is blocking us anyway.
- return NULL;
+ return target.GetNotFoundMessage();
}
bool target_frame_found = false;
@@ -1631,8 +1673,8 @@ static const char* DropActivationsInActiveThread(
c_code_found = true;
break;
}
- if (CheckActivation(shared_info_array, result, frame,
- LiveEdit::FUNCTION_BLOCKED_ON_ACTIVE_STACK)) {
+ if (target.MatchActivation(
+ frame, LiveEdit::FUNCTION_BLOCKED_ON_ACTIVE_STACK)) {
target_frame_found = true;
bottom_js_frame_index = frame_index;
}
@@ -1644,8 +1686,8 @@ static const char* DropActivationsInActiveThread(
for (; frame_index < frames.length(); frame_index++) {
StackFrame* frame = frames[frame_index];
if (frame->is_java_script()) {
- if (CheckActivation(shared_info_array, result, frame,
- LiveEdit::FUNCTION_BLOCKED_UNDER_NATIVE_CODE)) {
+ if (target.MatchActivation(
+ frame, LiveEdit::FUNCTION_BLOCKED_UNDER_NATIVE_CODE)) {
// Cannot drop frame under C frames.
return NULL;
}
@@ -1660,7 +1702,7 @@ static const char* DropActivationsInActiveThread(
if (!target_frame_found) {
// Nothing to drop.
- return NULL;
+ return target.GetNotFoundMessage();
}
Debug::FrameDropMode drop_mode = Debug::FRAMES_UNTOUCHED;
@@ -1683,6 +1725,23 @@ static const char* DropActivationsInActiveThread(
}
debug->FramesHaveBeenDropped(new_id, drop_mode,
restarter_frame_function_pointer);
+ return NULL;
+}
+
+// Fills result array with statuses of functions. Modifies the stack
+// removing all listed function if possible and if do_drop is true.
+static const char* DropActivationsInActiveThread(
+ Handle<JSArray> shared_info_array, Handle<JSArray> result, bool do_drop,
+ Zone* zone) {
+ MultipleFunctionTarget target(shared_info_array, result);
+
+ const char* message =
+ DropActivationsInActiveThreadImpl(target, do_drop, zone);
+ if (message) {
+ return message;
+ }
+
+ int array_len = GetArrayLength(shared_info_array);
// Replace "blocked on active" with "replaced on active" status.
for (int i = 0; i < array_len; i++) {
@@ -1723,8 +1782,8 @@ class InactiveThreadActivationsChecker : public ThreadVisitor {
Handle<JSArray> LiveEdit::CheckAndDropActivations(
- Handle<JSArray> shared_info_array, bool do_drop) {
- int len = Smi::cast(shared_info_array->length())->value();
+ Handle<JSArray> shared_info_array, bool do_drop, Zone* zone) {
+ int len = GetArrayLength(shared_info_array);
Handle<JSArray> result = FACTORY->NewJSArray(len);
@@ -1748,7 +1807,7 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
// Try to drop activations from the current stack.
const char* error_message =
- DropActivationsInActiveThread(shared_info_array, result, do_drop);
+ DropActivationsInActiveThread(shared_info_array, result, do_drop, zone);
if (error_message != NULL) {
// Add error message as an array extra element.
Vector<const char> vector_message(error_message, StrLength(error_message));
@@ -1759,6 +1818,50 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
}
+// Describes a single callframe a target. Not finding this frame
+// means an error.
+class SingleFrameTarget {
+ public:
+ explicit SingleFrameTarget(JavaScriptFrame* frame)
+ : m_frame(frame),
+ m_saved_status(LiveEdit::FUNCTION_AVAILABLE_FOR_PATCH) {}
+
+ bool MatchActivation(StackFrame* frame,
+ LiveEdit::FunctionPatchabilityStatus status) {
+ if (frame->fp() == m_frame->fp()) {
+ m_saved_status = status;
+ return true;
+ }
+ return false;
+ }
+ const char* GetNotFoundMessage() {
+ return "Failed to found requested frame";
+ }
+ LiveEdit::FunctionPatchabilityStatus saved_status() {
+ return m_saved_status;
+ }
+ private:
+ JavaScriptFrame* m_frame;
+ LiveEdit::FunctionPatchabilityStatus m_saved_status;
+};
+
+
+// Finds a drops required frame and all frames above.
+// Returns error message or NULL.
+const char* LiveEdit::RestartFrame(JavaScriptFrame* frame, Zone* zone) {
+ SingleFrameTarget target(frame);
+
+ const char* result = DropActivationsInActiveThreadImpl(target, true, zone);
+ if (result != NULL) {
+ return result;
+ }
+ if (target.saved_status() == LiveEdit::FUNCTION_BLOCKED_UNDER_NATIVE_CODE) {
+ return "Function is blocked under native code";
+ }
+ return NULL;
+}
+
+
LiveEditFunctionTracker::LiveEditFunctionTracker(Isolate* isolate,
FunctionLiteral* fun)
: isolate_(isolate) {
@@ -1776,9 +1879,11 @@ LiveEditFunctionTracker::~LiveEditFunctionTracker() {
void LiveEditFunctionTracker::RecordFunctionInfo(
- Handle<SharedFunctionInfo> info, FunctionLiteral* lit) {
+ Handle<SharedFunctionInfo> info, FunctionLiteral* lit,
+ Zone* zone) {
if (isolate_->active_function_info_listener() != NULL) {
- isolate_->active_function_info_listener()->FunctionInfo(info, lit->scope());
+ isolate_->active_function_info_listener()->FunctionInfo(info, lit->scope(),
+ zone);
}
}
@@ -1807,7 +1912,8 @@ LiveEditFunctionTracker::~LiveEditFunctionTracker() {
void LiveEditFunctionTracker::RecordFunctionInfo(
- Handle<SharedFunctionInfo> info, FunctionLiteral* lit) {
+ Handle<SharedFunctionInfo> info, FunctionLiteral* lit,
+ Zone* zone) {
}
diff --git a/src/3rdparty/v8/src/liveedit.h b/src/3rdparty/v8/src/liveedit.h
index 4ee4466..5b12854 100644
--- a/src/3rdparty/v8/src/liveedit.h
+++ b/src/3rdparty/v8/src/liveedit.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -69,7 +69,7 @@ class LiveEditFunctionTracker {
explicit LiveEditFunctionTracker(Isolate* isolate, FunctionLiteral* fun);
~LiveEditFunctionTracker();
void RecordFunctionInfo(Handle<SharedFunctionInfo> info,
- FunctionLiteral* lit);
+ FunctionLiteral* lit, Zone* zone);
void RecordRootFunctionInfo(Handle<Code> code);
static bool IsActive(Isolate* isolate);
@@ -121,7 +121,11 @@ class LiveEdit : AllStatic {
// has restart the lowest found frames and drops all other frames above
// if possible and if do_drop is true.
static Handle<JSArray> CheckAndDropActivations(
- Handle<JSArray> shared_info_array, bool do_drop);
+ Handle<JSArray> shared_info_array, bool do_drop, Zone* zone);
+
+ // Restarts the call frame and completely drops all frames above it.
+ // Return error message or NULL.
+ static const char* RestartFrame(JavaScriptFrame* frame, Zone* zone);
// A copy of this is in liveedit-debugger.js.
enum FunctionPatchabilityStatus {
diff --git a/src/3rdparty/v8/src/liveobjectlist.cc b/src/3rdparty/v8/src/liveobjectlist.cc
index 1aabc59..6b89cf6 100644
--- a/src/3rdparty/v8/src/liveobjectlist.cc
+++ b/src/3rdparty/v8/src/liveobjectlist.cc
@@ -74,7 +74,7 @@ typedef int (*RawComparer)(const void*, const void*);
v(SeqAsciiString, "unexpected: SeqAsciiString") \
v(SeqString, "unexpected: SeqString") \
v(JSFunctionResultCache, "unexpected: JSFunctionResultCache") \
- v(GlobalContext, "unexpected: GlobalContext") \
+ v(NativeContext, "unexpected: NativeContext") \
v(MapCache, "unexpected: MapCache") \
v(CodeCacheHashTable, "unexpected: CodeCacheHashTable") \
v(CompilationCacheTable, "unexpected: CompilationCacheTable") \
@@ -1951,7 +1951,7 @@ MaybeObject* LiveObjectList::GetObjRetainers(int obj_id,
// Get the constructor function for context extension and arguments array.
JSObject* arguments_boilerplate =
- isolate->context()->global_context()->arguments_boilerplate();
+ isolate->context()->native_context()->arguments_boilerplate();
JSFunction* arguments_function =
JSFunction::cast(arguments_boilerplate->map()->constructor());
diff --git a/src/3rdparty/v8/src/log.cc b/src/3rdparty/v8/src/log.cc
index d93a9d8..b049ffe 100644
--- a/src/3rdparty/v8/src/log.cc
+++ b/src/3rdparty/v8/src/log.cc
@@ -145,7 +145,7 @@ class Profiler: public Thread {
//
// StackTracer implementation
//
-void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
+DISABLE_ASAN void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
ASSERT(isolate->IsInitialized());
// Avoid collecting traces while doing GC.
@@ -526,6 +526,7 @@ Logger::Logger()
name_buffer_(new NameBuffer),
address_to_name_map_(NULL),
is_initialized_(false),
+ code_event_handler_(NULL),
last_address_(NULL),
prev_sp_(NULL),
prev_function_(NULL),
@@ -541,6 +542,52 @@ Logger::~Logger() {
}
+void Logger::IssueCodeAddedEvent(Code* code,
+ const char* name,
+ size_t name_len) {
+ JitCodeEvent event;
+ event.type = JitCodeEvent::CODE_ADDED;
+ event.code_start = code->instruction_start();
+ event.code_len = code->instruction_size();
+ event.name.str = name;
+ event.name.len = name_len;
+
+ code_event_handler_(&event);
+}
+
+
+void Logger::IssueCodeMovedEvent(Address from, Address to) {
+ Code* from_code = Code::cast(HeapObject::FromAddress(from));
+
+ JitCodeEvent event;
+ event.type = JitCodeEvent::CODE_MOVED;
+ event.code_start = from_code->instruction_start();
+ event.code_len = from_code->instruction_size();
+
+ // Calculate the header size.
+ const size_t header_size =
+ from_code->instruction_start() - reinterpret_cast<byte*>(from_code);
+
+ // Calculate the new start address of the instructions.
+ event.new_code_start =
+ reinterpret_cast<byte*>(HeapObject::FromAddress(to)) + header_size;
+
+ code_event_handler_(&event);
+}
+
+
+void Logger::IssueCodeRemovedEvent(Address from) {
+ Code* from_code = Code::cast(HeapObject::FromAddress(from));
+
+ JitCodeEvent event;
+ event.type = JitCodeEvent::CODE_REMOVED;
+ event.code_start = from_code->instruction_start();
+ event.code_len = from_code->instruction_size();
+
+ code_event_handler_(&event);
+}
+
+
#define DECLARE_EVENT(ignore1, name) name,
static const char* const kLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
LOG_EVENTS_AND_TAGS_LIST(DECLARE_EVENT)
@@ -864,13 +911,17 @@ void Logger::SetterCallbackEvent(String* name, Address entry_point) {
void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
const char* comment) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[tag]);
name_buffer_->AppendByte(':');
name_buffer_->AppendBytes(comment);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -899,13 +950,17 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
String* name) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[tag]);
name_buffer_->AppendByte(':');
name_buffer_->AppendString(name);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -940,14 +995,18 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
SharedFunctionInfo* shared,
String* name) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[tag]);
name_buffer_->AppendByte(':');
name_buffer_->AppendBytes(ComputeMarker(code));
name_buffer_->AppendString(name);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -981,8 +1040,8 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
Code* code,
SharedFunctionInfo* shared,
String* source, int line) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[tag]);
name_buffer_->AppendByte(':');
@@ -993,6 +1052,10 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
name_buffer_->AppendByte(':');
name_buffer_->AppendInt(line);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -1022,13 +1085,17 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[tag]);
name_buffer_->AppendByte(':');
name_buffer_->AppendInt(args_count);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -1055,13 +1122,17 @@ void Logger::CodeMovingGCEvent() {
void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
- if (!log_->IsEnabled()) return;
- if (FLAG_ll_prof || Serializer::enabled()) {
+ if (!is_logging_code_events()) return;
+ if (FLAG_ll_prof || Serializer::enabled() || code_event_handler_ != NULL) {
name_buffer_->Reset();
name_buffer_->AppendBytes(kLogEventsNames[REG_EXP_TAG]);
name_buffer_->AppendByte(':');
name_buffer_->AppendString(source);
}
+ if (code_event_handler_ != NULL) {
+ IssueCodeAddedEvent(code, name_buffer_->get(), name_buffer_->size());
+ }
+ if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) {
LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
}
@@ -1083,6 +1154,7 @@ void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
void Logger::CodeMoveEvent(Address from, Address to) {
+ if (code_event_handler_ != NULL) IssueCodeMovedEvent(from, to);
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) LowLevelCodeMoveEvent(from, to);
if (Serializer::enabled() && address_to_name_map_ != NULL) {
@@ -1093,6 +1165,7 @@ void Logger::CodeMoveEvent(Address from, Address to) {
void Logger::CodeDeleteEvent(Address from) {
+ if (code_event_handler_ != NULL) IssueCodeRemovedEvent(from);
if (!log_->IsEnabled()) return;
if (FLAG_ll_prof) LowLevelCodeDeleteEvent(from);
if (Serializer::enabled() && address_to_name_map_ != NULL) {
@@ -1392,7 +1465,7 @@ static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis,
void Logger::LogCodeObject(Object* object) {
- if (FLAG_log_code || FLAG_ll_prof) {
+ if (FLAG_log_code || FLAG_ll_prof || is_logging_code_events()) {
Code* code_object = Code::cast(object);
LogEventsAndTags tag = Logger::STUB_TAG;
const char* description = "Unknown code from the snapshot";
@@ -1676,6 +1749,18 @@ bool Logger::SetUp() {
}
+void Logger::SetCodeEventHandler(uint32_t options,
+ JitCodeEventHandler event_handler) {
+ code_event_handler_ = event_handler;
+
+ if (code_event_handler_ != NULL && (options & kJitCodeEventEnumExisting)) {
+ HandleScope scope;
+ LogCodeObjects();
+ LogCompiledFunctions();
+ }
+}
+
+
Sampler* Logger::sampler() {
return ticker_;
}
diff --git a/src/3rdparty/v8/src/log.h b/src/3rdparty/v8/src/log.h
index 03c7b3b..33f359a 100644
--- a/src/3rdparty/v8/src/log.h
+++ b/src/3rdparty/v8/src/log.h
@@ -86,6 +86,15 @@ class Ticker;
logger->Call; \
} while (false)
+#define LOG_CODE_EVENT(isolate, Call) \
+ do { \
+ v8::internal::Logger* logger = \
+ (isolate)->logger(); \
+ if (logger->is_logging_code_events()) \
+ logger->Call; \
+ } while (false)
+
+
#define LOG_EVENTS_AND_TAGS_LIST(V) \
V(CODE_CREATION_EVENT, "code-creation") \
V(CODE_MOVE_EVENT, "code-move") \
@@ -151,6 +160,10 @@ class Logger {
// Acquires resources for logging if the right flags are set.
bool SetUp();
+ // Sets the current code event handler.
+ void SetCodeEventHandler(uint32_t options,
+ JitCodeEventHandler event_handler);
+
void EnsureTickerStarted();
void EnsureTickerStopped();
@@ -274,6 +287,10 @@ class Logger {
return logging_nesting_ > 0;
}
+ bool is_logging_code_events() {
+ return is_logging() || code_event_handler_ != NULL;
+ }
+
// Pause/Resume collection of profiling data.
// When data collection is paused, CPU Tick events are discarded until
// data collection is Resumed.
@@ -312,6 +329,11 @@ class Logger {
Logger();
~Logger();
+ // Issue code notifications.
+ void IssueCodeAddedEvent(Code* code, const char* name, size_t name_len);
+ void IssueCodeMovedEvent(Address from, Address to);
+ void IssueCodeRemovedEvent(Address from);
+
// Emits the profiler's first message.
void ProfilerBeginEvent();
@@ -413,6 +435,9 @@ class Logger {
// 'true' between SetUp() and TearDown().
bool is_initialized_;
+ // The code event handler - if any.
+ JitCodeEventHandler code_event_handler_;
+
// Support for 'incremental addresses' in compressed logs:
// LogMessageBuilder::AppendAddress(Address addr)
Address last_address_;
diff --git a/src/3rdparty/v8/src/mark-compact-inl.h b/src/3rdparty/v8/src/mark-compact-inl.h
index 2f7e31f..10773e7 100644
--- a/src/3rdparty/v8/src/mark-compact-inl.h
+++ b/src/3rdparty/v8/src/mark-compact-inl.h
@@ -52,32 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
}
-bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
- if (MarkObjectWithoutPush(obj)) {
- marking_deque_.PushBlack(obj);
- return true;
- }
- return false;
-}
-
-
void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
if (!mark_bit.Get()) {
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
- ProcessNewlyMarkedObject(obj);
- }
-}
-
-
-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
- MarkBit mark_bit = Marking::MarkBitFrom(obj);
- if (!mark_bit.Get()) {
- SetMark(obj, mark_bit);
- return true;
+ ASSERT(IsMarked(obj));
+ ASSERT(HEAP->Contains(obj));
+ marking_deque_.PushBlack(obj);
}
- return false;
}
@@ -86,9 +69,6 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
- if (obj->IsMap()) {
- heap_->ClearCacheOnMap(Map::cast(obj));
- }
}
diff --git a/src/3rdparty/v8/src/mark-compact.cc b/src/3rdparty/v8/src/mark-compact.cc
index 82fc1fc..7040728 100644
--- a/src/3rdparty/v8/src/mark-compact.cc
+++ b/src/3rdparty/v8/src/mark-compact.cc
@@ -62,25 +62,24 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
sweep_precisely_(false),
reduce_memory_footprint_(false),
abort_incremental_marking_(false),
+ marking_parity_(ODD_MARKING_PARITY),
compacting_(false),
was_marked_incrementally_(false),
- flush_monomorphic_ics_(false),
tracer_(NULL),
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
- encountered_weak_maps_(NULL),
- marker_(this, this) { }
+ encountered_weak_maps_(NULL) { }
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
class VerifyMarkingVisitor: public ObjectVisitor {
public:
void VisitPointers(Object** start, Object** end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*current);
- ASSERT(HEAP->mark_compact_collector()->IsMarked(object));
+ CHECK(HEAP->mark_compact_collector()->IsMarked(object));
}
}
}
@@ -97,7 +96,7 @@ static void VerifyMarking(Address bottom, Address top) {
current += kPointerSize) {
object = HeapObject::FromAddress(current);
if (MarkCompactCollector::IsMarked(object)) {
- ASSERT(current >= next_object_must_be_here_or_later);
+ CHECK(current >= next_object_must_be_here_or_later);
object->Iterate(&visitor);
next_object_must_be_here_or_later = current + object->Size();
}
@@ -110,12 +109,12 @@ static void VerifyMarking(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), end);
// The bottom position is at the start of its page. Allows us to use
// page->area_start() as start of range on all pages.
- ASSERT_EQ(space->bottom(),
+ CHECK_EQ(space->bottom(),
NewSpacePage::FromAddress(space->bottom())->area_start());
while (it.has_next()) {
NewSpacePage* page = it.next();
Address limit = it.has_next() ? page->area_end() : end;
- ASSERT(limit == end || !page->Contains(end));
+ CHECK(limit == end || !page->Contains(end));
VerifyMarking(page->area_start(), limit);
}
}
@@ -175,7 +174,7 @@ static void VerifyEvacuation(Address bottom, Address top) {
current += kPointerSize) {
object = HeapObject::FromAddress(current);
if (MarkCompactCollector::IsMarked(object)) {
- ASSERT(current >= next_object_must_be_here_or_later);
+ CHECK(current >= next_object_must_be_here_or_later);
object->Iterate(&visitor);
next_object_must_be_here_or_later = current + object->Size();
}
@@ -191,7 +190,7 @@ static void VerifyEvacuation(NewSpace* space) {
NewSpacePage* page = it.next();
Address current = page->area_start();
Address limit = it.has_next() ? page->area_end() : space->top();
- ASSERT(limit == space->top() || !page->Contains(space->top()));
+ CHECK(limit == space->top() || !page->Contains(space->top()));
while (current < limit) {
HeapObject* object = HeapObject::FromAddress(current);
object->Iterate(&visitor);
@@ -223,6 +222,101 @@ static void VerifyEvacuation(Heap* heap) {
VerifyEvacuationVisitor visitor;
heap->IterateStrongRoots(&visitor, VISIT_ALL);
}
+#endif // VERIFY_HEAP
+
+
+#ifdef DEBUG
+class VerifyNativeContextSeparationVisitor: public ObjectVisitor {
+ public:
+ VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}
+
+ void VisitPointers(Object** start, Object** end) {
+ for (Object** current = start; current < end; current++) {
+ if ((*current)->IsHeapObject()) {
+ HeapObject* object = HeapObject::cast(*current);
+ if (object->IsString()) continue;
+ switch (object->map()->instance_type()) {
+ case JS_FUNCTION_TYPE:
+ CheckContext(JSFunction::cast(object)->context());
+ break;
+ case JS_GLOBAL_PROXY_TYPE:
+ CheckContext(JSGlobalProxy::cast(object)->native_context());
+ break;
+ case JS_GLOBAL_OBJECT_TYPE:
+ case JS_BUILTINS_OBJECT_TYPE:
+ CheckContext(GlobalObject::cast(object)->native_context());
+ break;
+ case JS_ARRAY_TYPE:
+ case JS_DATE_TYPE:
+ case JS_OBJECT_TYPE:
+ case JS_REGEXP_TYPE:
+ VisitPointer(HeapObject::RawField(object, JSObject::kMapOffset));
+ break;
+ case MAP_TYPE:
+ VisitPointer(HeapObject::RawField(object, Map::kPrototypeOffset));
+ VisitPointer(HeapObject::RawField(object, Map::kConstructorOffset));
+ break;
+ case FIXED_ARRAY_TYPE:
+ if (object->IsContext()) {
+ CheckContext(object);
+ } else {
+ FixedArray* array = FixedArray::cast(object);
+ int length = array->length();
+ // Set array length to zero to prevent cycles while iterating
+ // over array bodies, this is easier than intrusive marking.
+ array->set_length(0);
+ array->IterateBody(
+ FIXED_ARRAY_TYPE, FixedArray::SizeFor(length), this);
+ array->set_length(length);
+ }
+ break;
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ case JS_PROXY_TYPE:
+ case JS_VALUE_TYPE:
+ case TYPE_FEEDBACK_INFO_TYPE:
+ object->Iterate(this);
+ break;
+ case ACCESSOR_INFO_TYPE:
+ case BYTE_ARRAY_TYPE:
+ case CALL_HANDLER_INFO_TYPE:
+ case CODE_TYPE:
+ case FIXED_DOUBLE_ARRAY_TYPE:
+ case HEAP_NUMBER_TYPE:
+ case INTERCEPTOR_INFO_TYPE:
+ case ODDBALL_TYPE:
+ case SCRIPT_TYPE:
+ case SHARED_FUNCTION_INFO_TYPE:
+ break;
+ default:
+ UNREACHABLE();
+ }
+ }
+ }
+ }
+
+ private:
+ void CheckContext(Object* context) {
+ if (!context->IsContext()) return;
+ Context* native_context = Context::cast(context)->native_context();
+ if (current_native_context_ == NULL) {
+ current_native_context_ = native_context;
+ } else {
+ CHECK_EQ(current_native_context_, native_context);
+ }
+ }
+
+ Context* current_native_context_;
+};
+
+
+static void VerifyNativeContextSeparation(Heap* heap) {
+ HeapObjectIterator it(heap->code_space());
+
+ for (Object* object = it.Next(); object != NULL; object = it.Next()) {
+ VerifyNativeContextSeparationVisitor visitor;
+ Code::cast(object)->CodeIterateBody(&visitor);
+ }
+}
#endif
@@ -248,10 +342,17 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
if (!compacting_) {
ASSERT(evacuation_candidates_.length() == 0);
+#ifdef ENABLE_GDB_JIT_INTERFACE
+ // If GDBJIT interface is active disable compaction.
+ if (FLAG_gdbjit) return false;
+#endif
+
CollectEvacuationCandidates(heap()->old_pointer_space());
CollectEvacuationCandidates(heap()->old_data_space());
- if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
+ if (FLAG_compact_code_space &&
+ (mode == NON_INCREMENTAL_COMPACTION ||
+ FLAG_incremental_code_compaction)) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
@@ -286,7 +387,7 @@ void MarkCompactCollector::CollectGarbage() {
ClearWeakMaps();
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyMarking(heap_);
}
@@ -296,13 +397,26 @@ void MarkCompactCollector::CollectGarbage() {
if (!FLAG_collect_maps) ReattachInitialMaps();
+#ifdef DEBUG
+ if (FLAG_verify_native_context_separation) {
+ VerifyNativeContextSeparation(heap_);
+ }
+#endif
+
Finish();
+ if (marking_parity_ == EVEN_MARKING_PARITY) {
+ marking_parity_ = ODD_MARKING_PARITY;
+ } else {
+ ASSERT(marking_parity_ == ODD_MARKING_PARITY);
+ marking_parity_ = EVEN_MARKING_PARITY;
+ }
+
tracer_ = NULL;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
PageIterator it(space);
@@ -313,6 +427,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), space->top());
@@ -323,6 +438,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean() {
VerifyMarkbitsAreClean(heap_->old_pointer_space());
VerifyMarkbitsAreClean(heap_->old_data_space());
@@ -334,11 +450,11 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
- ASSERT(Marking::IsWhite(mark_bit));
- ASSERT_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
+ CHECK(Marking::IsWhite(mark_bit));
+ CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
}
}
-#endif
+#endif // VERIFY_HEAP
static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
@@ -500,12 +616,10 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
space->identity() == OLD_DATA_SPACE ||
space->identity() == CODE_SPACE);
+ static const int kMaxMaxEvacuationCandidates = 1000;
int number_of_pages = space->CountTotalPages();
-
- const int kMaxMaxEvacuationCandidates = 1000;
- int max_evacuation_candidates = Min(
- kMaxMaxEvacuationCandidates,
- static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));
+ int max_evacuation_candidates =
+ static_cast<int>(sqrt(number_of_pages / 2.0) + 1);
if (FLAG_stress_compaction || FLAG_always_compact) {
max_evacuation_candidates = kMaxMaxEvacuationCandidates;
@@ -535,25 +649,37 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
intptr_t over_reserved = reserved - space->SizeOfObjects();
static const intptr_t kFreenessThreshold = 50;
- if (over_reserved >= 2 * space->AreaSize() &&
- reduce_memory_footprint_) {
+ if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) {
+ // If reduction of memory footprint was requested, we are aggressive
+ // about choosing pages to free. We expect that half-empty pages
+ // are easier to compact so slightly bump the limit.
mode = REDUCE_MEMORY_FOOTPRINT;
-
- // We expect that empty pages are easier to compact so slightly bump the
- // limit.
max_evacuation_candidates += 2;
+ }
- if (FLAG_trace_fragmentation) {
- PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n",
- static_cast<double>(over_reserved) / MB,
- static_cast<int>(kFreenessThreshold));
- }
+
+ if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {
+ // If over-usage is very high (more than a third of the space), we
+ // try to free all mostly empty pages. We expect that almost empty
+ // pages are even easier to compact so bump the limit even more.
+ mode = REDUCE_MEMORY_FOOTPRINT;
+ max_evacuation_candidates *= 2;
+ }
+
+ if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
+ PrintF("Estimated over reserved memory: %.1f / %.1f MB (threshold %d)\n",
+ static_cast<double>(over_reserved) / MB,
+ static_cast<double>(reserved) / MB,
+ static_cast<int>(kFreenessThreshold));
}
intptr_t estimated_release = 0;
Candidate candidates[kMaxMaxEvacuationCandidates];
+ max_evacuation_candidates =
+ Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
+
int count = 0;
int fragmentation = 0;
Candidate* least = NULL;
@@ -566,7 +692,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
p->ClearEvacuationCandidate();
if (FLAG_stress_compaction) {
- int counter = space->heap()->ms_count();
+ unsigned int counter = space->heap()->ms_count();
uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
} else if (mode == REDUCE_MEMORY_FOOTPRINT) {
@@ -658,12 +784,6 @@ void MarkCompactCollector::AbortCompaction() {
void MarkCompactCollector::Prepare(GCTracer* tracer) {
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
- // Monomorphic ICs are preserved when possible, but need to be flushed
- // when they might be keeping a Context alive, or when the heap is about
- // to be serialized.
- flush_monomorphic_ics_ =
- heap()->isolate()->context_exit_happened() || Serializer::enabled();
-
// Rather than passing the tracer around we stash it in a static member
// variable.
tracer_ = tracer;
@@ -675,13 +795,6 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
ASSERT(!FLAG_never_compact || !FLAG_always_compact);
-#ifdef ENABLE_GDB_JIT_INTERFACE
- if (FLAG_gdbjit) {
- // If GDBJIT interface is active disable compaction.
- compacting_collection_ = false;
- }
-#endif
-
// Clear marking bits if incremental marking is aborted.
if (was_marked_incrementally_ && abort_incremental_marking_) {
heap()->incremental_marking()->Abort();
@@ -703,7 +816,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
space->PrepareForMarkCompact();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
VerifyMarkbitsAreClean();
}
@@ -754,133 +867,140 @@ void MarkCompactCollector::Finish() {
// and continue with marking. This process repeats until all reachable
// objects have been marked.
-class CodeFlusher {
- public:
- explicit CodeFlusher(Isolate* isolate)
- : isolate_(isolate),
- jsfunction_candidates_head_(NULL),
- shared_function_info_candidates_head_(NULL) {}
+void CodeFlusher::ProcessJSFunctionCandidates() {
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Object* undefined = isolate_->heap()->undefined_value();
- void AddCandidate(SharedFunctionInfo* shared_info) {
- SetNextCandidate(shared_info, shared_function_info_candidates_head_);
- shared_function_info_candidates_head_ = shared_info;
- }
+ JSFunction* candidate = jsfunction_candidates_head_;
+ JSFunction* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+ ClearNextCandidate(candidate, undefined);
- void AddCandidate(JSFunction* function) {
- ASSERT(function->code() == function->shared()->code());
+ SharedFunctionInfo* shared = candidate->shared();
- SetNextCandidate(function, jsfunction_candidates_head_);
- jsfunction_candidates_head_ = function;
- }
+ Code* code = shared->code();
+ MarkBit code_mark = Marking::MarkBitFrom(code);
+ if (!code_mark.Get()) {
+ shared->set_code(lazy_compile);
+ candidate->set_code(lazy_compile);
+ } else if (code == lazy_compile) {
+ candidate->set_code(lazy_compile);
+ }
- void ProcessCandidates() {
- ProcessSharedFunctionInfoCandidates();
- ProcessJSFunctionCandidates();
- }
+ // We are in the middle of a GC cycle so the write barrier in the code
+ // setter did not record the slot update and we have to do that manually.
+ Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
+ Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
+ isolate_->heap()->mark_compact_collector()->
+ RecordCodeEntrySlot(slot, target);
- private:
- void ProcessJSFunctionCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Object** shared_code_slot =
+ HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
+ isolate_->heap()->mark_compact_collector()->
+ RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
- JSFunction* candidate = jsfunction_candidates_head_;
- JSFunction* next_candidate;
- while (candidate != NULL) {
- next_candidate = GetNextCandidate(candidate);
+ candidate = next_candidate;
+ }
- SharedFunctionInfo* shared = candidate->shared();
+ jsfunction_candidates_head_ = NULL;
+}
- Code* code = shared->code();
- MarkBit code_mark = Marking::MarkBitFrom(code);
- if (!code_mark.Get()) {
- shared->set_code(lazy_compile);
- candidate->set_code(lazy_compile);
- } else {
- candidate->set_code(shared->code());
- }
- // We are in the middle of a GC cycle so the write barrier in the code
- // setter did not record the slot update and we have to do that manually.
- Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
- Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
- isolate_->heap()->mark_compact_collector()->
- RecordCodeEntrySlot(slot, target);
+void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
- RecordSharedFunctionInfoCodeSlot(shared);
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+ SharedFunctionInfo* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+ ClearNextCandidate(candidate);
- candidate = next_candidate;
+ Code* code = candidate->code();
+ MarkBit code_mark = Marking::MarkBitFrom(code);
+ if (!code_mark.Get()) {
+ candidate->set_code(lazy_compile);
}
- jsfunction_candidates_head_ = NULL;
+ Object** code_slot =
+ HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
+ isolate_->heap()->mark_compact_collector()->
+ RecordSlot(code_slot, code_slot, *code_slot);
+
+ candidate = next_candidate;
}
+ shared_function_info_candidates_head_ = NULL;
+}
+
- void ProcessSharedFunctionInfoCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+void CodeFlusher::EvictCandidate(JSFunction* function) {
+ ASSERT(!function->next_function_link()->IsUndefined());
+ Object* undefined = isolate_->heap()->undefined_value();
- SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
- SharedFunctionInfo* next_candidate;
+ JSFunction* candidate = jsfunction_candidates_head_;
+ JSFunction* next_candidate;
+ if (candidate == function) {
+ next_candidate = GetNextCandidate(function);
+ jsfunction_candidates_head_ = next_candidate;
+ ClearNextCandidate(function, undefined);
+ } else {
while (candidate != NULL) {
next_candidate = GetNextCandidate(candidate);
- SetNextCandidate(candidate, NULL);
- Code* code = candidate->code();
- MarkBit code_mark = Marking::MarkBitFrom(code);
- if (!code_mark.Get()) {
- candidate->set_code(lazy_compile);
+ if (next_candidate == function) {
+ next_candidate = GetNextCandidate(function);
+ SetNextCandidate(candidate, next_candidate);
+ ClearNextCandidate(function, undefined);
}
- RecordSharedFunctionInfoCodeSlot(candidate);
-
candidate = next_candidate;
}
-
- shared_function_info_candidates_head_ = NULL;
}
+}
- void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
- Object** slot = HeapObject::RawField(shared,
- SharedFunctionInfo::kCodeOffset);
- isolate_->heap()->mark_compact_collector()->
- RecordSlot(slot, slot, HeapObject::cast(*slot));
- }
- static JSFunction** GetNextCandidateField(JSFunction* candidate) {
- return reinterpret_cast<JSFunction**>(
- candidate->address() + JSFunction::kCodeEntryOffset);
- }
+void CodeFlusher::EvictJSFunctionCandidates() {
+ Object* undefined = isolate_->heap()->undefined_value();
- static JSFunction* GetNextCandidate(JSFunction* candidate) {
- return *GetNextCandidateField(candidate);
+ JSFunction* candidate = jsfunction_candidates_head_;
+ JSFunction* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+ ClearNextCandidate(candidate, undefined);
+ candidate = next_candidate;
}
- static void SetNextCandidate(JSFunction* candidate,
- JSFunction* next_candidate) {
- *GetNextCandidateField(candidate) = next_candidate;
- }
+ jsfunction_candidates_head_ = NULL;
+}
- static SharedFunctionInfo** GetNextCandidateField(
- SharedFunctionInfo* candidate) {
- Code* code = candidate->code();
- return reinterpret_cast<SharedFunctionInfo**>(
- code->address() + Code::kGCMetadataOffset);
- }
- static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
- return reinterpret_cast<SharedFunctionInfo*>(
- candidate->code()->gc_metadata());
+void CodeFlusher::EvictSharedFunctionInfoCandidates() {
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+ SharedFunctionInfo* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+ ClearNextCandidate(candidate);
+ candidate = next_candidate;
}
- static void SetNextCandidate(SharedFunctionInfo* candidate,
- SharedFunctionInfo* next_candidate) {
- candidate->code()->set_gc_metadata(next_candidate);
- }
+ shared_function_info_candidates_head_ = NULL;
+}
- Isolate* isolate_;
- JSFunction* jsfunction_candidates_head_;
- SharedFunctionInfo* shared_function_info_candidates_head_;
- DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
-};
+void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
+ Heap* heap = isolate_->heap();
+
+ JSFunction** slot = &jsfunction_candidates_head_;
+ JSFunction* candidate = jsfunction_candidates_head_;
+ while (candidate != NULL) {
+ if (heap->InFromSpace(candidate)) {
+ v->VisitPointer(reinterpret_cast<Object**>(slot));
+ }
+ candidate = GetNextCandidate(*slot);
+ slot = GetNextCandidateSlot(*slot);
+ }
+}
MarkCompactCollector::~MarkCompactCollector() {
@@ -927,81 +1047,24 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
}
-class StaticMarkingVisitor : public StaticVisitorBase {
+class MarkCompactMarkingVisitor
+ : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
public:
- static inline void IterateBody(Map* map, HeapObject* obj) {
- table_.GetVisitor(map)(map, obj);
- }
-
- static void Initialize() {
- table_.Register(kVisitShortcutCandidate,
- &FixedBodyVisitor<StaticMarkingVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitConsString,
- &FixedBodyVisitor<StaticMarkingVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitSlicedString,
- &FixedBodyVisitor<StaticMarkingVisitor,
- SlicedString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitFixedArray,
- &FlexibleBodyVisitor<StaticMarkingVisitor,
- FixedArray::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitGlobalContext, &VisitGlobalContext);
+ static void ObjectStatsVisitBase(StaticVisitorBase::VisitorId id,
+ Map* map, HeapObject* obj);
- table_.Register(kVisitFixedDoubleArray, DataObjectVisitor::Visit);
+ static void ObjectStatsCountFixedArray(
+ FixedArrayBase* fixed_array,
+ FixedArraySubInstanceType fast_type,
+ FixedArraySubInstanceType dictionary_type);
- table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
- table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
- table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
- table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
-
- table_.Register(kVisitOddball,
- &FixedBodyVisitor<StaticMarkingVisitor,
- Oddball::BodyDescriptor,
- void>::Visit);
- table_.Register(kVisitMap,
- &FixedBodyVisitor<StaticMarkingVisitor,
- Map::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitCode, &VisitCode);
-
- table_.Register(kVisitSharedFunctionInfo,
- &VisitSharedFunctionInfoAndFlushCode);
-
- table_.Register(kVisitJSFunction,
- &VisitJSFunctionAndFlushCode);
-
- table_.Register(kVisitJSRegExp,
- &VisitRegExpAndFlushCode);
-
- table_.Register(kVisitPropertyCell,
- &FixedBodyVisitor<StaticMarkingVisitor,
- JSGlobalPropertyCell::BodyDescriptor,
- void>::Visit);
-
- table_.RegisterSpecializations<DataObjectVisitor,
- kVisitDataObject,
- kVisitDataObjectGeneric>();
-
- table_.RegisterSpecializations<JSObjectVisitor,
- kVisitJSObject,
- kVisitJSObjectGeneric>();
+ template<MarkCompactMarkingVisitor::VisitorId id>
+ class ObjectStatsTracker {
+ public:
+ static inline void Visit(Map* map, HeapObject* obj);
+ };
- table_.RegisterSpecializations<StructObjectVisitor,
- kVisitStruct,
- kVisitStructGeneric>();
- }
+ static void Initialize();
INLINE(static void VisitPointer(Heap* heap, Object** p)) {
MarkObjectByPointer(heap->mark_compact_collector(), p, p);
@@ -1020,48 +1083,21 @@ class StaticMarkingVisitor : public StaticVisitorBase {
}
}
- static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
- ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
- JSGlobalPropertyCell* cell =
- JSGlobalPropertyCell::cast(rinfo->target_cell());
- MarkBit mark = Marking::MarkBitFrom(cell);
- heap->mark_compact_collector()->MarkObject(cell, mark);
- }
-
- static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) {
- ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- // TODO(mstarzinger): We do not short-circuit cons strings here, verify
- // that there can be no such embedded pointers and add assertion here.
- HeapObject* object = HeapObject::cast(rinfo->target_object());
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
+ // Marks the object black and pushes it on the marking stack.
+ INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
MarkBit mark = Marking::MarkBitFrom(object);
heap->mark_compact_collector()->MarkObject(object, mark);
}
- static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
- && (target->ic_state() == MEGAMORPHIC ||
- heap->mark_compact_collector()->flush_monomorphic_ics_ ||
- target->ic_age() != heap->global_ic_age())) {
- IC::Clear(rinfo->pc());
- target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
+ MarkBit mark_bit = Marking::MarkBitFrom(object);
+ if (!mark_bit.Get()) {
+ heap->mark_compact_collector()->SetMark(object, mark_bit);
+ return true;
}
- MarkBit code_mark = Marking::MarkBitFrom(target);
- heap->mark_compact_collector()->MarkObject(target, code_mark);
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- }
-
- static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
- rinfo->IsPatchedReturnSequence()) ||
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
- rinfo->IsPatchedDebugBreakSlotSequence()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
- MarkBit code_mark = Marking::MarkBitFrom(target);
- heap->mark_compact_collector()->MarkObject(target, code_mark);
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+ return false;
}
// Mark object pointed to by p.
@@ -1116,28 +1152,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
return true;
}
- static inline void VisitExternalReference(Address* p) { }
- static inline void VisitExternalReference(RelocInfo* rinfo) { }
- static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
-
- private:
- class DataObjectVisitor {
- public:
- template<int size>
- static void VisitSpecialized(Map* map, HeapObject* object) {
- }
-
- static void Visit(Map* map, HeapObject* object) {
- }
- };
-
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
- JSObject::BodyDescriptor,
- void> JSObjectVisitor;
-
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
- StructBodyDescriptor,
- void> StructObjectVisitor;
+ INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+ shared->BeforeVisitingPointers();
+ }
static void VisitJSWeakMap(Map* map, HeapObject* object) {
MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
@@ -1151,12 +1169,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Skip visiting the backing hash table containing the mappings.
int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
- BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
map->GetHeap(),
object,
JSWeakMap::BodyDescriptor::kStartOffset,
JSWeakMap::kTableOffset);
- BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers(
map->GetHeap(),
object,
JSWeakMap::kTableOffset + kPointerSize,
@@ -1176,136 +1194,14 @@ class StaticMarkingVisitor : public StaticVisitorBase {
ASSERT(MarkCompactCollector::IsMarked(table->map()));
}
- static void VisitCode(Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- Code* code = reinterpret_cast<Code*>(object);
- if (FLAG_cleanup_code_caches_at_gc) {
- code->ClearTypeFeedbackCells(heap);
- }
- code->CodeIterateBody<StaticMarkingVisitor>(heap);
- }
+ private:
+ template<int id>
+ static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj);
// Code flushing support.
- // How many collections newly compiled code object will survive before being
- // flushed.
- static const int kCodeAgeThreshold = 5;
-
static const int kRegExpCodeThreshold = 5;
- inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
- Object* undefined = heap->undefined_value();
- return (info->script() != undefined) &&
- (reinterpret_cast<Script*>(info->script())->source() != undefined);
- }
-
-
- inline static bool IsCompiled(JSFunction* function) {
- return function->code() !=
- function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
- }
-
- inline static bool IsCompiled(SharedFunctionInfo* function) {
- return function->code() !=
- function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
- }
-
- inline static bool IsFlushable(Heap* heap, JSFunction* function) {
- SharedFunctionInfo* shared_info = function->unchecked_shared();
-
- // Code is either on stack, in compilation cache or referenced
- // by optimized version of function.
- MarkBit code_mark = Marking::MarkBitFrom(function->code());
- if (code_mark.Get()) {
- if (!Marking::MarkBitFrom(shared_info).Get()) {
- shared_info->set_code_age(0);
- }
- return false;
- }
-
- // We do not flush code for optimized functions.
- if (function->code() != shared_info->code()) {
- return false;
- }
-
- return IsFlushable(heap, shared_info);
- }
-
- inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
- // Code is either on stack, in compilation cache or referenced
- // by optimized version of function.
- MarkBit code_mark =
- Marking::MarkBitFrom(shared_info->code());
- if (code_mark.Get()) {
- return false;
- }
-
- // The function must be compiled and have the source code available,
- // to be able to recompile it in case we need the function again.
- if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
- return false;
- }
-
- // We never flush code for Api functions.
- Object* function_data = shared_info->function_data();
- if (function_data->IsFunctionTemplateInfo()) {
- return false;
- }
-
- // Only flush code for functions.
- if (shared_info->code()->kind() != Code::FUNCTION) {
- return false;
- }
-
- // Function must be lazy compilable.
- if (!shared_info->allows_lazy_compilation()) {
- return false;
- }
-
- // If this is a full script wrapped in a function we do no flush the code.
- if (shared_info->is_toplevel()) {
- return false;
- }
-
- // Age this shared function info.
- if (shared_info->code_age() < kCodeAgeThreshold) {
- shared_info->set_code_age(shared_info->code_age() + 1);
- return false;
- }
-
- return true;
- }
-
-
- static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
- if (!IsFlushable(heap, function)) return false;
-
- // This function's code looks flushable. But we have to postpone the
- // decision until we see all functions that point to the same
- // SharedFunctionInfo because some of them might be optimized.
- // That would make the nonoptimized version of the code nonflushable,
- // because it is required for bailing out from optimized code.
- heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
- return true;
- }
-
- static inline bool IsValidNotBuiltinContext(Object* ctx) {
- return ctx->IsContext() &&
- !Context::cast(ctx)->global()->IsJSBuiltinsObject();
- }
-
-
- static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
-
- FixedBodyVisitor<StaticMarkingVisitor,
- SharedFunctionInfo::BodyDescriptor,
- void>::Visit(map, object);
- }
-
-
static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
JSRegExp* re,
bool is_ascii) {
@@ -1368,7 +1264,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
Heap* heap = map->GetHeap();
MarkCompactCollector* collector = heap->mark_compact_collector();
if (!collector->is_code_flushing_enabled()) {
- VisitJSRegExpFields(map, object);
+ VisitJSRegExp(map, object);
return;
}
JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
@@ -1376,183 +1272,161 @@ class StaticMarkingVisitor : public StaticVisitorBase {
UpdateRegExpCodeAgeAndFlush(heap, re, true);
UpdateRegExpCodeAgeAndFlush(heap, re, false);
// Visit the fields of the RegExp, including the updated FixedArray.
- VisitJSRegExpFields(map, object);
+ VisitJSRegExp(map, object);
}
+ static VisitorDispatchTable<Callback> non_count_table_;
+};
- static void VisitSharedFunctionInfoAndFlushCode(Map* map,
- HeapObject* object) {
- Heap* heap = map->GetHeap();
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
- if (shared->ic_age() != heap->global_ic_age()) {
- shared->ResetForNewContext(heap->global_ic_age());
- }
- MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
- if (!collector->is_code_flushing_enabled()) {
- VisitSharedFunctionInfoGeneric(map, object);
- return;
+void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray(
+ FixedArrayBase* fixed_array,
+ FixedArraySubInstanceType fast_type,
+ FixedArraySubInstanceType dictionary_type) {
+ Heap* heap = fixed_array->map()->GetHeap();
+ if (fixed_array->map() != heap->fixed_cow_array_map() &&
+ fixed_array->map() != heap->fixed_double_array_map() &&
+ fixed_array != heap->empty_fixed_array()) {
+ if (fixed_array->IsDictionary()) {
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+ dictionary_type,
+ fixed_array->Size());
+ } else {
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+ fast_type,
+ fixed_array->Size());
}
- VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
}
+}
- static void VisitSharedFunctionInfoAndFlushCodeGeneric(
- Map* map, HeapObject* object, bool known_flush_code_candidate) {
- Heap* heap = map->GetHeap();
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
-
- if (!known_flush_code_candidate) {
- known_flush_code_candidate = IsFlushable(heap, shared);
- if (known_flush_code_candidate) {
- heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
- }
- }
-
- VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
+void MarkCompactMarkingVisitor::ObjectStatsVisitBase(
+ MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) {
+ Heap* heap = map->GetHeap();
+ int object_size = obj->Size();
+ heap->RecordObjectStats(map->instance_type(), -1, object_size);
+ non_count_table_.GetVisitorById(id)(map, obj);
+ if (obj->IsJSObject()) {
+ JSObject* object = JSObject::cast(obj);
+ ObjectStatsCountFixedArray(object->elements(),
+ DICTIONARY_ELEMENTS_SUB_TYPE,
+ FAST_ELEMENTS_SUB_TYPE);
+ ObjectStatsCountFixedArray(object->properties(),
+ DICTIONARY_PROPERTIES_SUB_TYPE,
+ FAST_PROPERTIES_SUB_TYPE);
}
+}
- static void VisitCodeEntry(Heap* heap, Address entry_address) {
- Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
- MarkBit mark = Marking::MarkBitFrom(code);
- heap->mark_compact_collector()->MarkObject(code, mark);
- heap->mark_compact_collector()->
- RecordCodeEntrySlot(entry_address, code);
- }
-
- static void VisitGlobalContext(Map* map, HeapObject* object) {
- FixedBodyVisitor<StaticMarkingVisitor,
- Context::MarkCompactBodyDescriptor,
- void>::Visit(map, object);
+template<MarkCompactMarkingVisitor::VisitorId id>
+void MarkCompactMarkingVisitor::ObjectStatsTracker<id>::Visit(
+ Map* map, HeapObject* obj) {
+ ObjectStatsVisitBase(id, map, obj);
+}
- MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
- for (int idx = Context::FIRST_WEAK_SLOT;
- idx < Context::GLOBAL_CONTEXT_SLOTS;
- ++idx) {
- Object** slot =
- HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
- collector->RecordSlot(slot, slot, *slot);
- }
- }
- static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+ MarkCompactMarkingVisitor::kVisitMap> {
+ public:
+ static inline void Visit(Map* map, HeapObject* obj) {
Heap* heap = map->GetHeap();
- MarkCompactCollector* collector = heap->mark_compact_collector();
- if (!collector->is_code_flushing_enabled()) {
- VisitJSFunction(map, object);
- return;
- }
-
- JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
- // The function must have a valid context and not be a builtin.
- bool flush_code_candidate = false;
- if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
- flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
- }
-
- if (!flush_code_candidate) {
- Code* code = jsfunction->shared()->code();
- MarkBit code_mark = Marking::MarkBitFrom(code);
- collector->MarkObject(code, code_mark);
-
- if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
- collector->MarkInlinedFunctionsCode(jsfunction->code());
- }
- }
-
- VisitJSFunctionFields(map,
- reinterpret_cast<JSFunction*>(object),
- flush_code_candidate);
+ Map* map_obj = Map::cast(obj);
+ ASSERT(map->instance_type() == MAP_TYPE);
+ DescriptorArray* array = map_obj->instance_descriptors();
+ if (map_obj->owns_descriptors() &&
+ array != heap->empty_descriptor_array()) {
+ int fixed_array_size = array->Size();
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+ DESCRIPTOR_ARRAY_SUB_TYPE,
+ fixed_array_size);
+ }
+ if (map_obj->HasTransitionArray()) {
+ int fixed_array_size = map_obj->transitions()->Size();
+ heap->RecordObjectStats(FIXED_ARRAY_TYPE,
+ TRANSITION_ARRAY_SUB_TYPE,
+ fixed_array_size);
+ }
+ if (map_obj->code_cache() != heap->empty_fixed_array()) {
+ heap->RecordObjectStats(
+ FIXED_ARRAY_TYPE,
+ MAP_CODE_CACHE_SUB_TYPE,
+ FixedArray::cast(map_obj->code_cache())->Size());
+ }
+ ObjectStatsVisitBase(kVisitMap, map, obj);
}
+};
- static void VisitJSFunction(Map* map, HeapObject* object) {
- VisitJSFunctionFields(map,
- reinterpret_cast<JSFunction*>(object),
- false);
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+ MarkCompactMarkingVisitor::kVisitCode> {
+ public:
+ static inline void Visit(Map* map, HeapObject* obj) {
+ Heap* heap = map->GetHeap();
+ int object_size = obj->Size();
+ ASSERT(map->instance_type() == CODE_TYPE);
+ heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size);
+ ObjectStatsVisitBase(kVisitCode, map, obj);
}
+};
-#define SLOT_ADDR(obj, offset) \
- reinterpret_cast<Object**>((obj)->address() + offset)
-
-
- static inline void VisitJSFunctionFields(Map* map,
- JSFunction* object,
- bool flush_code_candidate) {
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+ MarkCompactMarkingVisitor::kVisitSharedFunctionInfo> {
+ public:
+ static inline void Visit(Map* map, HeapObject* obj) {
Heap* heap = map->GetHeap();
-
- VisitPointers(heap,
- HeapObject::RawField(object, JSFunction::kPropertiesOffset),
- HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
-
- if (!flush_code_candidate) {
- VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
- } else {
- // Don't visit code object.
-
- // Visit shared function info to avoid double checking of it's
- // flushability.
- SharedFunctionInfo* shared_info = object->unchecked_shared();
- MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
- if (!shared_info_mark.Get()) {
- Map* shared_info_map = shared_info->map();
- MarkBit shared_info_map_mark =
- Marking::MarkBitFrom(shared_info_map);
- heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
- heap->mark_compact_collector()->MarkObject(shared_info_map,
- shared_info_map_mark);
- VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
- shared_info,
- true);
- }
+ SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
+ if (sfi->scope_info() != heap->empty_fixed_array()) {
+ heap->RecordObjectStats(
+ FIXED_ARRAY_TYPE,
+ SCOPE_INFO_SUB_TYPE,
+ FixedArray::cast(sfi->scope_info())->Size());
}
-
- VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSFunction::kCodeEntryOffset + kPointerSize),
- HeapObject::RawField(object,
- JSFunction::kNonWeakFieldsEndOffset));
+ ObjectStatsVisitBase(kVisitSharedFunctionInfo, map, obj);
}
+};
- static inline void VisitJSRegExpFields(Map* map,
- HeapObject* object) {
- int last_property_offset =
- JSRegExp::kSize + kPointerSize * map->inobject_properties();
- VisitPointers(map->GetHeap(),
- SLOT_ADDR(object, JSRegExp::kPropertiesOffset),
- SLOT_ADDR(object, last_property_offset));
- }
-
-
- static void VisitSharedFunctionInfoFields(Heap* heap,
- HeapObject* object,
- bool flush_code_candidate) {
- VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
- if (!flush_code_candidate) {
- VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
+template<>
+class MarkCompactMarkingVisitor::ObjectStatsTracker<
+ MarkCompactMarkingVisitor::kVisitFixedArray> {
+ public:
+ static inline void Visit(Map* map, HeapObject* obj) {
+ Heap* heap = map->GetHeap();
+ FixedArray* fixed_array = FixedArray::cast(obj);
+ if (fixed_array == heap->symbol_table()) {
+ heap->RecordObjectStats(
+ FIXED_ARRAY_TYPE,
+ SYMBOL_TABLE_SUB_TYPE,
+ fixed_array->Size());
}
-
- VisitPointers(heap,
- SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
- SLOT_ADDR(object, SharedFunctionInfo::kSize));
+ ObjectStatsVisitBase(kVisitFixedArray, map, obj);
}
+};
- #undef SLOT_ADDR
- typedef void (*Callback)(Map* map, HeapObject* object);
+void MarkCompactMarkingVisitor::Initialize() {
+ StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
- static VisitorDispatchTable<Callback> table_;
-};
+ table_.Register(kVisitJSRegExp,
+ &VisitRegExpAndFlushCode);
+
+ if (FLAG_track_gc_object_stats) {
+ // Copy the visitor table to make call-through possible.
+ non_count_table_.CopyFrom(&table_);
+#define VISITOR_ID_COUNT_FUNCTION(id) \
+ table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit);
+ VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION)
+#undef VISITOR_ID_COUNT_FUNCTION
+ }
+}
-VisitorDispatchTable<StaticMarkingVisitor::Callback>
- StaticMarkingVisitor::table_;
+VisitorDispatchTable<MarkCompactMarkingVisitor::Callback>
+ MarkCompactMarkingVisitor::non_count_table_;
class MarkingVisitor : public ObjectVisitor {
@@ -1560,11 +1434,11 @@ class MarkingVisitor : public ObjectVisitor {
explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
void VisitPointer(Object** p) {
- StaticMarkingVisitor::VisitPointer(heap_, p);
+ MarkCompactMarkingVisitor::VisitPointer(heap_, p);
}
void VisitPointers(Object** start, Object** end) {
- StaticMarkingVisitor::VisitPointers(heap_, start, end);
+ MarkCompactMarkingVisitor::VisitPointers(heap_, start, end);
}
private:
@@ -1611,26 +1485,6 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
};
-void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
- // For optimized functions we should retain both non-optimized version
- // of it's code and non-optimized version of all inlined functions.
- // This is required to support bailing out from inlined code.
- DeoptimizationInputData* data =
- DeoptimizationInputData::cast(code->deoptimization_data());
-
- FixedArray* literals = data->LiteralArray();
-
- for (int i = 0, count = data->InlinedFunctionCount()->value();
- i < count;
- i++) {
- JSFunction* inlined = JSFunction::cast(literals->get(i));
- Code* inlined_code = inlined->shared()->code();
- MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
- MarkObject(inlined_code, inlined_code_mark);
- }
-}
-
-
void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
ThreadLocalTop* top) {
for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
@@ -1643,7 +1497,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
MarkBit code_mark = Marking::MarkBitFrom(code);
MarkObject(code, code_mark);
if (frame->is_optimized()) {
- MarkInlinedFunctionsCode(frame->LookupCode());
+ MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
+ frame->LookupCode());
}
}
}
@@ -1652,21 +1507,13 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
void MarkCompactCollector::PrepareForCodeFlushing() {
ASSERT(heap() == Isolate::Current()->heap());
- // TODO(1609) Currently incremental marker does not support code flushing.
- if (!FLAG_flush_code || was_marked_incrementally_) {
- EnableCodeFlushing(false);
- return;
+ // Enable code flushing for non-incremental cycles.
+ if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
+ EnableCodeFlushing(!was_marked_incrementally_);
}
-#ifdef ENABLE_DEBUGGER_SUPPORT
- if (heap()->isolate()->debug()->IsLoaded() ||
- heap()->isolate()->debug()->has_break_points()) {
- EnableCodeFlushing(false);
- return;
- }
-#endif
-
- EnableCodeFlushing(true);
+ // If code flushing is disabled, there is no need to prepare for it.
+ if (!is_code_flushing_enabled()) return;
// Ensure that empty descriptor array is marked. Method MarkDescriptorArray
// relies on it being marked before any other descriptor array.
@@ -1723,7 +1570,7 @@ class RootMarkingVisitor : public ObjectVisitor {
// Mark the map pointer and body, and push them on the marking stack.
MarkBit map_mark = Marking::MarkBitFrom(map);
collector_->MarkObject(map, map_mark);
- StaticMarkingVisitor::IterateBody(map, object);
+ MarkCompactMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
@@ -1786,151 +1633,6 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
};
-void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
- ASSERT(IsMarked(object));
- ASSERT(HEAP->Contains(object));
- if (object->IsMap()) {
- Map* map = Map::cast(object);
- heap_->ClearCacheOnMap(map);
-
- // When map collection is enabled we have to mark through map's transitions
- // in a special way to make transition links weak. Only maps for subclasses
- // of JSReceiver can have transitions.
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- marker_.MarkMapContents(map);
- } else {
- marking_deque_.PushBlack(map);
- }
- } else {
- marking_deque_.PushBlack(object);
- }
-}
-
-
-// Force instantiation of template instances.
-template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
-template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
-
-
-template <class T>
-void Marker<T>::MarkMapContents(Map* map) {
- // Mark prototype transitions array but don't push it into marking stack.
- // This will make references from it weak. We will clean dead prototype
- // transitions in ClearNonLiveTransitions.
- Object** proto_trans_slot =
- HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
- HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
- if (prototype_transitions->IsFixedArray()) {
- mark_compact_collector()->RecordSlot(proto_trans_slot,
- proto_trans_slot,
- prototype_transitions);
- MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
- if (!mark.Get()) {
- mark.Set();
- MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
- prototype_transitions->Size());
- }
- }
-
- // Make sure that the back pointer stored either in the map itself or inside
- // its prototype transitions array is marked. Treat pointers in the descriptor
- // array as weak and also mark that array to prevent visiting it later.
- base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
-
- Object** descriptor_array_slot =
- HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
- Object* descriptor_array = *descriptor_array_slot;
- if (!descriptor_array->IsSmi()) {
- MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
- }
-
- // Mark the Object* fields of the Map. Since the descriptor array has been
- // marked already, it is fine that one of these fields contains a pointer
- // to it. But make sure to skip back pointer and prototype transitions.
- STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
- Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
- Object** start_slot = HeapObject::RawField(
- map, Map::kPointerFieldsBeginOffset);
- Object** end_slot = HeapObject::RawField(
- map, Map::kPrototypeTransitionsOrBackPointerOffset);
- for (Object** slot = start_slot; slot < end_slot; slot++) {
- Object* obj = *slot;
- if (!obj->NonFailureIsHeapObject()) continue;
- mark_compact_collector()->RecordSlot(start_slot, slot, obj);
- base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
- }
-}
-
-
-template <class T>
-void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
- // Empty descriptor array is marked as a root before any maps are marked.
- ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
-
- // The DescriptorArray contains a pointer to its contents array, but the
- // contents array will be marked black and hence not be visited again.
- if (!base_marker()->MarkObjectAndPush(descriptors)) return;
- FixedArray* contents = FixedArray::cast(
- descriptors->get(DescriptorArray::kContentArrayIndex));
- ASSERT(contents->length() >= 2);
- ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents)));
- base_marker()->MarkObjectWithoutPush(contents);
-
- // Contents contains (value, details) pairs. If the descriptor contains a
- // transition (value is a Map), we don't mark the value as live. It might
- // be set to the NULL_DESCRIPTOR in ClearNonLiveTransitions later.
- for (int i = 0; i < contents->length(); i += 2) {
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
-
- Object** slot = contents->data_start() + i;
- if (!(*slot)->IsHeapObject()) continue;
- HeapObject* value = HeapObject::cast(*slot);
-
- mark_compact_collector()->RecordSlot(slot, slot, *slot);
-
- switch (details.type()) {
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- base_marker()->MarkObjectAndPush(value);
- break;
- case CALLBACKS:
- if (!value->IsAccessorPair()) {
- base_marker()->MarkObjectAndPush(value);
- } else if (base_marker()->MarkObjectWithoutPush(value)) {
- AccessorPair* accessors = AccessorPair::cast(value);
- MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
- MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
- }
- break;
- case ELEMENTS_TRANSITION:
- // For maps with multiple elements transitions, the transition maps are
- // stored in a FixedArray. Keep the fixed array alive but not the maps
- // that it refers to.
- if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value);
- break;
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- break;
- }
- }
-}
-
-
-template <class T>
-void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
- Object** slot = HeapObject::RawField(accessors, offset);
- HeapObject* accessor = HeapObject::cast(*slot);
- if (accessor->IsMap()) return;
- mark_compact_collector()->RecordSlot(slot, slot, accessor);
- base_marker()->MarkObjectAndPush(accessor);
-}
-
-
// Fill the marking stack with overflowed objects returned by the given
// iterator. Stop when the marking stack is filled or the end of the space
// is reached, whichever comes first.
@@ -2167,7 +1869,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
MarkBit map_mark = Marking::MarkBitFrom(map);
MarkObject(map, map_mark);
- StaticMarkingVisitor::IterateBody(map, object);
+ MarkCompactMarkingVisitor::IterateBody(map, object);
}
// Process encountered weak maps, mark objects only reachable by those
@@ -2264,7 +1966,7 @@ void MarkCompactCollector::MarkLiveObjects() {
// non-incremental marker can deal with them as if overflow
// occured during normal marking.
// But incremental marker uses a separate marking deque
- // so we have to explicitly copy it's overflow state.
+ // so we have to explicitly copy its overflow state.
incremental_marking->Finalize();
incremental_marking_overflowed =
incremental_marking->marking_deque()->overflowed();
@@ -2306,7 +2008,7 @@ void MarkCompactCollector::MarkLiveObjects() {
ASSERT(cell->IsJSGlobalPropertyCell());
if (IsMarked(cell)) {
int offset = JSGlobalPropertyCell::kValueOffset;
- StaticMarkingVisitor::VisitPointer(
+ MarkCompactMarkingVisitor::VisitPointer(
heap(),
reinterpret_cast<Object**>(cell->address() + offset));
}
@@ -2373,17 +2075,26 @@ void MarkCompactCollector::AfterMarking() {
// Flush code from collected candidates.
if (is_code_flushing_enabled()) {
code_flusher_->ProcessCandidates();
+ // If incremental marker does not support code flushing, we need to
+ // disable it before incremental marking steps for next cycle.
+ if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
+ EnableCodeFlushing(false);
+ }
}
if (!FLAG_watch_ic_patching) {
// Clean up dead objects from the runtime profiler.
heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
}
+
+ if (FLAG_track_gc_object_stats) {
+ heap()->CheckpointObjectStats();
+ }
}
void MarkCompactCollector::ProcessMapCaches() {
- Object* raw_context = heap()->global_contexts_list_;
+ Object* raw_context = heap()->native_contexts_list_;
while (raw_context != heap()->undefined_value()) {
Context* context = reinterpret_cast<Context*>(raw_context);
if (IsMarked(context)) {
@@ -2483,7 +2194,7 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
int number_of_transitions = map->NumberOfProtoTransitions();
- FixedArray* prototype_transitions = map->prototype_transitions();
+ FixedArray* prototype_transitions = map->GetPrototypeTransitions();
int new_number_of_transitions = 0;
const int header = Map::kProtoTransitionHeaderSize;
@@ -2561,7 +2272,8 @@ void MarkCompactCollector::ProcessWeakMaps() {
Object** value_slot =
HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
ObjectHashTable::EntryToValueIndex(i)));
- StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot);
+ MarkCompactMarkingVisitor::MarkObjectByPointer(
+ this, anchor, value_slot);
}
}
weak_map_obj = weak_map->next();
@@ -2675,15 +2387,33 @@ class PointersUpdatingVisitor: public ObjectVisitor {
void VisitEmbeddedPointer(RelocInfo* rinfo) {
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
Object* target = rinfo->target_object();
+ Object* old_target = target;
VisitPointer(&target);
- rinfo->set_target_object(target);
+ // Avoid unnecessary changes that might unnecessary flush the instruction
+ // cache.
+ if (target != old_target) {
+ rinfo->set_target_object(target);
+ }
}
void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ Object* old_target = target;
VisitPointer(&target);
- rinfo->set_target_address(Code::cast(target)->instruction_start());
+ if (target != old_target) {
+ rinfo->set_target_address(Code::cast(target)->instruction_start());
+ }
+ }
+
+ void VisitCodeAgeSequence(RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+ Object* stub = rinfo->code_age_stub();
+ ASSERT(stub != NULL);
+ VisitPointer(&stub);
+ if (stub != rinfo->code_age_stub()) {
+ rinfo->set_code_age_stub(Code::cast(stub));
+ }
}
void VisitDebugTarget(RelocInfo* rinfo) {
@@ -2739,7 +2469,9 @@ static void UpdatePointer(HeapObject** p, HeapObject* object) {
// We have to zap this pointer, because the store buffer may overflow later,
// and then we have to scan the entire heap and we don't want to find
// spurious newspace pointers in the old space.
- *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0));
+ // TODO(mstarzinger): This was changed to a sentinel value to track down
+ // rare crashes, change it back to Smi::FromInt(0) later.
+ *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1)); // flood
}
}
@@ -3221,6 +2953,8 @@ void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
+ Heap::RelocationLock relocation_lock(heap());
+
bool code_slots_filtering_required;
{ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
code_slots_filtering_required = MarkInvalidatedCode();
@@ -3359,8 +3093,8 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
- // Update pointer from the global contexts list.
- updating_visitor.VisitPointer(heap_->global_contexts_list_address());
+ // Update pointer from the native contexts list.
+ updating_visitor.VisitPointer(heap_->native_contexts_list_address());
heap_->symbol_table()->Iterate(&updating_visitor);
@@ -3383,7 +3117,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
heap_->isolate()->inner_pointer_to_code_cache()->Flush();
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyEvacuation(heap_);
}
@@ -3795,11 +3529,6 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
bool lazy_sweeping_active = false;
bool unused_page_present = false;
- intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
- intptr_t space_left =
- Min(heap()->OldGenPromotionLimit(old_space_size),
- heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;
-
while (it.has_next()) {
Page* p = it.next();
@@ -3859,7 +3588,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
}
freed_bytes += SweepConservatively(space, p);
pages_swept++;
- if (space_left + freed_bytes > newspace_size) {
+ if (freed_bytes > 2 * newspace_size) {
space->SetPagesToSweep(p->next_page());
lazy_sweeping_active = true;
} else {
@@ -3935,11 +3664,19 @@ void MarkCompactCollector::SweepSpaces() {
void MarkCompactCollector::EnableCodeFlushing(bool enable) {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (heap()->isolate()->debug()->IsLoaded() ||
+ heap()->isolate()->debug()->has_break_points()) {
+ enable = false;
+ }
+#endif
+
if (enable) {
if (code_flusher_ != NULL) return;
code_flusher_ = new CodeFlusher(heap()->isolate());
} else {
if (code_flusher_ == NULL) return;
+ code_flusher_->EvictAllCandidates();
delete code_flusher_;
code_flusher_ = NULL;
}
@@ -3963,7 +3700,8 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
void MarkCompactCollector::Initialize() {
- StaticMarkingVisitor::Initialize();
+ MarkCompactMarkingVisitor::Initialize();
+ IncrementalMarking::Initialize();
}
@@ -4039,6 +3777,20 @@ void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
}
+void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
+ ASSERT(heap()->gc_state() == Heap::MARK_COMPACT);
+ if (is_compacting()) {
+ Code* host = heap()->isolate()->inner_pointer_to_code_cache()->
+ GcSafeFindCodeForInnerPointer(pc);
+ MarkBit mark_bit = Marking::MarkBitFrom(host);
+ if (Marking::IsBlack(mark_bit)) {
+ RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
+ RecordRelocSlot(&rinfo, target);
+ }
+ }
+}
+
+
static inline SlotsBuffer::SlotType DecodeSlotType(
SlotsBuffer::ObjectSlot slot) {
return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
diff --git a/src/3rdparty/v8/src/mark-compact.h b/src/3rdparty/v8/src/mark-compact.h
index dbc2869..0a4c1ea 100644
--- a/src/3rdparty/v8/src/mark-compact.h
+++ b/src/3rdparty/v8/src/mark-compact.h
@@ -304,6 +304,26 @@ class SlotsBuffer {
NUMBER_OF_SLOT_TYPES
};
+ static const char* SlotTypeToString(SlotType type) {
+ switch (type) {
+ case EMBEDDED_OBJECT_SLOT:
+ return "EMBEDDED_OBJECT_SLOT";
+ case RELOCATED_CODE_OBJECT:
+ return "RELOCATED_CODE_OBJECT";
+ case CODE_TARGET_SLOT:
+ return "CODE_TARGET_SLOT";
+ case CODE_ENTRY_SLOT:
+ return "CODE_ENTRY_SLOT";
+ case DEBUG_TARGET_SLOT:
+ return "DEBUG_TARGET_SLOT";
+ case JS_RETURN_SLOT:
+ return "JS_RETURN_SLOT";
+ case NUMBER_OF_SLOT_TYPES:
+ return "NUMBER_OF_SLOT_TYPES";
+ }
+ return "UNKNOWN SlotType";
+ }
+
void UpdateSlots(Heap* heap);
void UpdateSlotsWithFilter(Heap* heap);
@@ -383,31 +403,96 @@ class SlotsBuffer {
};
-// -------------------------------------------------------------------------
-// Marker shared between incremental and non-incremental marking
-template<class BaseMarker> class Marker {
+// CodeFlusher collects candidates for code flushing during marking and
+// processes those candidates after marking has completed in order to
+// reset those functions referencing code objects that would otherwise
+// be unreachable. Code objects can be referenced in two ways:
+// - SharedFunctionInfo references unoptimized code.
+// - JSFunction references either unoptimized or optimized code.
+// We are not allowed to flush unoptimized code for functions that got
+// optimized or inlined into optimized code, because we might bailout
+// into the unoptimized code again during deoptimization.
+class CodeFlusher {
public:
- Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
- : base_marker_(base_marker),
- mark_compact_collector_(mark_compact_collector) {}
+ explicit CodeFlusher(Isolate* isolate)
+ : isolate_(isolate),
+ jsfunction_candidates_head_(NULL),
+ shared_function_info_candidates_head_(NULL) {}
+
+ void AddCandidate(SharedFunctionInfo* shared_info) {
+ if (GetNextCandidate(shared_info) == NULL) {
+ SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+ shared_function_info_candidates_head_ = shared_info;
+ }
+ }
- // Mark pointers in a Map and its DescriptorArray together, possibly
- // treating transitions or back pointers weak.
- void MarkMapContents(Map* map);
- void MarkDescriptorArray(DescriptorArray* descriptors);
- void MarkAccessorPairSlot(AccessorPair* accessors, int offset);
+ void AddCandidate(JSFunction* function) {
+ ASSERT(function->code() == function->shared()->code());
+ if (GetNextCandidate(function)->IsUndefined()) {
+ SetNextCandidate(function, jsfunction_candidates_head_);
+ jsfunction_candidates_head_ = function;
+ }
+ }
+
+ void EvictCandidate(JSFunction* function);
+
+ void ProcessCandidates() {
+ ProcessSharedFunctionInfoCandidates();
+ ProcessJSFunctionCandidates();
+ }
+
+ void EvictAllCandidates() {
+ EvictJSFunctionCandidates();
+ EvictSharedFunctionInfoCandidates();
+ }
+
+ void IteratePointersToFromSpace(ObjectVisitor* v);
private:
- BaseMarker* base_marker() {
- return base_marker_;
+ void ProcessJSFunctionCandidates();
+ void ProcessSharedFunctionInfoCandidates();
+ void EvictJSFunctionCandidates();
+ void EvictSharedFunctionInfoCandidates();
+
+ static JSFunction** GetNextCandidateSlot(JSFunction* candidate) {
+ return reinterpret_cast<JSFunction**>(
+ HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
+ }
+
+ static JSFunction* GetNextCandidate(JSFunction* candidate) {
+ Object* next_candidate = candidate->next_function_link();
+ return reinterpret_cast<JSFunction*>(next_candidate);
+ }
+
+ static void SetNextCandidate(JSFunction* candidate,
+ JSFunction* next_candidate) {
+ candidate->set_next_function_link(next_candidate);
}
- MarkCompactCollector* mark_compact_collector() {
- return mark_compact_collector_;
+ static void ClearNextCandidate(JSFunction* candidate, Object* undefined) {
+ ASSERT(undefined->IsUndefined());
+ candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
}
- BaseMarker* base_marker_;
- MarkCompactCollector* mark_compact_collector_;
+ static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
+ Object* next_candidate = candidate->code()->gc_metadata();
+ return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
+ }
+
+ static void SetNextCandidate(SharedFunctionInfo* candidate,
+ SharedFunctionInfo* next_candidate) {
+ candidate->code()->set_gc_metadata(next_candidate);
+ }
+
+ static void ClearNextCandidate(SharedFunctionInfo* candidate) {
+ candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
+ }
+
+ Isolate* isolate_;
+ JSFunction* jsfunction_candidates_head_;
+ SharedFunctionInfo* shared_function_info_candidates_head_;
+
+ DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
};
@@ -505,7 +590,7 @@ class MarkCompactCollector {
PRECISE
};
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
@@ -553,6 +638,7 @@ class MarkCompactCollector {
void RecordRelocSlot(RelocInfo* rinfo, Object* target);
void RecordCodeEntrySlot(Address slot, Code* target);
+ void RecordCodeTargetPatch(Address pc, Code* target);
INLINE(void RecordSlot(Object** anchor_slot, Object** slot, Object* object));
@@ -574,6 +660,8 @@ class MarkCompactCollector {
bool is_compacting() const { return compacting_; }
+ MarkingParity marking_parity() { return marking_parity_; }
+
private:
MarkCompactCollector();
~MarkCompactCollector();
@@ -606,14 +694,14 @@ class MarkCompactCollector {
bool abort_incremental_marking_;
+ MarkingParity marking_parity_;
+
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
bool compacting_;
bool was_marked_incrementally_;
- bool flush_monomorphic_ics_;
-
// A pointer to the current stack-allocated GC tracer object during a full
// collection (NULL before and after).
GCTracer* tracer_;
@@ -636,15 +724,9 @@ class MarkCompactCollector {
friend class RootMarkingVisitor;
friend class MarkingVisitor;
- friend class StaticMarkingVisitor;
+ friend class MarkCompactMarkingVisitor;
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
- friend class Marker<IncrementalMarking>;
- friend class Marker<MarkCompactCollector>;
-
- // Mark non-optimize code for functions inlined into the given optimized
- // code. This will prevent it from being flushed.
- void MarkInlinedFunctionsCode(Code* code);
// Mark code objects that are active on the stack to prevent them
// from being flushed.
@@ -658,25 +740,13 @@ class MarkCompactCollector {
void AfterMarking();
// Marks the object black and pushes it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for non-incremental marking only.
- INLINE(bool MarkObjectAndPush(HeapObject* obj));
-
- // Marks the object black and pushes it on the marking stack.
// This is for non-incremental marking only.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
- // Marks the object black without pushing it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for non-incremental marking only.
- INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
-
// Marks the object black assuming that it is not yet marked.
// This is for non-incremental marking only.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
- void ProcessNewlyMarkedObject(HeapObject* obj);
-
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
@@ -779,7 +849,6 @@ class MarkCompactCollector {
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
- Marker<MarkCompactCollector> marker_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
diff --git a/src/3rdparty/v8/src/messages.cc b/src/3rdparty/v8/src/messages.cc
index a0793c2..ce965fc 100644
--- a/src/3rdparty/v8/src/messages.cc
+++ b/src/3rdparty/v8/src/messages.cc
@@ -106,11 +106,20 @@ void MessageHandler::ReportMessage(Isolate* isolate,
// We are calling into embedder's code which can throw exceptions.
// Thus we need to save current exception state, reset it to the clean one
// and ignore scheduled exceptions callbacks can throw.
+
+ // We pass the exception object into the message handler callback though.
+ Object* exception_object = isolate->heap()->undefined_value();
+ if (isolate->has_pending_exception()) {
+ isolate->pending_exception()->ToObject(&exception_object);
+ }
+ Handle<Object> exception_handle(exception_object);
+
Isolate::ExceptionScope exception_scope(isolate);
isolate->clear_pending_exception();
isolate->set_external_caught_exception(false);
v8::Local<v8::Message> api_message_obj = v8::Utils::MessageToLocal(message);
+ v8::Local<v8::Value> api_exception_obj = v8::Utils::ToLocal(exception_handle);
v8::NeanderArray global_listeners(FACTORY->message_listeners());
int global_length = global_listeners.length();
@@ -123,15 +132,13 @@ void MessageHandler::ReportMessage(Isolate* isolate,
for (int i = 0; i < global_length; i++) {
HandleScope scope;
if (global_listeners.get(i)->IsUndefined()) continue;
- v8::NeanderObject listener(JSObject::cast(global_listeners.get(i)));
- Handle<Foreign> callback_obj(Foreign::cast(listener.get(0)));
+ Handle<Foreign> callback_obj(Foreign::cast(global_listeners.get(i)));
v8::MessageCallback callback =
FUNCTION_CAST<v8::MessageCallback>(callback_obj->foreign_address());
- Handle<Object> callback_data(listener.get(1));
{
// Do not allow exceptions to propagate.
v8::TryCatch try_catch;
- callback(api_message_obj, v8::Utils::ToLocal(callback_data));
+ callback(api_message_obj, api_exception_obj);
}
if (isolate->has_scheduled_exception()) {
isolate->clear_scheduled_exception();
@@ -148,7 +155,9 @@ Handle<String> MessageHandler::GetMessage(Handle<Object> data) {
JSFunction::cast(
Isolate::Current()->js_builtins_object()->
GetPropertyNoExceptionThrown(*fmt_str)));
- Handle<Object> argv[] = { data };
+ Handle<JSMessageObject> message = Handle<JSMessageObject>::cast(data);
+ Handle<Object> argv[] = { Handle<Object>(message->type()),
+ Handle<Object>(message->arguments()) };
bool caught_exception;
Handle<Object> result =
diff --git a/src/3rdparty/v8/src/messages.js b/src/3rdparty/v8/src/messages.js
index ab71936..f04bed9 100644
--- a/src/3rdparty/v8/src/messages.js
+++ b/src/3rdparty/v8/src/messages.js
@@ -26,18 +26,137 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// -------------------------------------------------------------------
-//
-// If this object gets passed to an error constructor the error will
-// get an accessor for .message that constructs a descriptive error
-// message on access.
-var kAddMessageAccessorsMarker = { };
-
-// This will be lazily initialized when first needed (and forcibly
-// overwritten even though it's const).
-var kMessages = 0;
-function FormatString(format, message) {
- var args = %MessageGetArguments(message);
+var kMessages = {
+ // Error
+ cyclic_proto: ["Cyclic __proto__ value"],
+ code_gen_from_strings: ["%0"],
+ // TypeError
+ unexpected_token: ["Unexpected token ", "%0"],
+ unexpected_token_number: ["Unexpected number"],
+ unexpected_token_string: ["Unexpected string"],
+ unexpected_token_identifier: ["Unexpected identifier"],
+ unexpected_reserved: ["Unexpected reserved word"],
+ unexpected_strict_reserved: ["Unexpected strict mode reserved word"],
+ unexpected_eos: ["Unexpected end of input"],
+ malformed_regexp: ["Invalid regular expression: /", "%0", "/: ", "%1"],
+ unterminated_regexp: ["Invalid regular expression: missing /"],
+ regexp_flags: ["Cannot supply flags when constructing one RegExp from another"],
+ incompatible_method_receiver: ["Method ", "%0", " called on incompatible receiver ", "%1"],
+ invalid_lhs_in_assignment: ["Invalid left-hand side in assignment"],
+ invalid_lhs_in_for_in: ["Invalid left-hand side in for-in"],
+ invalid_lhs_in_postfix_op: ["Invalid left-hand side expression in postfix operation"],
+ invalid_lhs_in_prefix_op: ["Invalid left-hand side expression in prefix operation"],
+ multiple_defaults_in_switch: ["More than one default clause in switch statement"],
+ newline_after_throw: ["Illegal newline after throw"],
+ redeclaration: ["%0", " '", "%1", "' has already been declared"],
+ no_catch_or_finally: ["Missing catch or finally after try"],
+ unknown_label: ["Undefined label '", "%0", "'"],
+ uncaught_exception: ["Uncaught ", "%0"],
+ stack_trace: ["Stack Trace:\n", "%0"],
+ called_non_callable: ["%0", " is not a function"],
+ undefined_method: ["Object ", "%1", " has no method '", "%0", "'"],
+ property_not_function: ["Property '", "%0", "' of object ", "%1", " is not a function"],
+ cannot_convert_to_primitive: ["Cannot convert object to primitive value"],
+ not_constructor: ["%0", " is not a constructor"],
+ not_defined: ["%0", " is not defined"],
+ non_object_property_load: ["Cannot read property '", "%0", "' of ", "%1"],
+ non_object_property_store: ["Cannot set property '", "%0", "' of ", "%1"],
+ non_object_property_call: ["Cannot call method '", "%0", "' of ", "%1"],
+ with_expression: ["%0", " has no properties"],
+ illegal_invocation: ["Illegal invocation"],
+ no_setter_in_callback: ["Cannot set property ", "%0", " of ", "%1", " which has only a getter"],
+ apply_non_function: ["Function.prototype.apply was called on ", "%0", ", which is a ", "%1", " and not a function"],
+ apply_wrong_args: ["Function.prototype.apply: Arguments list has wrong type"],
+ invalid_in_operator_use: ["Cannot use 'in' operator to search for '", "%0", "' in ", "%1"],
+ instanceof_function_expected: ["Expecting a function in instanceof check, but got ", "%0"],
+ instanceof_nonobject_proto: ["Function has non-object prototype '", "%0", "' in instanceof check"],
+ null_to_object: ["Cannot convert null to object"],
+ reduce_no_initial: ["Reduce of empty array with no initial value"],
+ getter_must_be_callable: ["Getter must be a function: ", "%0"],
+ setter_must_be_callable: ["Setter must be a function: ", "%0"],
+ value_and_accessor: ["Invalid property. A property cannot both have accessors and be writable or have a value, ", "%0"],
+ proto_object_or_null: ["Object prototype may only be an Object or null"],
+ property_desc_object: ["Property description must be an object: ", "%0"],
+ redefine_disallowed: ["Cannot redefine property: ", "%0"],
+ define_disallowed: ["Cannot define property:", "%0", ", object is not extensible."],
+ non_extensible_proto: ["%0", " is not extensible"],
+ handler_non_object: ["Proxy.", "%0", " called with non-object as handler"],
+ proto_non_object: ["Proxy.", "%0", " called with non-object as prototype"],
+ trap_function_expected: ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"],
+ handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
+ handler_trap_must_be_callable: ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
+ handler_returned_false: ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"],
+ handler_returned_undefined: ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"],
+ proxy_prop_not_configurable: ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"],
+ proxy_non_object_prop_names: ["Trap '", "%1", "' returned non-object ", "%0"],
+ proxy_repeated_prop_name: ["Trap '", "%1", "' returned repeated property name '", "%2", "'"],
+ invalid_weakmap_key: ["Invalid value used as weak map key"],
+ not_date_object: ["this is not a Date object."],
+ observe_non_object: ["Object.", "%0", " cannot ", "%0", " non-object"],
+ observe_non_function: ["Object.", "%0", " cannot deliver to non-function"],
+ observe_callback_frozen: ["Object.observe cannot deliver to a frozen function object"],
+ observe_type_non_string: ["Invalid changeRecord with non-string 'type' property"],
+ observe_notify_non_notifier: ["notify called on non-notifier object"],
+ // RangeError
+ invalid_array_length: ["Invalid array length"],
+ stack_overflow: ["Maximum call stack size exceeded"],
+ invalid_time_value: ["Invalid time value"],
+ // SyntaxError
+ unable_to_parse: ["Parse error"],
+ invalid_regexp_flags: ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
+ invalid_regexp: ["Invalid RegExp pattern /", "%0", "/"],
+ illegal_break: ["Illegal break statement"],
+ illegal_continue: ["Illegal continue statement"],
+ illegal_return: ["Illegal return statement"],
+ illegal_let: ["Illegal let declaration outside extended mode"],
+ error_loading_debugger: ["Error loading debugger"],
+ no_input_to_regexp: ["No input to ", "%0"],
+ invalid_json: ["String '", "%0", "' is not valid JSON"],
+ circular_structure: ["Converting circular structure to JSON"],
+ called_on_non_object: ["%0", " called on non-object"],
+ called_on_null_or_undefined: ["%0", " called on null or undefined"],
+ array_indexof_not_defined: ["Array.getIndexOf: Argument undefined"],
+ object_not_extensible: ["Can't add property ", "%0", ", object is not extensible"],
+ illegal_access: ["Illegal access"],
+ invalid_preparser_data: ["Invalid preparser data for function ", "%0"],
+ strict_mode_with: ["Strict mode code may not include a with statement"],
+ strict_catch_variable: ["Catch variable may not be eval or arguments in strict mode"],
+ too_many_arguments: ["Too many arguments in function call (only 32766 allowed)"],
+ too_many_parameters: ["Too many parameters in function definition (only 32766 allowed)"],
+ too_many_variables: ["Too many variables declared (only 131071 allowed)"],
+ strict_param_name: ["Parameter name eval or arguments is not allowed in strict mode"],
+ strict_param_dupe: ["Strict mode function may not have duplicate parameter names"],
+ strict_var_name: ["Variable name may not be eval or arguments in strict mode"],
+ strict_function_name: ["Function name may not be eval or arguments in strict mode"],
+ strict_octal_literal: ["Octal literals are not allowed in strict mode."],
+ strict_duplicate_property: ["Duplicate data property in object literal not allowed in strict mode"],
+ accessor_data_property: ["Object literal may not have data and accessor property with the same name"],
+ accessor_get_set: ["Object literal may not have multiple get/set accessors with the same name"],
+ strict_lhs_assignment: ["Assignment to eval or arguments is not allowed in strict mode"],
+ strict_lhs_postfix: ["Postfix increment/decrement may not have eval or arguments operand in strict mode"],
+ strict_lhs_prefix: ["Prefix increment/decrement may not have eval or arguments operand in strict mode"],
+ strict_reserved_word: ["Use of future reserved word in strict mode"],
+ strict_delete: ["Delete of an unqualified identifier in strict mode."],
+ strict_delete_property: ["Cannot delete property '", "%0", "' of ", "%1"],
+ strict_const: ["Use of const in strict mode."],
+ strict_function: ["In strict mode code, functions can only be declared at top level or immediately within another function." ],
+ strict_read_only_property: ["Cannot assign to read only property '", "%0", "' of ", "%1"],
+ strict_cannot_assign: ["Cannot assign to read only '", "%0", "' in strict mode"],
+ strict_poison_pill: ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
+ strict_caller: ["Illegal access to a strict mode caller function."],
+ unprotected_let: ["Illegal let declaration in unprotected statement context."],
+ unprotected_const: ["Illegal const declaration in unprotected statement context."],
+ cant_prevent_ext_external_array_elements: ["Cannot prevent extension of an object with external array elements"],
+ redef_external_array_element: ["Cannot redefine a property of an object with external array elements"],
+ harmony_const_assign: ["Assignment to constant variable."],
+ invalid_module_path: ["Module does not export '", "%0", "', or export is not itself a module"],
+ module_type_error: ["Module '", "%0", "' used improperly"],
+ module_export_undefined: ["Export '", "%0", "' is not defined in module"],
+};
+
+
+function FormatString(format, args) {
var result = "";
var arg_num = 0;
for (var i = 0; i < format.length; i++) {
@@ -48,9 +167,14 @@ function FormatString(format, message) {
if (arg_num < 4) {
// str is one of %0, %1, %2 or %3.
try {
- str = ToDetailString(args[arg_num]);
+ str = NoSideEffectToString(args[arg_num]);
} catch (e) {
- str = "#<error>";
+ if (%IsJSModule(args[arg_num]))
+ str = "module";
+ else if (IS_SPEC_OBJECT(args[arg_num]))
+ str = "object";
+ else
+ str = "#<error>";
}
}
}
@@ -60,6 +184,26 @@ function FormatString(format, message) {
}
+function NoSideEffectToString(obj) {
+ if (IS_STRING(obj)) return obj;
+ if (IS_NUMBER(obj)) return %_NumberToString(obj);
+ if (IS_BOOLEAN(obj)) return x ? 'true' : 'false';
+ if (IS_UNDEFINED(obj)) return 'undefined';
+ if (IS_NULL(obj)) return 'null';
+ if (IS_OBJECT(obj) && %GetDataProperty(obj, "toString") === ObjectToString) {
+ var constructor = obj.constructor;
+ if (typeof constructor == "function") {
+ var constructorName = constructor.name;
+ if (IS_STRING(constructorName) && constructorName !== "") {
+ return "#<" + constructorName + ">";
+ }
+ }
+ }
+ if (IsNativeErrorObject(obj)) return %_CallFunction(obj, ErrorToString);
+ return %_CallFunction(obj, ObjectToString);
+}
+
+
// To check if something is a native error we need to check the
// concrete native error types. It is not sufficient to use instanceof
// since it possible to create an object that has Error.prototype on
@@ -110,7 +254,7 @@ function MakeGenericError(constructor, type, args) {
if (IS_UNDEFINED(args)) {
args = [];
}
- var e = new constructor(kAddMessageAccessorsMarker);
+ var e = new constructor(FormatMessage(type, args));
e.type = type;
e.arguments = args;
return e;
@@ -130,154 +274,10 @@ function MakeGenericError(constructor, type, args) {
// Helper functions; called from the runtime system.
-function FormatMessage(message) {
- if (kMessages === 0) {
- var messagesDictionary = [
- // Error
- "cyclic_proto", ["Cyclic __proto__ value"],
- "code_gen_from_strings", ["Code generation from strings disallowed for this context"],
- // TypeError
- "unexpected_token", ["Unexpected token ", "%0"],
- "unexpected_token_number", ["Unexpected number"],
- "unexpected_token_string", ["Unexpected string"],
- "unexpected_token_identifier", ["Unexpected identifier"],
- "unexpected_reserved", ["Unexpected reserved word"],
- "unexpected_strict_reserved", ["Unexpected strict mode reserved word"],
- "unexpected_eos", ["Unexpected end of input"],
- "malformed_regexp", ["Invalid regular expression: /", "%0", "/: ", "%1"],
- "unterminated_regexp", ["Invalid regular expression: missing /"],
- "regexp_flags", ["Cannot supply flags when constructing one RegExp from another"],
- "incompatible_method_receiver", ["Method ", "%0", " called on incompatible receiver ", "%1"],
- "invalid_lhs_in_assignment", ["Invalid left-hand side in assignment"],
- "invalid_lhs_in_for_in", ["Invalid left-hand side in for-in"],
- "invalid_lhs_in_postfix_op", ["Invalid left-hand side expression in postfix operation"],
- "invalid_lhs_in_prefix_op", ["Invalid left-hand side expression in prefix operation"],
- "multiple_defaults_in_switch", ["More than one default clause in switch statement"],
- "newline_after_throw", ["Illegal newline after throw"],
- "redeclaration", ["%0", " '", "%1", "' has already been declared"],
- "no_catch_or_finally", ["Missing catch or finally after try"],
- "unknown_label", ["Undefined label '", "%0", "'"],
- "uncaught_exception", ["Uncaught ", "%0"],
- "stack_trace", ["Stack Trace:\n", "%0"],
- "called_non_callable", ["%0", " is not a function"],
- "undefined_method", ["Object ", "%1", " has no method '", "%0", "'"],
- "property_not_function", ["Property '", "%0", "' of object ", "%1", " is not a function"],
- "cannot_convert_to_primitive", ["Cannot convert object to primitive value"],
- "not_constructor", ["%0", " is not a constructor"],
- "not_defined", ["%0", " is not defined"],
- "non_object_property_load", ["Cannot read property '", "%0", "' of ", "%1"],
- "non_object_property_store", ["Cannot set property '", "%0", "' of ", "%1"],
- "non_object_property_call", ["Cannot call method '", "%0", "' of ", "%1"],
- "with_expression", ["%0", " has no properties"],
- "illegal_invocation", ["Illegal invocation"],
- "no_setter_in_callback", ["Cannot set property ", "%0", " of ", "%1", " which has only a getter"],
- "apply_non_function", ["Function.prototype.apply was called on ", "%0", ", which is a ", "%1", " and not a function"],
- "apply_wrong_args", ["Function.prototype.apply: Arguments list has wrong type"],
- "invalid_in_operator_use", ["Cannot use 'in' operator to search for '", "%0", "' in ", "%1"],
- "instanceof_function_expected", ["Expecting a function in instanceof check, but got ", "%0"],
- "instanceof_nonobject_proto", ["Function has non-object prototype '", "%0", "' in instanceof check"],
- "null_to_object", ["Cannot convert null to object"],
- "reduce_no_initial", ["Reduce of empty array with no initial value"],
- "getter_must_be_callable", ["Getter must be a function: ", "%0"],
- "setter_must_be_callable", ["Setter must be a function: ", "%0"],
- "value_and_accessor", ["Invalid property. A property cannot both have accessors and be writable or have a value, ", "%0"],
- "proto_object_or_null", ["Object prototype may only be an Object or null"],
- "property_desc_object", ["Property description must be an object: ", "%0"],
- "redefine_disallowed", ["Cannot redefine property: ", "%0"],
- "define_disallowed", ["Cannot define property:", "%0", ", object is not extensible."],
- "non_extensible_proto", ["%0", " is not extensible"],
- "handler_non_object", ["Proxy.", "%0", " called with non-object as handler"],
- "proto_non_object", ["Proxy.", "%0", " called with non-object as prototype"],
- "trap_function_expected", ["Proxy.", "%0", " called with non-function for '", "%1", "' trap"],
- "handler_trap_missing", ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
- "handler_trap_must_be_callable", ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
- "handler_returned_false", ["Proxy handler ", "%0", " returned false from '", "%1", "' trap"],
- "handler_returned_undefined", ["Proxy handler ", "%0", " returned undefined from '", "%1", "' trap"],
- "proxy_prop_not_configurable", ["Proxy handler ", "%0", " returned non-configurable descriptor for property '", "%2", "' from '", "%1", "' trap"],
- "proxy_non_object_prop_names", ["Trap '", "%1", "' returned non-object ", "%0"],
- "proxy_repeated_prop_name", ["Trap '", "%1", "' returned repeated property name '", "%2", "'"],
- "invalid_weakmap_key", ["Invalid value used as weak map key"],
- // RangeError
- "invalid_array_length", ["Invalid array length"],
- "stack_overflow", ["Maximum call stack size exceeded"],
- "invalid_time_value", ["Invalid time value"],
- // SyntaxError
- "unable_to_parse", ["Parse error"],
- "invalid_regexp_flags", ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
- "invalid_regexp", ["Invalid RegExp pattern /", "%0", "/"],
- "illegal_break", ["Illegal break statement"],
- "illegal_continue", ["Illegal continue statement"],
- "illegal_return", ["Illegal return statement"],
- "illegal_let", ["Illegal let declaration outside extended mode"],
- "error_loading_debugger", ["Error loading debugger"],
- "no_input_to_regexp", ["No input to ", "%0"],
- "invalid_json", ["String '", "%0", "' is not valid JSON"],
- "circular_structure", ["Converting circular structure to JSON"],
- "called_on_non_object", ["%0", " called on non-object"],
- "called_on_null_or_undefined", ["%0", " called on null or undefined"],
- "array_indexof_not_defined", ["Array.getIndexOf: Argument undefined"],
- "object_not_extensible", ["Can't add property ", "%0", ", object is not extensible"],
- "illegal_access", ["Illegal access"],
- "invalid_preparser_data", ["Invalid preparser data for function ", "%0"],
- "strict_mode_with", ["Strict mode code may not include a with statement"],
- "strict_catch_variable", ["Catch variable may not be eval or arguments in strict mode"],
- "too_many_arguments", ["Too many arguments in function call (only 32766 allowed)"],
- "too_many_parameters", ["Too many parameters in function definition (only 32766 allowed)"],
- "too_many_variables", ["Too many variables declared (only 32767 allowed)"],
- "strict_param_name", ["Parameter name eval or arguments is not allowed in strict mode"],
- "strict_param_dupe", ["Strict mode function may not have duplicate parameter names"],
- "strict_var_name", ["Variable name may not be eval or arguments in strict mode"],
- "strict_function_name", ["Function name may not be eval or arguments in strict mode"],
- "strict_octal_literal", ["Octal literals are not allowed in strict mode."],
- "strict_duplicate_property", ["Duplicate data property in object literal not allowed in strict mode"],
- "accessor_data_property", ["Object literal may not have data and accessor property with the same name"],
- "accessor_get_set", ["Object literal may not have multiple get/set accessors with the same name"],
- "strict_lhs_assignment", ["Assignment to eval or arguments is not allowed in strict mode"],
- "strict_lhs_postfix", ["Postfix increment/decrement may not have eval or arguments operand in strict mode"],
- "strict_lhs_prefix", ["Prefix increment/decrement may not have eval or arguments operand in strict mode"],
- "strict_reserved_word", ["Use of future reserved word in strict mode"],
- "strict_delete", ["Delete of an unqualified identifier in strict mode."],
- "strict_delete_property", ["Cannot delete property '", "%0", "' of ", "%1"],
- "strict_const", ["Use of const in strict mode."],
- "strict_function", ["In strict mode code, functions can only be declared at top level or immediately within another function." ],
- "strict_read_only_property", ["Cannot assign to read only property '", "%0", "' of ", "%1"],
- "strict_cannot_assign", ["Cannot assign to read only '", "%0", "' in strict mode"],
- "strict_poison_pill", ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
- "strict_caller", ["Illegal access to a strict mode caller function."],
- "unprotected_let", ["Illegal let declaration in unprotected statement context."],
- "unprotected_const", ["Illegal const declaration in unprotected statement context."],
- "cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"],
- "redef_external_array_element", ["Cannot redefine a property of an object with external array elements"],
- "harmony_const_assign", ["Assignment to constant variable."],
- "invalid_module_path", ["Module does not export '", "%0", "', or export is not itself a module"],
- "module_type_error", ["Module '", "%0", "' used improperly"],
- ];
- var messages = { __proto__ : null };
- for (var i = 0; i < messagesDictionary.length; i += 2) {
- var key = messagesDictionary[i];
- var format = messagesDictionary[i + 1];
-
- for (var j = 0; j < format.length; j++) {
- %IgnoreAttributesAndSetProperty(format, %_NumberToString(j), format[j],
- DONT_DELETE | READ_ONLY | DONT_ENUM);
- }
- %IgnoreAttributesAndSetProperty(format, 'length', format.length,
- DONT_DELETE | READ_ONLY | DONT_ENUM);
- %PreventExtensions(format);
- %IgnoreAttributesAndSetProperty(messages,
- key,
- format,
- DONT_DELETE | DONT_ENUM | READ_ONLY);
- }
- %PreventExtensions(messages);
- %IgnoreAttributesAndSetProperty(builtins, "kMessages",
- messages,
- DONT_DELETE | DONT_ENUM | READ_ONLY);
- }
- var message_type = %MessageGetType(message);
- var format = kMessages[message_type];
- if (!format) return "<unknown message " + message_type + ">";
- return FormatString(format, message);
+function FormatMessage(type, args) {
+ var format = kMessages[type];
+ if (!format) return "<unknown message " + type + ">";
+ return FormatString(format, args);
}
@@ -525,8 +525,8 @@ function ScriptLineCount() {
/**
- * Returns the name of script if available, contents of sourceURL comment
- * otherwise. See
+ * If sourceURL comment is available and script starts at zero returns sourceURL
+ * comment contents. Otherwise, script name is returned. See
* http://fbug.googlecode.com/svn/branches/firebug1.1/docs/ReleaseNotes_1.1.txt
* for details on using //@ sourceURL comment to identify scritps that don't
* have name.
@@ -535,14 +535,15 @@ function ScriptLineCount() {
* otherwise.
*/
function ScriptNameOrSourceURL() {
- if (this.name) {
+ if (this.line_offset > 0 || this.column_offset > 0) {
return this.name;
}
// The result is cached as on long scripts it takes noticable time to search
// for the sourceURL.
- if (this.hasCachedNameOrSourceURL)
- return this.cachedNameOrSourceURL;
+ if (this.hasCachedNameOrSourceURL) {
+ return this.cachedNameOrSourceURL;
+ }
this.hasCachedNameOrSourceURL = true;
// TODO(608): the spaces in a regexp below had to be escaped as \040
@@ -760,18 +761,18 @@ function DefineOneShotAccessor(obj, name, fun) {
// Note that the accessors consistently operate on 'obj', not 'this'.
// Since the object may occur in someone else's prototype chain we
// can't rely on 'this' being the same as 'obj'.
- var hasBeenSet = false;
var value;
+ var value_factory = fun;
var getter = function() {
- if (hasBeenSet) {
+ if (value_factory == null) {
return value;
}
- hasBeenSet = true;
- value = fun(obj);
+ value = value_factory(obj);
+ value_factory = null;
return value;
};
var setter = function(v) {
- hasBeenSet = true;
+ value_factory = null;
value = v;
};
%DefineOrRedefineAccessorProperty(obj, name, getter, setter, DONT_ENUM);
@@ -788,15 +789,7 @@ function CallSiteGetThis() {
}
function CallSiteGetTypeName() {
- var constructor = this.receiver.constructor;
- if (!constructor) {
- return %_CallFunction(this.receiver, ObjectToString);
- }
- var constructorName = constructor.name;
- if (!constructorName) {
- return %_CallFunction(this.receiver, ObjectToString);
- }
- return constructorName;
+ return GetTypeName(this, false);
}
function CallSiteIsToplevel() {
@@ -830,8 +823,10 @@ function CallSiteGetFunctionName() {
var name = this.fun.name;
if (name) {
return name;
- } else {
- return %FunctionGetInferredName(this.fun);
+ }
+ name = %FunctionGetInferredName(this.fun);
+ if (name) {
+ return name;
}
// Maybe this is an evaluation?
var script = %FunctionGetScript(this.fun);
@@ -859,9 +854,9 @@ function CallSiteGetMethodName() {
}
var name = null;
for (var prop in this.receiver) {
- if (this.receiver.__lookupGetter__(prop) === this.fun ||
- this.receiver.__lookupSetter__(prop) === this.fun ||
- (!this.receiver.__lookupGetter__(prop) &&
+ if (%_CallFunction(this.receiver, prop, ObjectLookupGetter) === this.fun ||
+ %_CallFunction(this.receiver, prop, ObjectLookupSetter) === this.fun ||
+ (!%_CallFunction(this.receiver, prop, ObjectLookupGetter) &&
this.receiver[prop] === this.fun)) {
// If we find more than one match bail out to avoid confusion.
if (name) {
@@ -927,17 +922,25 @@ function CallSiteToString() {
var fileLocation = "";
if (this.isNative()) {
fileLocation = "native";
- } else if (this.isEval()) {
- fileName = this.getScriptNameOrSourceURL();
- if (!fileName) {
- fileLocation = this.getEvalOrigin();
- }
} else {
- fileName = this.getFileName();
- }
+ if (this.isEval()) {
+ fileName = this.getScriptNameOrSourceURL();
+ if (!fileName) {
+ fileLocation = this.getEvalOrigin();
+ fileLocation += ", "; // Expecting source position to follow.
+ }
+ } else {
+ fileName = this.getFileName();
+ }
- if (fileName) {
- fileLocation += fileName;
+ if (fileName) {
+ fileLocation += fileName;
+ } else {
+ // Source code does not originate from a file and is not native, but we
+ // can still get the source position inside the source string, e.g. in
+ // an eval string.
+ fileLocation += "<anonymous>";
+ }
var lineNumber = this.getLineNumber();
if (lineNumber != null) {
fileLocation += ":" + lineNumber;
@@ -948,24 +951,25 @@ function CallSiteToString() {
}
}
- if (!fileLocation) {
- fileLocation = "unknown source";
- }
var line = "";
- var functionName = this.getFunction().name;
- var addPrefix = true;
+ var functionName = this.getFunctionName();
+ var addSuffix = true;
var isConstructor = this.isConstructor();
var isMethodCall = !(this.isToplevel() || isConstructor);
if (isMethodCall) {
+ var typeName = GetTypeName(this, true);
var methodName = this.getMethodName();
- line += this.getTypeName() + ".";
if (functionName) {
+ if (typeName && functionName.indexOf(typeName) != 0) {
+ line += typeName + ".";
+ }
line += functionName;
- if (methodName && (methodName != functionName)) {
+ if (methodName && functionName.lastIndexOf("." + methodName) !=
+ functionName.length - methodName.length - 1) {
line += " [as " + methodName + "]";
}
} else {
- line += methodName || "<anonymous>";
+ line += typeName + "." + (methodName || "<anonymous>");
}
} else if (isConstructor) {
line += "new " + (functionName || "<anonymous>");
@@ -973,9 +977,9 @@ function CallSiteToString() {
line += functionName;
} else {
line += fileLocation;
- addPrefix = false;
+ addSuffix = false;
}
- if (addPrefix) {
+ if (addSuffix) {
line += " (" + fileLocation + ")";
}
return line;
@@ -1085,6 +1089,19 @@ function FormatRawStackTrace(error, raw_stack) {
}
}
+function GetTypeName(obj, requireConstructor) {
+ var constructor = obj.receiver.constructor;
+ if (!constructor) {
+ return requireConstructor ? null :
+ %_CallFunction(obj.receiver, ObjectToString);
+ }
+ var constructorName = constructor.name;
+ if (!constructorName) {
+ return requireConstructor ? null :
+ %_CallFunction(obj.receiver, ObjectToString);
+ }
+ return constructorName;
+}
function captureStackTrace(obj, cons_opt) {
var stackTraceLimit = $Error.stackTraceLimit;
@@ -1138,13 +1155,7 @@ function SetUpError() {
%IgnoreAttributesAndSetProperty(this, 'stack', void 0, DONT_ENUM);
%IgnoreAttributesAndSetProperty(this, 'arguments', void 0, DONT_ENUM);
%IgnoreAttributesAndSetProperty(this, 'type', void 0, DONT_ENUM);
- if (m === kAddMessageAccessorsMarker) {
- // DefineOneShotAccessor always inserts a message property and
- // ignores setters.
- DefineOneShotAccessor(this, 'message', function (obj) {
- return FormatMessage(%NewMessageObject(obj.type, obj.arguments));
- });
- } else if (!IS_UNDEFINED(m)) {
+ if (!IS_UNDEFINED(m)) {
%IgnoreAttributesAndSetProperty(
this, 'message', ToString(m), DONT_ENUM);
}
@@ -1207,11 +1218,6 @@ function ErrorToStringDetectCycle(error) {
var name = GetPropertyWithoutInvokingMonkeyGetters(error, "name");
name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name);
var message = GetPropertyWithoutInvokingMonkeyGetters(error, "message");
- var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty);
- if (type && !hasMessage) {
- var args = GetPropertyWithoutInvokingMonkeyGetters(error, "arguments");
- message = FormatMessage(%NewMessageObject(type, args));
- }
message = IS_UNDEFINED(message) ? "" : TO_STRING_INLINE(message);
if (name === "") return message;
if (message === "") return name;
diff --git a/src/3rdparty/v8/src/mips/assembler-mips-inl.h b/src/3rdparty/v8/src/mips/assembler-mips-inl.h
index 2ff4710..3e726a7 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips-inl.h
+++ b/src/3rdparty/v8/src/mips/assembler-mips-inl.h
@@ -156,6 +156,11 @@ void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
}
+Address Assembler::target_address_from_return_address(Address pc) {
+ return pc - kCallTargetAddressOffset;
+}
+
+
Object* RelocInfo::target_object() {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
@@ -208,10 +213,7 @@ Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
JSGlobalPropertyCell* RelocInfo::target_cell() {
ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
- Address address = Memory::Address_at(pc_);
- Object* object = HeapObject::FromAddress(
- address - JSGlobalPropertyCell::kValueOffset);
- return reinterpret_cast<JSGlobalPropertyCell*>(object);
+ return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}
diff --git a/src/3rdparty/v8/src/mips/assembler-mips.cc b/src/3rdparty/v8/src/mips/assembler-mips.cc
index f347fdc..4ce924d 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/assembler-mips.cc
@@ -273,8 +273,8 @@ static const int kMinimalBufferSize = 4 * KB;
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
- positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code) {
+ recorded_ast_id_(TypeFeedbackId::None()),
+ positions_recorder_(this) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -579,17 +579,20 @@ bool Assembler::IsNop(Instr instr, unsigned int type) {
// See Assembler::nop(type).
ASSERT(type < 32);
uint32_t opcode = GetOpcodeField(instr);
+ uint32_t function = GetFunctionField(instr);
uint32_t rt = GetRt(instr);
- uint32_t rs = GetRs(instr);
+ uint32_t rd = GetRd(instr);
uint32_t sa = GetSa(instr);
- // nop(type) == sll(zero_reg, zero_reg, type);
- // Technically all these values will be 0 but
- // this makes more sense to the reader.
+ // Traditional mips nop == sll(zero_reg, zero_reg, 0)
+ // When marking non-zero type, use sll(zero_reg, at, type)
+ // to avoid use of mips ssnop and ehb special encodings
+ // of the sll instruction.
- bool ret = (opcode == SLL &&
- rt == static_cast<uint32_t>(ToNumber(zero_reg)) &&
- rs == static_cast<uint32_t>(ToNumber(zero_reg)) &&
+ Register nop_rt_reg = (type == 0) ? zero_reg : at;
+ bool ret = (opcode == SPECIAL && function == SLL &&
+ rd == static_cast<uint32_t>(ToNumber(zero_reg)) &&
+ rt == static_cast<uint32_t>(ToNumber(nop_rt_reg)) &&
sa == type);
return ret;
@@ -2046,7 +2049,10 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
}
ASSERT(buffer_space() >= kMaxRelocSize); // Too late to grow buffer here.
if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
- RelocInfo reloc_info_with_ast_id(pc_, rmode, RecordedAstId(), NULL);
+ RelocInfo reloc_info_with_ast_id(pc_,
+ rmode,
+ RecordedAstId().ToInt(),
+ NULL);
ClearRecordedAstId();
reloc_info_writer.Write(&reloc_info_with_ast_id);
} else {
diff --git a/src/3rdparty/v8/src/mips/assembler-mips.h b/src/3rdparty/v8/src/mips/assembler-mips.h
index 84714e5..fd2ff0d 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/assembler-mips.h
@@ -318,12 +318,15 @@ const FPURegister f31 = { 31 };
// Register aliases.
// cp is assumed to be a callee saved register.
-static const Register& kLithiumScratchReg = s3; // Scratch register.
-static const Register& kLithiumScratchReg2 = s4; // Scratch register.
-static const Register& kRootRegister = s6; // Roots array pointer.
-static const Register& cp = s7; // JavaScript context pointer.
-static const DoubleRegister& kLithiumScratchDouble = f30;
-static const FPURegister& kDoubleRegZero = f28;
+// Defined using #define instead of "static const Register&" because Clang
+// complains otherwise when a compilation unit that includes this header
+// doesn't use the variables.
+#define kRootRegister s6
+#define cp s7
+#define kLithiumScratchReg s3
+#define kLithiumScratchReg2 s4
+#define kLithiumScratchDouble f30
+#define kDoubleRegZero f28
// FPU (coprocessor 1) control registers.
// Currently only FCSR (#31) is implemented.
@@ -522,9 +525,6 @@ class Assembler : public AssemblerBase {
Assembler(Isolate* isolate, void* buffer, int buffer_size);
~Assembler();
- // Overrides the default provided by FLAG_debug_code.
- void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -568,6 +568,10 @@ class Assembler : public AssemblerBase {
static Address target_address_at(Address pc);
static void set_target_address_at(Address pc, Address target);
+ // Return the code target address at a call site from the return address
+ // of that call in the instruction stream.
+ inline static Address target_address_from_return_address(Address pc);
+
static void JumpLabelToJumpRegister(Address pc);
static void QuietNaN(HeapObject* nan);
@@ -628,6 +632,8 @@ class Assembler : public AssemblerBase {
// register.
static const int kPcLoadDelta = 4;
+ static const int kPatchDebugBreakSlotReturnOffset = 4 * kInstrSize;
+
// Number of instructions used for the JS return sequence. The constant is
// used by the debugger to patch the JS return sequence.
static const int kJSReturnSequenceInstructions = 7;
@@ -660,10 +666,13 @@ class Assembler : public AssemblerBase {
FIRST_IC_MARKER = PROPERTY_ACCESS_INLINED
};
- // Type == 0 is the default non-marking type.
+ // Type == 0 is the default non-marking nop. For mips this is a
+ // sll(zero_reg, zero_reg, 0). We use rt_reg == at for non-zero
+ // marking, to avoid conflict with ssnop and ehb instructions.
void nop(unsigned int type = 0) {
ASSERT(type < 32);
- sll(zero_reg, zero_reg, type, true);
+ Register nop_rt_reg = (type == 0) ? zero_reg : at;
+ sll(zero_reg, nop_rt_reg, type, true);
}
@@ -909,17 +918,17 @@ class Assembler : public AssemblerBase {
// Record the AST id of the CallIC being compiled, so that it can be placed
// in the relocation information.
- void SetRecordedAstId(unsigned ast_id) {
- ASSERT(recorded_ast_id_ == kNoASTId);
+ void SetRecordedAstId(TypeFeedbackId ast_id) {
+ ASSERT(recorded_ast_id_.IsNone());
recorded_ast_id_ = ast_id;
}
- unsigned RecordedAstId() {
- ASSERT(recorded_ast_id_ != kNoASTId);
+ TypeFeedbackId RecordedAstId() {
+ ASSERT(!recorded_ast_id_.IsNone());
return recorded_ast_id_;
}
- void ClearRecordedAstId() { recorded_ast_id_ = kNoASTId; }
+ void ClearRecordedAstId() { recorded_ast_id_ = TypeFeedbackId::None(); }
// Record a comment relocation entry that can be used by a disassembler.
// Use --code-comments to enable.
@@ -1016,9 +1025,7 @@ class Assembler : public AssemblerBase {
// Relocation for a type-recording IC has the AST id added to it. This
// member variable is a way to pass the information from the call site to
// the relocation info.
- unsigned recorded_ast_id_;
-
- bool emit_debug_code() const { return emit_debug_code_; }
+ TypeFeedbackId recorded_ast_id_;
int32_t buffer_space() const { return reloc_info_writer.pos() - pc_; }
@@ -1270,7 +1277,6 @@ class Assembler : public AssemblerBase {
friend class BlockTrampolinePoolScope;
PositionsRecorder positions_recorder_;
- bool emit_debug_code_;
friend class PositionsRecorder;
friend class EnsureSpace;
};
diff --git a/src/3rdparty/v8/src/mips/builtins-mips.cc b/src/3rdparty/v8/src/mips/builtins-mips.cc
index eeb84c3..0342e65 100644
--- a/src/3rdparty/v8/src/mips/builtins-mips.cc
+++ b/src/3rdparty/v8/src/mips/builtins-mips.cc
@@ -79,12 +79,13 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
Register result) {
- // Load the global context.
+ // Load the native context.
- __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ lw(result,
- FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
- // Load the InternalArray function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ lw(result,
+ FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+ // Load the InternalArray function from the native context.
__ lw(result,
MemOperand(result,
Context::SlotOffset(
@@ -94,12 +95,13 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
- // Load the global context.
+ // Load the native context.
- __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ lw(result,
- FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
- // Load the Array function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ lw(result,
+ FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+ // Load the Array function from the native context.
__ lw(result,
MemOperand(result,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
@@ -118,7 +120,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -214,7 +216,8 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
+ __ LoadInitialArrayMap(array_function, scratch2,
+ elements_array_storage, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ Assert(
@@ -449,10 +452,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ Branch(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// a3: JSArray
__ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
a2,
t5,
@@ -712,6 +715,43 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+ __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
+ __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
+}
+
+
+void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ GenerateTailCallToSharedCode(masm);
+}
+
+
+void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Push a copy of the function onto the stack.
+ __ push(a1);
+ // Push call kind information.
+ __ push(t1);
+
+ __ push(a1); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kParallelRecompile, 1);
+
+ // Restore call kind information.
+ __ pop(t1);
+ // Restore receiver.
+ __ pop(a1);
+
+ // Tear down internal frame.
+ }
+
+ GenerateTailCallToSharedCode(masm);
+}
+
+
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -1392,9 +1432,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// receiver.
__ bind(&use_global_receiver);
const int kGlobalIndex =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ lw(a2, FieldMemOperand(cp, kGlobalIndex));
- __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset));
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
__ lw(a2, FieldMemOperand(a2, kGlobalIndex));
__ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
@@ -1585,9 +1625,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Use the current global receiver object as the receiver.
__ bind(&use_global_receiver);
const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ lw(a0, FieldMemOperand(cp, kGlobalOffset));
- __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
+ __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
__ lw(a0, FieldMemOperand(a0, kGlobalOffset));
__ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
diff --git a/src/3rdparty/v8/src/mips/code-stubs-mips.cc b/src/3rdparty/v8/src/mips/code-stubs-mips.cc
index a5b4edd..b1fe4d5 100644
--- a/src/3rdparty/v8/src/mips/code-stubs-mips.cc
+++ b/src/3rdparty/v8/src/mips/code-stubs-mips.cc
@@ -87,6 +87,8 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure from the given function info in new
// space. Set the context to the current context in cp.
+ Counters* counters = masm->isolate()->counters();
+
Label gc;
// Pop the function info from the stack.
@@ -100,32 +102,44 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
&gc,
TAG_OBJECT);
+ __ IncrementCounter(counters->fast_new_closure_total(), 1, t2, t3);
+
int map_index = (language_mode_ == CLASSIC_MODE)
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
- // Compute the function map in the current global context and set that
+ // Compute the function map in the current native context and set that
// as the map of the allocated object.
- __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset));
- __ lw(a2, MemOperand(a2, Context::SlotOffset(map_index)));
- __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
+ __ lw(t1, MemOperand(a2, Context::SlotOffset(map_index)));
+ __ sw(t1, FieldMemOperand(v0, HeapObject::kMapOffset));
// Initialize the rest of the function. We don't have to update the
// write barrier because the allocated object is in new space.
__ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
- __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
- __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ sw(a1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
__ sw(a1, FieldMemOperand(v0, JSObject::kElementsOffset));
- __ sw(a2, FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
+ __ sw(t1, FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
__ sw(a3, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
__ sw(cp, FieldMemOperand(v0, JSFunction::kContextOffset));
__ sw(a1, FieldMemOperand(v0, JSFunction::kLiteralsOffset));
- __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset));
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ lw(a1,
+ FieldMemOperand(a3, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ And(at, a1, a1);
+ __ Branch(&check_optimized, ne, at, Operand(zero_reg));
+ }
+ __ bind(&install_unoptimized);
+ __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
+ __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset));
__ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
__ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -133,6 +147,72 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));
__ Ret();
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, t2, t3);
+
+ // a2 holds native context, a1 points to fixed array of 3-element entries
+ // (native context, optimized code, literals).
+ // The optimized code map must never be empty, so check the first elements.
+ Label install_optimized;
+ // Speculatively move code object into t0.
+ __ lw(t0, FieldMemOperand(a1, FixedArray::kHeaderSize + kPointerSize));
+ __ lw(t1, FieldMemOperand(a1, FixedArray::kHeaderSize));
+ __ Branch(&install_optimized, eq, a2, Operand(t1));
+
+ // Iterate through the rest of map backwards. t0 holds an index as a Smi.
+ Label loop;
+ __ lw(t0, FieldMemOperand(a1, FixedArray::kLengthOffset));
+ __ bind(&loop);
+ // Do not double check first entry.
+
+ __ Branch(&install_unoptimized, eq, t0,
+ Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
+ __ Subu(t0, t0, Operand(
+ Smi::FromInt(SharedFunctionInfo::kEntryLength))); // Skip an entry.
+ __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(t1, t1, Operand(at));
+ __ lw(t1, MemOperand(t1));
+ __ Branch(&loop, ne, a2, Operand(t1));
+ // Hit: fetch the optimized code.
+ __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(t1, t1, Operand(at));
+ __ Addu(t1, t1, Operand(kPointerSize));
+ __ lw(t0, MemOperand(t1));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(),
+ 1, t2, t3);
+
+ // TODO(fschneider): Idea: store proper code pointers in the map and either
+ // unmangle them on marking or do nothing as the whole map is discarded on
+ // major GC anyway.
+ __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ sw(t0, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));
+
+ // Now link a function into a list of optimized functions.
+ __ lw(t0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset));
+ // No need for write barrier as JSFunction (eax) is in the new space.
+
+ __ sw(v0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));
+ // Store JSFunction (eax) into edx before issuing write barrier as
+ // it clobbers all the registers passed.
+ __ mov(t0, v0);
+ __ RecordWriteContextSlot(
+ a2,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ t0,
+ a1,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs);
+
+ // Return result. The argument function info has been popped already.
+ __ Ret();
+
// Create a new closure through the slower runtime call.
__ bind(&gc);
__ LoadRoot(t0, Heap::kFalseValueRootIndex);
@@ -164,16 +244,16 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
// Set up the fixed slots, copy the global object from the previous context.
- __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ li(a1, Operand(Smi::FromInt(0)));
__ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
__ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX)));
- __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
// Copy the qml global object from the surrounding context.
- __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
+ __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
+ __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
// Initialize the rest of the slots to undefined.
@@ -216,9 +296,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ li(a2, Operand(Smi::FromInt(length)));
__ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
- // If this block context is nested in the global context we get a smi
+ // If this block context is nested in the native context we get a smi
// sentinel instead of a function. The block context should get the
- // canonical empty function of the global context as its closure which
+ // canonical empty function of the native context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(a3, &after_sentinel);
@@ -227,20 +307,20 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ Assert(eq, message, a3, Operand(zero_reg));
}
__ lw(a3, GlobalObjectOperand());
- __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
+ __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
__ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
// Set up the fixed slots, copy the global object from the previous context.
- __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
__ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
__ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
- __ sw(a2, ContextOperand(v0, Context::GLOBAL_INDEX));
+ __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX));
// Copy the qml global object from the surrounding context.
- __ lw(a1, ContextOperand(cp, Context::QML_GLOBAL_INDEX));
- __ sw(a1, ContextOperand(v0, Context::QML_GLOBAL_INDEX));
+ __ lw(a1, ContextOperand(cp, Context::QML_GLOBAL_OBJECT_INDEX));
+ __ sw(a1, ContextOperand(v0, Context::QML_GLOBAL_OBJECT_INDEX));
// Initialize the rest of the slots to the hole value.
__ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
@@ -584,11 +664,9 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Register scratch1,
Register scratch2,
Label* not_number) {
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
Label is_smi, done;
@@ -650,11 +728,9 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Register scratch3,
FPURegister double_scratch,
Label* not_number) {
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
Label done;
Label not_in_int32_range;
@@ -790,11 +866,9 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
__ Branch(&done);
__ bind(&obj_is_not_smi);
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Load the number.
@@ -861,11 +935,9 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
__ UntagAndJumpIfSmi(dst, object, &done);
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Object is a heap number.
@@ -2588,9 +2660,9 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
Register scratch3 = t0;
ASSERT(smi_operands || (not_numbers != NULL));
- if (smi_operands && FLAG_debug_code) {
- __ AbortIfNotSmi(left);
- __ AbortIfNotSmi(right);
+ if (smi_operands) {
+ __ AssertSmi(left);
+ __ AssertSmi(right);
}
Register heap_number_map = t2;
@@ -3179,7 +3251,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
} else {
// Tail call that writes the int32 in a2 to the heap number in v0, using
// a3 and a0 as scratch. v0 is preserved and returned.
- __ mov(a0, t1);
+ __ mov(v0, t1);
WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
__ TailCallStub(&stub);
}
@@ -3502,23 +3574,23 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
1,
1);
} else {
- if (!CpuFeatures::IsSupported(FPU)) UNREACHABLE();
+ ASSERT(CpuFeatures::IsSupported(FPU));
CpuFeatures::Scope scope(FPU);
Label no_update;
Label skip_cache;
// Call C function to calculate the result and update the cache.
- // Register a0 holds precalculated cache entry address; preserve
- // it on the stack and pop it into register cache_entry after the
- // call.
- __ Push(cache_entry, a2, a3);
+ // a0: precalculated cache entry address.
+ // a2 and a3: parts of the double value.
+ // Store a0, a2 and a3 on stack for later before calling C function.
+ __ Push(a3, a2, cache_entry);
GenerateCallCFunction(masm, scratch0);
__ GetCFunctionDoubleResult(f4);
// Try to update the cache. If we cannot allocate a
// heap number, we return the result without updating.
- __ Pop(cache_entry, a2, a3);
+ __ Pop(a3, a2, cache_entry);
__ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(t2, scratch0, scratch1, t1, &no_update);
__ sdc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset));
@@ -4615,14 +4687,14 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// v0 = address of new object(s) (tagged)
// a2 = argument count (tagged)
- // Get the arguments boilerplate from the current (global) context into t0.
+ // Get the arguments boilerplate from the current native context into t0.
const int kNormalOffset =
Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
- __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ lw(t0, FieldMemOperand(t0, GlobalObject::kGlobalContextOffset));
+ __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset));
Label skip2_ne, skip2_eq;
__ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
__ lw(t0, MemOperand(t0, kNormalOffset));
@@ -4810,9 +4882,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
static_cast<AllocationFlags>(TAG_OBJECT |
SIZE_IN_WORDS));
- // Get the arguments boilerplate from the current (global) context.
- __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ lw(t0, FieldMemOperand(t0, GlobalObject::kGlobalContextOffset));
+ // Get the arguments boilerplate from the current native context.
+ __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset));
__ lw(t0, MemOperand(t0, Context::SlotOffset(
Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
@@ -4946,7 +5018,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
__ Addu(a2, a2, Operand(2)); // a2 was a smi.
// Check that the static offsets vector buffer is large enough.
- __ Branch(&runtime, hi, a2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
+ __ Branch(
+ &runtime, hi, a2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize));
// a2: Number of capture registers
// regexp_data: RegExp data (FixedArray)
@@ -5059,7 +5132,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// regexp_data: RegExp data (FixedArray)
// a0: Instance type of subject string
STATIC_ASSERT(kStringEncodingMask == 4);
- STATIC_ASSERT(kAsciiStringTag == 4);
+ STATIC_ASSERT(kOneByteStringTag == 4);
STATIC_ASSERT(kTwoByteStringTag == 0);
// Find the code object based on the assumptions above.
__ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ASCII.
@@ -5092,7 +5165,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
1, a0, a2);
// Isolates: note we add an additional parameter here (isolate pointer).
- const int kRegExpExecuteArguments = 8;
+ const int kRegExpExecuteArguments = 9;
const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
@@ -5103,27 +5176,33 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// allocating space for the c argument slots, we don't need to calculate
// that into the argument positions on the stack. This is how the stack will
// look (sp meaning the value of sp at this moment):
+ // [sp + 5] - Argument 9
// [sp + 4] - Argument 8
// [sp + 3] - Argument 7
// [sp + 2] - Argument 6
// [sp + 1] - Argument 5
// [sp + 0] - saved ra
- // Argument 8: Pass current isolate address.
+ // Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
__ li(a0, Operand(ExternalReference::isolate_address()));
- __ sw(a0, MemOperand(sp, 4 * kPointerSize));
+ __ sw(a0, MemOperand(sp, 5 * kPointerSize));
- // Argument 7: Indicate that this is a direct call from JavaScript.
+ // Argument 8: Indicate that this is a direct call from JavaScript.
__ li(a0, Operand(1));
- __ sw(a0, MemOperand(sp, 3 * kPointerSize));
+ __ sw(a0, MemOperand(sp, 4 * kPointerSize));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ li(a0, Operand(address_of_regexp_stack_memory_address));
__ lw(a0, MemOperand(a0, 0));
__ li(a2, Operand(address_of_regexp_stack_memory_size));
__ lw(a2, MemOperand(a2, 0));
__ addu(a0, a0, a2);
+ __ sw(a0, MemOperand(sp, 3 * kPointerSize));
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(a0, zero_reg);
__ sw(a0, MemOperand(sp, 2 * kPointerSize));
// Argument 5: static offsets vector buffer.
@@ -5174,7 +5253,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ Branch(&success, eq, v0, Operand(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
Label failure;
__ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
// If not exception it can only be retry. Handle that in the runtime system.
@@ -5337,10 +5418,10 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set empty properties FixedArray.
// Set elements to point to FixedArray allocated right after the JSArray.
// Interleave operations for better latency.
- __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ Addu(a3, v0, Operand(JSRegExpResult::kSize));
__ li(t0, Operand(masm->isolate()->factory()->empty_fixed_array()));
- __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset));
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
__ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
__ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX));
__ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
@@ -5365,12 +5446,12 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set FixedArray length.
__ sll(t2, t1, kSmiTagSize);
__ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset));
- // Fill contents of fixed-array with the-hole.
- __ li(a2, Operand(masm->isolate()->factory()->the_hole_value()));
+ // Fill contents of fixed-array with undefined.
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- // Fill fixed array elements with hole.
+ // Fill fixed array elements with undefined.
// v0: JSArray, tagged.
- // a2: the hole.
+ // a2: undefined.
// a3: Start of elements in FixedArray.
// t1: Number of elements to fill.
Label loop;
@@ -5449,7 +5530,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&call, ne, t0, Operand(at));
// Patch the receiver on the stack with the global receiver object.
- __ lw(a3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(a3,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalReceiverOffset));
__ sw(a3, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
@@ -6217,7 +6299,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ And(t0, a1, Operand(kStringEncodingMask));
__ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
@@ -6260,7 +6342,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&allocate_result);
// Sequential acii string. Allocate the result.
- STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ And(t0, a1, Operand(kStringEncodingMask));
__ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
@@ -6632,9 +6714,10 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ Branch(&ascii_data, ne, at, Operand(zero_reg));
__ xor_(t0, t0, t1);
- STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
- __ And(t0, t0, Operand(kAsciiStringTag | kAsciiDataHintTag));
- __ Branch(&ascii_data, eq, t0, Operand(kAsciiStringTag | kAsciiDataHintTag));
+ STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+ __ And(t0, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
+ __ Branch(
+ &ascii_data, eq, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
// Allocate a two byte cons string.
__ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
@@ -7244,8 +7327,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
ASSERT(!name.is(scratch1));
ASSERT(!name.is(scratch2));
- // Assert that name contains a string.
- if (FLAG_debug_code) __ AbortIfNotString(name);
+ __ AssertString(name);
// Compute the capacity mask.
__ lw(scratch1, FieldMemOperand(elements, kCapacityOffset));
@@ -7431,8 +7513,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
{ REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
{ REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
@@ -7441,6 +7523,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
{ REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
// StoreArrayLiteralElementStub::Generate
{ REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
+ // FastNewClosureStub::Generate
+ { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
@@ -7489,6 +7573,11 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
}
+bool CodeStub::CanUseFPRegisters() {
+ return CpuFeatures::IsSupported(FPU);
+}
+
+
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
@@ -7614,6 +7703,16 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_scratch;
+ __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
+ __ lw(regs_.scratch1(),
+ MemOperand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1));
+ __ sw(regs_.scratch1(),
+ MemOperand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
+
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -7698,9 +7797,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
__ CheckFastElements(a2, t1, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
__ JumpIfSmi(a0, &smi_element);
- __ CheckFastSmiOnlyElements(a2, t1, &fast_elements);
+ __ CheckFastSmiElements(a2, t1, &fast_elements);
// Store into the array literal requires a elements transition. Call into
// the runtime.
@@ -7712,7 +7811,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Push(t1, t0);
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
__ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
@@ -7725,8 +7824,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
__ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
@@ -7735,16 +7834,78 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,
+ __ StoreNumberToDoubleElements(a0, a3, a1,
+ // Overwrites all regs after this.
+ t1, t2, t3, t5, a2,
&slow_elements);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
}
+void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
+ if (entry_hook_ != NULL) {
+ ProfileEntryHookStub stub;
+ __ push(ra);
+ __ CallStub(&stub);
+ __ pop(ra);
+ }
+}
+
+
+void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
+ // The entry hook is a "push ra" instruction, followed by a call.
+ // Note: on MIPS "push" is 2 instruction
+ const int32_t kReturnAddressDistanceFromFunctionStart =
+ Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
+
+ // Save live volatile registers.
+ __ Push(ra, t1, a1);
+ const int32_t kNumSavedRegs = 3;
+
+ // Compute the function's address for the first argument.
+ __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
+
+ // The caller's return address is above the saved temporaries.
+ // Grab that for the second argument to the hook.
+ __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
+
+ // Align the stack if necessary.
+ int frame_alignment = masm->ActivationFrameAlignment();
+ if (frame_alignment > kPointerSize) {
+ __ mov(t1, sp);
+ ASSERT(IsPowerOf2(frame_alignment));
+ __ And(sp, sp, Operand(-frame_alignment));
+ }
+
+#if defined(V8_HOST_ARCH_MIPS)
+ __ li(at, Operand(reinterpret_cast<int32_t>(&entry_hook_)));
+ __ lw(at, MemOperand(at));
+#else
+ // Under the simulator we need to indirect the entry hook through a
+ // trampoline function at a known address.
+ Address trampoline_address = reinterpret_cast<Address>(
+ reinterpret_cast<intptr_t>(EntryHookTrampoline));
+ ApiFunction dispatcher(trampoline_address);
+ __ li(at, Operand(ExternalReference(&dispatcher,
+ ExternalReference::BUILTIN_CALL,
+ masm->isolate())));
+#endif
+ __ Call(at);
+
+ // Restore the stack pointer if needed.
+ if (frame_alignment > kPointerSize) {
+ __ mov(sp, t1);
+ }
+
+ __ Pop(ra, t1, a1);
+ __ Ret();
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/mips/codegen-mips.cc b/src/3rdparty/v8/src/mips/codegen-mips.cc
index 9acccdc..44e0359 100644
--- a/src/3rdparty/v8/src/mips/codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/codegen-mips.cc
@@ -72,7 +72,7 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
// -------------------------------------------------------------------------
// Code generators
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : value
@@ -95,7 +95,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- a0 : value
diff --git a/src/3rdparty/v8/src/mips/deoptimizer-mips.cc b/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
index 62f3155..9fd815b 100644
--- a/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
+++ b/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
@@ -48,6 +48,10 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
+ // The optimized code is going to be patched, so we cannot use it
+ // any more. Play safe and reset the whole cache.
+ function->shared()->ClearOptimizedCodeMap();
+
// Get the optimized code.
Code* code = function->code();
Address code_start_address = code->instruction_start();
@@ -96,8 +100,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
- // Set the code for the function to non-optimized version.
- function->ReplaceCode(function->shared()->code());
+ ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
@@ -186,11 +189,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
}
-static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
+static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
ByteArray* translations = data->TranslationByteArray();
int length = data->DeoptCount();
for (int i = 0; i < length; i++) {
- if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
+ if (data->AstId(i) == ast_id) {
TranslationIterator it(translations, data->TranslationIndex(i)->value());
int value = it.Next();
ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
@@ -209,7 +212,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
optimized_code_->deoptimization_data());
unsigned ast_id = data->OsrAstId()->value();
- int bailout_id = LookupBailoutId(data, ast_id);
+ int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
unsigned translation_index = data->TranslationIndex(bailout_id)->value();
ByteArray* translations = data->TranslationByteArray();
@@ -229,9 +232,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
- USE(function);
- ASSERT(function == function_);
+ int closure_id = iterator.Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
unsigned height = iterator.Next();
unsigned height_in_bytes = height * kPointerSize;
USE(height_in_bytes);
@@ -342,8 +345,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
if (FLAG_trace_osr) {
PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
ok ? "finished" : "aborted",
- reinterpret_cast<intptr_t>(function));
- function->PrintName();
+ reinterpret_cast<intptr_t>(function_));
+ function_->PrintName();
PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
}
}
@@ -567,19 +570,145 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
+ int frame_index,
+ bool is_setter_stub_frame) {
+ JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ // The receiver (and the implicit return value, if any) are expected in
+ // registers by the LoadIC/StoreIC, so they don't belong to the output stack
+ // frame. This means that we have to use a height of 0.
+ unsigned height = 0;
+ unsigned height_in_bytes = height * kPointerSize;
+ const char* kind = is_setter_stub_frame ? "setter" : "getter";
+ if (FLAG_trace_deopt) {
+ PrintF(" translating %s stub => height=%u\n", kind, height_in_bytes);
+ }
+
+ // We need 5 stack entries from StackFrame::INTERNAL (ra, fp, cp, frame type,
+ // code object, see MacroAssembler::EnterFrame). For a setter stub frame we
+ // need one additional entry for the implicit return value, see
+ // StoreStubCompiler::CompileStoreViaSetter.
+ unsigned fixed_frame_entries = 5 + (is_setter_stub_frame ? 1 : 0);
+ unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, accessor);
+ output_frame->SetFrameType(StackFrame::INTERNAL);
+
+ // A frame for an accessor stub can not be the topmost or bottommost one.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous frame's top and
+ // this frame's size.
+ uint32_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ unsigned output_offset = output_frame_size;
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's pc\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; function (%s sentinel)\n",
+ top_address + output_offset, output_offset, value, kind);
+ }
+
+ // Get Code object from accessor stub.
+ output_offset -= kPointerSize;
+ Builtins::Name name = is_setter_stub_frame ?
+ Builtins::kStoreIC_Setter_ForDeopt :
+ Builtins::kLoadIC_Getter_ForDeopt;
+ Code* accessor_stub = isolate_->builtins()->builtin(name);
+ value = reinterpret_cast<intptr_t>(accessor_stub);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; code object\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Skip receiver.
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator->Next());
+ iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+
+ if (is_setter_stub_frame) {
+ // The implicit return value was part of the artificial setter stub
+ // environment.
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Smi* offset = is_setter_stub_frame ?
+ isolate_->heap()->setter_stub_deopt_pc_offset() :
+ isolate_->heap()->getter_stub_deopt_pc_offset();
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ accessor_stub->instruction_start() + offset->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32/arm code, but relies on register names
// (fp, sp) and how the frame is laid out.
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
// Read the ast node id, function, and frame height for this output frame.
- int node_id = iterator->Next();
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ BailoutId node_id = BailoutId(iterator->Next());
+ JSFunction* function;
+ if (frame_index != 0) {
+ function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ } else {
+ int closure_id = iterator->Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
+ function = function_;
+ }
unsigned height = iterator->Next();
unsigned height_in_bytes = height * kPointerSize;
if (FLAG_trace_deopt) {
PrintF(" translating ");
function->PrintName();
- PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
+ PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
}
// The 'fixed' part of the frame consists of the incoming parameters and
diff --git a/src/3rdparty/v8/src/mips/full-codegen-mips.cc b/src/3rdparty/v8/src/mips/full-codegen-mips.cc
index 842232a..46c9ecb 100644
--- a/src/3rdparty/v8/src/mips/full-codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/full-codegen-mips.cc
@@ -143,6 +143,8 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -191,11 +193,14 @@ void FullCodeGenerator::Generate() {
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0 ||
(scope()->is_qml_mode() && scope()->is_global_scope())) {
- Comment cmnt(masm_, "[ Allocate local context");
- // Argument to NewContext is the function, which is in a1.
+ Comment cmnt(masm_, "[ Allocate context");
+ // Argument to NewContext is the function, which is still in a1.
__ push(a1);
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ Push(info->scope()->GetScopeInfo());
+ __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -271,7 +276,7 @@ void FullCodeGenerator::Generate() {
scope()->VisitIllegalRedeclaration(this);
} else {
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
@@ -286,7 +291,7 @@ void FullCodeGenerator::Generate() {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
__ LoadRoot(t0, Heap::kStackLimitRootIndex);
__ Branch(&ok, hs, sp, Operand(t0));
@@ -333,7 +338,7 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
if (isolate()->IsDebuggerActive()) {
// Detect debug break requests as soon as possible.
- reset_value = 10;
+ reset_value = FLAG_interrupt_budget >> 4;
}
__ li(a2, Operand(profiling_counter_));
__ li(a3, Operand(Smi::FromInt(reset_value)));
@@ -341,10 +346,6 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
-static const int kMaxBackEdgeWeight = 127;
-static const int kBackEdgeDistanceDivisor = 142;
-
-
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
// The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
@@ -361,7 +362,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(back_edge_target->is_bound());
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
__ slt(at, a3, zero_reg);
@@ -414,7 +415,7 @@ void FullCodeGenerator::EmitReturnSequence() {
} else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
Label ok;
@@ -794,7 +795,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
// Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
@@ -818,11 +819,12 @@ void FullCodeGenerator::VisitVariableDeclaration(
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
globals_->Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(),
+ zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
case Variable::PARAMETER:
@@ -849,10 +851,9 @@ void FullCodeGenerator::VisitVariableDeclaration(
Comment cmnt(masm_, "[ VariableDeclaration");
__ li(a2, Operand(variable->name()));
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR || mode == LET ||
- mode == CONST || mode == CONST_HARMONY);
- PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
- ? READ_ONLY : NONE;
+ ASSERT(IsDeclaredVariableMode(mode));
+ PropertyAttributes attr =
+ IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
__ li(a1, Operand(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -879,13 +880,13 @@ void FullCodeGenerator::VisitFunctionDeclaration(
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_->Add(function);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(function, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
}
@@ -939,9 +940,9 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
switch (variable->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name());
- globals_->Add(instance);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
Visit(declaration->module());
break;
}
@@ -1144,25 +1145,32 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// modification check. Otherwise, we got a fixed array, and we have
// to do a slow check.
Label fixed_array;
- __ mov(a2, v0);
- __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
+ __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kMetaMapRootIndex);
- __ Branch(&fixed_array, ne, a1, Operand(at));
+ __ Branch(&fixed_array, ne, a2, Operand(at));
// We got a map in register v0. Get the enumeration cache from it.
+ Label no_descriptors;
__ bind(&use_cache);
- __ LoadInstanceDescriptors(v0, a1);
- __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
- __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
+
+ __ EnumLength(a1, v0);
+ __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
+
+ __ LoadInstanceDescriptors(v0, a2);
+ __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
+ __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(v0); // Map.
- __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ li(a0, Operand(Smi::FromInt(0)));
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(a2, a1, a0);
__ jmp(&loop);
+ __ bind(&no_descriptors);
+ __ Drop(1);
+ __ jmp(&exit);
+
// We got a fixed array in register v0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
@@ -1171,7 +1179,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
isolate()->factory()->NewJSGlobalPropertyCell(
Handle<Object>(
Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
- RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
__ LoadHeapObject(a1, cell);
__ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
__ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));
@@ -1327,9 +1335,9 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ Move(next, current);
}
__ bind(&loop);
- // Terminate at global context.
+ // Terminate at native context.
__ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
- __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
+ __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
__ Branch(&fast, eq, temp, Operand(t0));
// Check that extension is NULL.
__ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
@@ -1615,9 +1623,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
- AccessorTable accessor_table(isolate()->zone());
+ AccessorTable accessor_table(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1644,7 +1652,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1715,7 +1723,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1737,8 +1746,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1767,7 +1775,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ lw(t2, MemOperand(sp)); // Copy of array literal.
__ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
@@ -1855,11 +1863,11 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
}
}
@@ -1917,7 +1925,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ li(a2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name a0 and a2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1926,7 +1934,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1954,7 +1962,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -2037,7 +2046,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
@@ -2169,7 +2179,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, a1);
- if (FLAG_debug_code && op == Token::INIT_LET) {
+ if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
@@ -2202,44 +2212,17 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ lw(t0, MemOperand(sp, kPointerSize)); // Receiver is now under value.
- __ push(t0);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ mov(a0, result_register()); // Load the value.
__ li(a2, Operand(prop->key()->AsLiteral()->handle()));
- // Load receiver to a1. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ lw(a1, MemOperand(sp));
- } else {
- __ pop(a1);
- }
+ __ pop(a1);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(v0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ lw(t0, MemOperand(sp, kPointerSize));
- __ push(t0);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(v0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
}
@@ -2248,18 +2231,6 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ lw(t0, MemOperand(sp, 2 * kPointerSize));
- __ push(t0);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
// Call keyed store IC.
@@ -2269,29 +2240,13 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// - a2 is the receiver.
__ mov(a0, result_register());
__ pop(a1); // Key.
- // Load receiver to a2. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ lw(a2, MemOperand(sp));
- } else {
- __ pop(a2);
- }
+ __ pop(a2);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(v0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ lw(t0, MemOperand(sp, kPointerSize));
- __ push(t0);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(v0);
- __ Drop(1);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
+
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
}
@@ -2304,6 +2259,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(v0);
} else {
VisitForStackValue(expr->obj());
@@ -2317,9 +2273,9 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId id) {
ic_total_count_++;
- __ Call(code, rmode, ast_id);
+ __ Call(code, rmode, id);
}
@@ -2340,7 +2296,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2373,7 +2329,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2393,16 +2349,14 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
- // Record call targets in unoptimized code, but not in the snapshot.
- if (!Serializer::enabled()) {
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ li(a2, Operand(cell));
- }
+ // Record call targets.
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ li(a2, Operand(cell));
CallFunctionStub stub(arg_count, flags);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
@@ -2595,21 +2549,15 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ li(a0, Operand(arg_count));
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
- // Record call targets in unoptimized code, but not in the snapshot.
- CallFunctionFlags flags;
- if (!Serializer::enabled()) {
- flags = RECORD_CALL_TARGET;
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ li(a2, Operand(cell));
- } else {
- flags = NO_CALL_FUNCTION_FLAGS;
- }
+ // Record call targets in unoptimized code.
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ li(a2, Operand(cell));
- CallConstructStub stub(flags);
+ CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
@@ -2750,7 +2698,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (FLAG_debug_code) __ AbortIfSmi(v0);
+ __ AssertNotSmi(v0);
__ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
@@ -2764,28 +2712,31 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ Branch(if_false, eq, a2, Operand(t0));
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(a3, a1);
+ __ Branch(&done, eq, a3, Operand(zero_reg));
+
__ LoadInstanceDescriptors(a1, t0);
- __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset));
- // t0: descriptor array
- // a3: length of descriptor array
- // Calculate the end of the descriptor array.
+ // t0: descriptor array.
+ // a3: valid entries in the descriptor array.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kPointerSize == 4);
- __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ li(at, Operand(DescriptorArray::kDescriptorSize));
+ __ Mul(a3, a3, at);
+ // Calculate location of the first key name.
+ __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
+ // Calculate the end of the descriptor array.
+ __ mov(a2, t0);
__ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(a2, a2, t1);
- // Calculate location of the first key name.
- __ Addu(t0,
- t0,
- Operand(FixedArray::kHeaderSize - kHeapObjectTag +
- DescriptorArray::kFirstIndex * kPointerSize));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
// The use of t2 to store the valueOf symbol asumes that it is not otherwise
// used in the loop below.
__ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
@@ -2793,17 +2744,18 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ bind(&loop);
__ lw(a3, MemOperand(t0, 0));
__ Branch(if_false, eq, a3, Operand(t2));
- __ Addu(t0, t0, Operand(kPointerSize));
+ __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
__ bind(&entry);
__ Branch(&loop, ne, t0, Operand(a2));
- // If a valueOf property is not found on the object check that it's
+ __ bind(&done);
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
__ JumpIfSmi(a2, if_false);
__ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
- __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
+ __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
__ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ Branch(if_false, ne, a2, Operand(a3));
@@ -3083,8 +3035,8 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
if (CpuFeatures::IsSupported(FPU)) {
__ PrepareCallCFunction(1, a0);
- __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
+ __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
CpuFeatures::Scope scope(FPU);
@@ -3101,8 +3053,8 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
} else {
__ PrepareCallCFunction(2, a0);
__ mov(a0, s0);
- __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
+ __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
__ CallCFunction(
ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
}
@@ -3166,21 +3118,19 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); // Load the object.
- Label runtime, done;
+ Label runtime, done, not_date_object;
Register object = v0;
Register result = v0;
Register scratch0 = t5;
Register scratch1 = a1;
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ JumpIfSmi(object, &not_date_object);
__ GetObjectType(object, scratch1, scratch1);
- __ Assert(eq, "Trying to get date field from non-date.",
- scratch1, Operand(JS_DATE_TYPE));
-#endif
+ __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
if (index->value() == 0) {
__ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
+ __ jmp(&done);
} else {
if (index->value() < JSDate::kFirstUncachedField) {
ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
@@ -3197,9 +3147,12 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
__ li(a1, Operand(index));
__ Move(a0, object);
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
- __ bind(&done);
+ __ jmp(&done);
}
+ __ bind(&not_date_object);
+ __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ bind(&done);
context()->Plug(v0);
}
@@ -3474,10 +3427,11 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
- // Check for proxy.
- Label proxy, done;
+ Label runtime, done;
+ // Check for non-function argument (including proxy).
+ __ JumpIfSmi(v0, &runtime);
__ GetObjectType(v0, a1, a1);
- __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
// InvokeFunction requires the function in a1. Move it in there.
__ mov(a1, result_register());
@@ -3487,7 +3441,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
- __ bind(&proxy);
+ __ bind(&runtime);
__ push(v0);
__ CallRuntime(Runtime::kCall, args->length());
__ bind(&done);
@@ -3516,7 +3470,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- isolate()->global_context()->jsfunction_result_caches());
+ isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
@@ -3528,8 +3482,8 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Register key = v0;
Register cache = a1;
- __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
+ __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
+ __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
__ lw(cache,
ContextOperand(
cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
@@ -3625,9 +3579,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(v0);
- }
+ __ AssertString(v0);
__ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
__ IndexFromHash(v0, v0);
@@ -3701,7 +3653,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// string_length: Accumulated sum of string lengths (smi).
// element: Current array element.
// elements_end: Array end.
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
array_length, Operand(zero_reg));
}
@@ -3904,7 +3856,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -4060,7 +4012,8 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
__ mov(a0, result_register());
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
context()->Plug(v0);
}
@@ -4118,7 +4071,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
- PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
@@ -4171,7 +4124,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4204,7 +4157,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4222,7 +4175,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4426,7 +4379,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
@@ -4507,7 +4460,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
if (declaration_scope->is_global_scope() ||
declaration_scope->is_module_scope()) {
- // Contexts nested in the global context have a canonical empty function
+ // Contexts nested in the native context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
// function.
@@ -4537,14 +4490,57 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
STATIC_ASSERT(0 == kSmiTag);
__ Addu(a1, a1, Operand(a1)); // Convert to smi.
+
+ // Store result register while executing finally block.
+ __ push(a1);
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ li(at, Operand(pending_message_obj));
+ __ lw(a1, MemOperand(at));
+ __ push(a1);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ li(at, Operand(has_pending_message));
+ __ lw(a1, MemOperand(at));
+ __ SmiTag(a1);
+ __ push(a1);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ li(at, Operand(pending_message_script));
+ __ lw(a1, MemOperand(at));
__ push(a1);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(a1));
+ // Restore pending message from stack.
+ __ pop(a1);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ li(at, Operand(pending_message_script));
+ __ sw(a1, MemOperand(at));
+
+ __ pop(a1);
+ __ SmiUntag(a1);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ li(at, Operand(has_pending_message));
+ __ sw(a1, MemOperand(at));
+
+ __ pop(a1);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ li(at, Operand(pending_message_obj));
+ __ sw(a1, MemOperand(at));
+
// Restore result register from stack.
__ pop(a1);
+
// Uncook return address and return.
__ pop(result_register());
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
diff --git a/src/3rdparty/v8/src/mips/ic-mips.cc b/src/3rdparty/v8/src/mips/ic-mips.cc
index 964a7e2..cf70681 100644
--- a/src/3rdparty/v8/src/mips/ic-mips.cc
+++ b/src/3rdparty/v8/src/mips/ic-mips.cc
@@ -398,7 +398,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
Code::Flags flags = Code::ComputeFlags(kind,
MONOMORPHIC,
extra_state,
- NORMAL,
+ Code::NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
masm, flags, a1, a2, a3, t0, t1, t2);
@@ -1189,6 +1189,145 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
}
+static void KeyedStoreGenerateGenericHelper(
+ MacroAssembler* masm,
+ Label* fast_object,
+ Label* fast_double,
+ Label* slow,
+ KeyedStoreCheckMap check_map,
+ KeyedStoreIncrementLength increment_length,
+ Register value,
+ Register key,
+ Register receiver,
+ Register receiver_map,
+ Register elements_map,
+ Register elements) {
+ Label transition_smi_elements;
+ Label finish_object_store, non_double_value, transition_double_elements;
+ Label fast_double_without_map_check;
+
+ // Fast case: Do the store, could be either Object or double.
+ __ bind(fast_object);
+ Register scratch_value = t0;
+ Register address = t1;
+ if (check_map == kCheckMap) {
+ __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ Branch(fast_double, ne, elements_map,
+ Operand(masm->isolate()->factory()->fixed_array_map()));
+ }
+ // Smi stores don't require further checks.
+ Label non_smi_value;
+ __ JumpIfNotSmi(value, &non_smi_value);
+
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ // It's irrelevant whether array is smi-only or not when writing a smi.
+ __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(address, address, scratch_value);
+ __ sw(value, MemOperand(address));
+ __ Ret();
+
+ __ bind(&non_smi_value);
+ // Escape to elements kind transition case.
+ __ CheckFastObjectElements(receiver_map, scratch_value,
+ &transition_smi_elements);
+
+ // Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(address, address, scratch_value);
+ __ sw(value, MemOperand(address));
+ // Update write barrier for the elements array address.
+ __ mov(scratch_value, value); // Preserve the value which is returned.
+ __ RecordWrite(elements,
+ address,
+ scratch_value,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Ret();
+
+ __ bind(fast_double);
+ if (check_map == kCheckMap) {
+ // Check for fast double array case. If this fails, call through to the
+ // runtime.
+ __ LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
+ __ Branch(slow, ne, elements_map, Operand(at));
+ }
+ __ bind(&fast_double_without_map_check);
+ __ StoreNumberToDoubleElements(value,
+ key,
+ receiver,
+ elements, // Overwritten.
+ a3, // Scratch regs...
+ t0,
+ t1,
+ t2,
+ &transition_double_elements);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ Addu(scratch_value, key, Operand(Smi::FromInt(1)));
+ __ sw(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ }
+ __ Ret();
+
+ __ bind(&transition_smi_elements);
+ // Transition the array appropriately depending on the value type.
+ __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+ __ Branch(&non_double_value, ne, t0, Operand(at));
+
+ // Value is a double. Transition FAST_SMI_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ receiver_map,
+ t0,
+ slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+}
+
+
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
StrictModeFlag strict_mode) {
// ---------- S t a t e --------------
@@ -1197,11 +1336,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// -- a2 : receiver
// -- ra : return address
// -----------------------------------
- Label slow, array, extra, check_if_double_array;
- Label fast_object_with_map_check, fast_object_without_map_check;
- Label fast_double_with_map_check, fast_double_without_map_check;
- Label transition_smi_elements, finish_object_store, non_double_value;
- Label transition_double_elements;
+ Label slow, fast_object, fast_object_grow;
+ Label fast_double, fast_double_grow;
+ Label array, extra, check_if_double_array;
// Register usage.
Register value = a0;
@@ -1233,7 +1370,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
// Check array bounds. Both the key and the length of FixedArray are smis.
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
- __ Branch(&fast_object_with_map_check, lo, key, Operand(t0));
+ __ Branch(&fast_object, lo, key, Operand(t0));
// Slow case, handle jump to runtime.
__ bind(&slow);
@@ -1258,19 +1395,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ Branch(
&check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
- // Calculate key + 1 as smi.
- STATIC_ASSERT(kSmiTag == 0);
- __ Addu(t0, key, Operand(Smi::FromInt(1)));
- __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ Branch(&fast_object_without_map_check);
+ __ jmp(&fast_object_grow);
__ bind(&check_if_double_array);
__ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
- // Add 1 to key, and go to common element store code for doubles.
- STATIC_ASSERT(kSmiTag == 0);
- __ Addu(t0, key, Operand(Smi::FromInt(1)));
- __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ jmp(&fast_double_without_map_check);
+ __ jmp(&fast_double_grow);
// Array case: Get the length and the elements array from the JS
// array. Check that the array is in fast mode (and writable); if it
@@ -1281,109 +1410,15 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Check the key against the length in the array.
__ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ Branch(&extra, hs, key, Operand(t0));
- // Fall through to fast case.
-
- __ bind(&fast_object_with_map_check);
- Register scratch_value = t0;
- Register address = t1;
- __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ Branch(&fast_double_with_map_check,
- ne,
- elements_map,
- Heap::kFixedArrayMapRootIndex);
- __ bind(&fast_object_without_map_check);
- // Smi stores don't require further checks.
- Label non_smi_value;
- __ JumpIfNotSmi(value, &non_smi_value);
- // It's irrelevant whether array is smi-only or not when writing a smi.
- __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(address, address, scratch_value);
- __ sw(value, MemOperand(address));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, value);
-
- __ bind(&non_smi_value);
- // Escape to elements kind transition case.
- __ CheckFastObjectElements(receiver_map, scratch_value,
- &transition_smi_elements);
- // Fast elements array, store the value to the elements backing store.
- __ bind(&finish_object_store);
- __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(address, address, scratch_value);
- __ sw(value, MemOperand(address));
- // Update write barrier for the elements array address.
- __ mov(v0, value); // Preserve the value which is returned.
- __ RecordWrite(elements,
- address,
- value,
- kRAHasNotBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- __ Ret();
-
- __ bind(&fast_double_with_map_check);
- // Check for fast double array case. If this fails, call through to the
- // runtime.
- __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
- __ bind(&fast_double_without_map_check);
- __ StoreNumberToDoubleElements(value,
- key,
- receiver,
- elements,
- a3,
- t0,
- t1,
- t2,
- &transition_double_elements);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, value);
-
- __ bind(&transition_smi_elements);
- // Transition the array appropriately depending on the value type.
- __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
- __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
- __ Branch(&non_double_value, ne, t0, Operand(at));
-
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_DOUBLE_ELEMENTS,
- receiver_map,
- t0,
- &slow);
- ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
- __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&fast_double_without_map_check);
-
- __ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
- receiver_map,
- t0,
- &slow);
- ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
- __ bind(&transition_double_elements);
- // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
- // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
- // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
- FAST_ELEMENTS,
- receiver_map,
- t0,
- &slow);
- ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
- ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
- __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
+ &slow, kCheckMap, kDontIncrementLength,
+ value, key, receiver, receiver_map,
+ elements_map, elements);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
+ &slow, kDontCheckMap, kIncrementLength,
+ value, key, receiver, receiver_map,
+ elements_map, elements);
}
@@ -1471,7 +1506,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in v0.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a2);
__ bind(&fail);
diff --git a/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc b/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
index d016743..b268fb3 100644
--- a/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
@@ -89,17 +89,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LChunkBuilder::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -125,6 +116,8 @@ void LCodeGen::Comment(const char* format, ...) {
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -260,7 +253,7 @@ bool LCodeGen::GenerateDeferredCode() {
bool LCodeGen::GenerateDeoptJumpTable() {
// TODO(plind): not clear that this will have advantage for MIPS.
// Skipping it for now. Raised issue #100 for this.
- Abort("Unimplemented: %s", "GenerateDeoptJumpTable");
+ Abort("Unimplemented: GenerateDeoptJumpTable");
return false;
}
@@ -293,7 +286,8 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
return ToRegister(op->index());
} else if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk_->LookupConstant(const_op);
+ Handle<Object> literal = constant->handle();
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
ASSERT(literal->IsNumber());
@@ -331,7 +325,8 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
return ToDoubleRegister(op->index());
} else if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk_->LookupConstant(const_op);
+ Handle<Object> literal = constant->handle();
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
ASSERT(literal->IsNumber());
@@ -355,9 +350,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
- Handle<Object> literal = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
- return literal;
+ return constant->handle();
}
@@ -367,33 +362,33 @@ bool LCodeGen::IsInteger32(LConstantOperand* op) const {
int LCodeGen::ToInteger32(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
- value->Number());
- return static_cast<int32_t>(value->Number());
+ ASSERT(constant->HasInteger32Value());
+ return constant->Integer32Value();
}
double LCodeGen::ToDouble(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
- return value->Number();
+ HConstant* constant = chunk_->LookupConstant(op);
+ ASSERT(constant->HasDoubleValue());
+ return constant->DoubleValue();
}
Operand LCodeGen::ToOperand(LOperand* op) {
if (op->IsConstantOperand()) {
LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ HConstant* constant = chunk()->LookupConstant(const_op);
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
- ASSERT(literal->IsNumber());
- return Operand(static_cast<int32_t>(literal->Number()));
+ ASSERT(constant->HasInteger32Value());
+ return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
Abort("ToOperand Unsupported double immediate.");
}
ASSERT(r.IsTagged());
- return Operand(literal);
+ return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
@@ -438,7 +433,9 @@ MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
void LCodeGen::WriteTranslation(LEnvironment* environment,
- Translation* translation) {
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count) {
if (environment == NULL) return;
// The translation includes one command per value in the environment.
@@ -446,8 +443,21 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
// The output frame height does not include the parameters.
int height = translation_size - environment->parameter_count();
- WriteTranslation(environment->outer(), translation);
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
+ // Function parameters are arguments to the outermost environment. The
+ // arguments index points to the first element of a sequence of tagged
+ // values on the stack that represent the arguments. This needs to be
+ // kept in sync with the LArgumentsElements implementation.
+ *arguments_index = -environment->parameter_count();
+ *arguments_count = environment->parameter_count();
+
+ WriteTranslation(environment->outer(),
+ translation,
+ arguments_index,
+ arguments_count);
+ int closure_id = *info()->closure() != *environment->closure()
+ ? DefineDeoptimizationLiteral(environment->closure())
+ : Translation::kSelfLiteralId;
+
switch (environment->frame_type()) {
case JS_FUNCTION:
translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -455,12 +465,31 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
case JS_CONSTRUCT:
translation->BeginConstructStubFrame(closure_id, translation_size);
break;
+ case JS_GETTER:
+ ASSERT(translation_size == 1);
+ ASSERT(height == 0);
+ translation->BeginGetterStubFrame(closure_id);
+ break;
+ case JS_SETTER:
+ ASSERT(translation_size == 2);
+ ASSERT(height == 0);
+ translation->BeginSetterStubFrame(closure_id);
+ break;
case ARGUMENTS_ADAPTOR:
translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
break;
- default:
- UNREACHABLE();
}
+
+ // Inlined frames which push their arguments cause the index to be
+ // bumped and a new stack area to be used for materialization.
+ if (environment->entry() != NULL &&
+ environment->entry()->arguments_pushed()) {
+ *arguments_index = *arguments_index < 0
+ ? GetStackSlotCount()
+ : *arguments_index + *arguments_count;
+ *arguments_count = environment->entry()->arguments_count() + 1;
+ }
+
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -471,7 +500,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
translation->MarkDuplicate();
AddToTranslation(translation,
environment->spilled_registers()[value->index()],
- environment->HasTaggedValueAt(i));
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
} else if (
value->IsDoubleRegister() &&
environment->spilled_double_registers()[value->index()] != NULL) {
@@ -479,26 +511,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
AddToTranslation(
translation,
environment->spilled_double_registers()[value->index()],
- false);
+ false,
+ false,
+ *arguments_index,
+ *arguments_count);
}
}
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
+ AddToTranslation(translation,
+ value,
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
}
}
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged) {
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count) {
if (op == NULL) {
// TODO(twuerthinger): Introduce marker operands to indicate that this value
// is not present and must be reconstructed from the deoptimizer. Currently
// this is only used for the arguments object.
- translation->StoreArgumentsObject();
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
} else if (op->IsStackSlot()) {
if (is_tagged) {
translation->StoreStackSlot(op->index());
+ } else if (is_uint32) {
+ translation->StoreUint32StackSlot(op->index());
} else {
translation->StoreInt32StackSlot(op->index());
}
@@ -512,6 +557,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
Register reg = ToRegister(op);
if (is_tagged) {
translation->StoreRegister(reg);
+ } else if (is_uint32) {
+ translation->StoreUint32Register(reg);
} else {
translation->StoreInt32Register(reg);
}
@@ -519,8 +566,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
DoubleRegister reg = ToDoubleRegister(op);
translation->StoreDoubleRegister(reg);
} else if (op->IsConstantOperand()) {
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
- int src_index = DefineDeoptimizationLiteral(literal);
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
translation->StoreLiteral(src_index);
} else {
UNREACHABLE();
@@ -587,20 +634,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int frame_count = 0;
int jsframe_count = 0;
+ int args_index = 0;
+ int args_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
- WriteTranslation(environment, &translation);
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
+ WriteTranslation(environment, &translation, &args_index, &args_count);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -658,13 +707,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
}
data->SetLiteralArray(*literals);
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
// Populate the deoptimization entries.
for (int i = 0; i < length; i++) {
LEnvironment* env = deoptimizations_[i];
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
+ data->SetAstId(i, env->ast_id());
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
@@ -679,7 +728,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -725,14 +774,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register cp always contains a pointer to the context.
- safepoint.DefinePointerRegister(cp);
+ safepoint.DefinePointerRegister(cp, zone());
}
}
@@ -744,7 +793,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -863,7 +912,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
void LCodeGen::DoModI(LModI* instr) {
Register scratch = scratch0();
- const Register left = ToRegister(instr->InputAt(0));
+ const Register left = ToRegister(instr->left());
const Register result = ToRegister(instr->result());
Label done;
@@ -891,7 +940,7 @@ void LCodeGen::DoModI(LModI* instr) {
__ And(result, scratch, p2constant - 1);
} else {
// div runs in the background while we check for special cases.
- Register right = EmitLoadRegister(instr->InputAt(1), scratch);
+ Register right = EmitLoadRegister(instr->right(), scratch);
__ div(left, right);
// Check for x % 0.
@@ -911,8 +960,8 @@ void LCodeGen::DoModI(LModI* instr) {
void LCodeGen::DoDivI(LDivI* instr) {
- const Register left = ToRegister(instr->InputAt(0));
- const Register right = ToRegister(instr->InputAt(1));
+ const Register left = ToRegister(instr->left());
+ const Register right = ToRegister(instr->right());
const Register result = ToRegister(instr->result());
// On MIPS div is asynchronous - it will run in the background while we
@@ -950,8 +999,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
Register scratch = scratch0();
Register result = ToRegister(instr->result());
// Note that result may alias left.
- Register left = ToRegister(instr->InputAt(0));
- LOperand* right_op = instr->InputAt(1);
+ Register left = ToRegister(instr->left());
+ LOperand* right_op = instr->right();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
bool bailout_on_minus_zero =
@@ -1021,7 +1070,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
} else {
Register right = EmitLoadRegister(right_op, scratch);
if (bailout_on_minus_zero) {
- __ Or(ToRegister(instr->TempAt(0)), left, right);
+ __ Or(ToRegister(instr->temp()), left, right);
}
if (can_overflow) {
@@ -1041,7 +1090,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ Branch(&done, ne, result, Operand(zero_reg));
DeoptimizeIf(lt,
instr->environment(),
- ToRegister(instr->TempAt(0)),
+ ToRegister(instr->temp()),
Operand(zero_reg));
__ bind(&done);
}
@@ -1050,8 +1099,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
void LCodeGen::DoBitI(LBitI* instr) {
- LOperand* left_op = instr->InputAt(0);
- LOperand* right_op = instr->InputAt(1);
+ LOperand* left_op = instr->left();
+ LOperand* right_op = instr->right();
ASSERT(left_op->IsRegister());
Register left = ToRegister(left_op);
Register result = ToRegister(instr->result());
@@ -1084,8 +1133,8 @@ void LCodeGen::DoBitI(LBitI* instr) {
void LCodeGen::DoShiftI(LShiftI* instr) {
// Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
// result may alias either of them.
- LOperand* right_op = instr->InputAt(1);
- Register left = ToRegister(instr->InputAt(0));
+ LOperand* right_op = instr->right();
+ Register left = ToRegister(instr->left());
Register result = ToRegister(instr->result());
if (right_op->IsRegister()) {
@@ -1147,8 +1196,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
void LCodeGen::DoSubI(LSubI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
LOperand* result = instr->result();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
@@ -1212,21 +1261,28 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ lw(result, FieldMemOperand(array, JSArray::kLengthOffset));
}
void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ lw(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}
+void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register map = ToRegister(instr->value());
+ __ EnumLength(result, map);
+}
+
+
void LCodeGen::DoElementsKind(LElementsKind* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
// Load map into |result|.
__ lw(result, FieldMemOperand(input, HeapObject::kMapOffset));
@@ -1239,9 +1295,9 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
void LCodeGen::DoValueOf(LValueOf* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- Register map = ToRegister(instr->TempAt(0));
+ Register map = ToRegister(instr->temp());
Label done;
// If the object is a smi return the object.
@@ -1258,9 +1314,9 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
void LCodeGen::DoDateField(LDateField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->date());
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
Smi* index = instr->index();
Label runtime, done;
ASSERT(object.is(a0));
@@ -1268,12 +1324,10 @@ void LCodeGen::DoDateField(LDateField* instr) {
ASSERT(!scratch.is(scratch0()));
ASSERT(!scratch.is(object));
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ And(at, object, Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
__ GetObjectType(object, scratch, scratch);
- __ Assert(eq, "Trying to get date field from non-date.",
- scratch, Operand(JS_DATE_TYPE));
-#endif
+ DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_DATE_TYPE));
if (index->value() == 0) {
__ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
@@ -1298,14 +1352,14 @@ void LCodeGen::DoDateField(LDateField* instr) {
void LCodeGen::DoBitNotI(LBitNotI* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
__ Nor(result, zero_reg, Operand(input));
}
void LCodeGen::DoThrow(LThrow* instr) {
- Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
+ Register input_reg = EmitLoadRegister(instr->value(), at);
__ push(input_reg);
CallRuntime(Runtime::kThrow, 1, instr);
@@ -1316,8 +1370,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
void LCodeGen::DoAddI(LAddI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
LOperand* result = instr->result();
bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
@@ -1354,9 +1408,71 @@ void LCodeGen::DoAddI(LAddI* instr) {
}
+void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
+ Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge;
+ if (instr->hydrogen()->representation().IsInteger32()) {
+ Register left_reg = ToRegister(left);
+ Operand right_op = (right->IsRegister() || right->IsConstantOperand())
+ ? ToOperand(right)
+ : Operand(EmitLoadRegister(right, at));
+ Register result_reg = ToRegister(instr->result());
+ Label return_right, done;
+ if (!result_reg.is(left_reg)) {
+ __ Branch(&return_right, NegateCondition(condition), left_reg, right_op);
+ __ mov(result_reg, left_reg);
+ __ Branch(&done);
+ }
+ __ Branch(&done, condition, left_reg, right_op);
+ __ bind(&return_right);
+ __ Addu(result_reg, zero_reg, right_op);
+ __ bind(&done);
+ } else {
+ ASSERT(instr->hydrogen()->representation().IsDouble());
+ FPURegister left_reg = ToDoubleRegister(left);
+ FPURegister right_reg = ToDoubleRegister(right);
+ FPURegister result_reg = ToDoubleRegister(instr->result());
+ Label check_nan_left, check_zero, return_left, return_right, done;
+ __ BranchF(&check_zero, &check_nan_left, eq, left_reg, right_reg);
+ __ BranchF(&return_left, NULL, condition, left_reg, right_reg);
+ __ Branch(&return_right);
+
+ __ bind(&check_zero);
+ // left == right != 0.
+ __ BranchF(&return_left, NULL, ne, left_reg, kDoubleRegZero);
+ // At this point, both left and right are either 0 or -0.
+ if (operation == HMathMinMax::kMathMin) {
+ __ neg_d(left_reg, left_reg);
+ __ sub_d(result_reg, left_reg, right_reg);
+ __ neg_d(result_reg, result_reg);
+ } else {
+ __ add_d(result_reg, left_reg, right_reg);
+ }
+ __ Branch(&done);
+
+ __ bind(&check_nan_left);
+ // left == NaN.
+ __ BranchF(NULL, &return_left, eq, left_reg, left_reg);
+ __ bind(&return_right);
+ if (!right_reg.is(result_reg)) {
+ __ mov_d(result_reg, right_reg);
+ }
+ __ Branch(&done);
+
+ __ bind(&return_left);
+ if (!left_reg.is(result_reg)) {
+ __ mov_d(result_reg, left_reg);
+ }
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
- DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
- DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
+ DoubleRegister left = ToDoubleRegister(instr->left());
+ DoubleRegister right = ToDoubleRegister(instr->right());
DoubleRegister result = ToDoubleRegister(instr->result());
switch (instr->op()) {
case Token::ADD:
@@ -1396,8 +1512,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(a1));
- ASSERT(ToRegister(instr->InputAt(1)).is(a0));
+ ASSERT(ToRegister(instr->left()).is(a1));
+ ASSERT(ToRegister(instr->right()).is(a0));
ASSERT(ToRegister(instr->result()).is(v0));
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
@@ -1461,15 +1577,15 @@ void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32()) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
EmitBranch(true_block, false_block, ne, reg, Operand(zero_reg));
} else if (r.IsDouble()) {
- DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister reg = ToDoubleRegister(instr->value());
// Test the double value. Zero and NaN are false.
EmitBranchF(true_block, false_block, ne, reg, kDoubleRegZero);
} else {
ASSERT(r.IsTagged());
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ LoadRoot(at, Heap::kTrueValueRootIndex);
@@ -1603,8 +1719,8 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1655,8 +1771,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Register left = ToRegister(instr->left());
+ Register right = ToRegister(instr->right());
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1665,7 +1781,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
+ Register left = ToRegister(instr->left());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1677,7 +1793,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
Register scratch = scratch0();
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int false_block = chunk_->LookupDestination(instr->false_block_id());
// If the expression is known to be untagged or a smi, then it's definitely
@@ -1743,8 +1859,8 @@ Condition LCodeGen::EmitIsObject(Register input,
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp1 = ToRegister(instr->temp());
Register temp2 = scratch0();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1771,8 +1887,8 @@ Condition LCodeGen::EmitIsString(Register input,
void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp1 = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1790,15 +1906,15 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
- Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
+ Register input_reg = EmitLoadRegister(instr->value(), at);
__ And(at, input_reg, kSmiTagMask);
EmitBranch(true_block, false_block, eq, at, Operand(zero_reg));
}
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1867,7 +1983,7 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Register scratch = scratch0();
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1886,12 +2002,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- if (FLAG_debug_code) {
- __ AbortIfNotString(input);
- }
+ __ AssertString(input);
__ lw(result, FieldMemOperand(input, String::kHashFieldOffset));
__ IndexFromHash(result, result);
@@ -1900,7 +2014,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register scratch = scratch0();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1980,9 +2094,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register temp = scratch0();
- Register temp2 = ToRegister(instr->TempAt(0));
+ Register temp2 = ToRegister(instr->temp());
Handle<String> class_name = instr->hydrogen()->class_name();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1998,8 +2112,8 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = instr->true_block_id();
int false_block = instr->false_block_id();
@@ -2010,8 +2124,8 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Label true_label, done;
- ASSERT(ToRegister(instr->InputAt(0)).is(a0)); // Object is in a0.
- ASSERT(ToRegister(instr->InputAt(1)).is(a1)); // Function is in a1.
+ ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0.
+ ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1.
Register result = ToRegister(instr->result());
ASSERT(result.is(v0));
@@ -2045,11 +2159,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
- Register object = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register object = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
ASSERT(object.is(a0));
@@ -2124,7 +2238,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// Get the temp register reserved by the instruction. This needs to be t0 as
// its slot of the pushing of safepoint registers is used to communicate the
// offset to the location of the map check.
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
ASSERT(temp.is(t0));
__ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 7;
@@ -2219,7 +2333,7 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
// it as no longer deleted.
if (instr->hydrogen()->RequiresHoleCheck()) {
// We use a temp to check the payload.
- Register payload = ToRegister(instr->TempAt(0));
+ Register payload = ToRegister(instr->temp());
__ lw(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
DeoptimizeIf(eq, instr->environment(), payload, Operand(at));
@@ -2302,7 +2416,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
if (instr->hydrogen()->is_in_object()) {
__ lw(result, FieldMemOperand(object, instr->hydrogen()->offset()));
@@ -2316,12 +2430,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
- type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ type->LookupDescriptor(NULL, *name, &lookup);
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2333,9 +2447,22 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
+ Heap* heap = type->GetHeap();
+ while (*current != heap->null_value()) {
+ __ LoadHeapObject(result, current);
+ __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset));
+ DeoptimizeIf(ne, env, result, Operand(Handle<Map>(current->map())));
+ current =
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
@@ -2343,7 +2470,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
- Register scratch = scratch0();
+ Register object_map = scratch0();
int map_count = instr->hydrogen()->types()->length();
bool need_generic = instr->hydrogen()->need_generic();
@@ -2354,17 +2481,25 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
}
Handle<String> name = instr->hydrogen()->name();
Label done;
- __ lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ __ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
for (int i = 0; i < map_count; ++i) {
bool last = (i == map_count - 1);
Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMapAndBranch(
+ object_map, map, &check_passed,
+ eq, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
if (last && !need_generic) {
- DeoptimizeIf(ne, instr->environment(), scratch, Operand(map));
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ DeoptimizeIf(al, instr->environment());
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
} else {
Label next;
- __ Branch(&next, ne, scratch, Operand(map));
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ Branch(&next);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
__ Branch(&done);
__ bind(&next);
}
@@ -2434,7 +2569,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
Register scratch = scratch0();
__ lw(result, FieldMemOperand(input, JSObject::kElementsOffset));
@@ -2449,8 +2584,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ lbu(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
__ Ext(scratch, scratch, Map::kElementsKindShift,
Map::kElementsKindBitCount);
- __ Branch(&done, eq, scratch,
- Operand(FAST_ELEMENTS));
+ __ Branch(&fail, lt, scratch,
+ Operand(GetInitialFastElementsKind()));
+ __ Branch(&done, le, scratch,
+ Operand(TERMINAL_FAST_ELEMENTS_KIND));
__ Branch(&fail, lt, scratch,
Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ Branch(&done, le, scratch,
@@ -2465,7 +2602,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
void LCodeGen::DoLoadExternalArrayPointer(
LLoadExternalArrayPointer* instr) {
Register to_reg = ToRegister(instr->result());
- Register from_reg = ToRegister(instr->InputAt(0));
+ Register from_reg = ToRegister(instr->object());
__ lw(to_reg, FieldMemOperand(from_reg,
ExternalArray::kExternalPointerOffset));
}
@@ -2476,14 +2613,6 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register length = ToRegister(instr->length());
Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
-
- // Bailout index is not a valid argument index. Use unsigned check to get
- // negative check for free.
-
- // TODO(plind): Shoud be optimized to do the sub before the DeoptimizeIf(),
- // as they do in Arm. It will save us an instruction.
- DeoptimizeIf(ls, instr->environment(), length, Operand(index));
-
// There are two words between the frame pointer and the last argument.
// Subtracting from length accounts for one of them, add one more.
__ subu(length, length, index);
@@ -2494,65 +2623,8 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
}
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
- Register elements = ToRegister(instr->elements());
- Register key = EmitLoadRegister(instr->key(), scratch0());
- Register result = ToRegister(instr->result());
- Register scratch = scratch0();
-
- // Load the result.
- __ sll(scratch, key, kPointerSizeLog2); // Key indexes words.
- __ addu(scratch, elements, scratch);
- __ lw(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
-
- // Check for the hole value.
- if (instr->hydrogen()->RequiresHoleCheck()) {
- __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
- }
-}
-
-
-void LCodeGen::DoLoadKeyedFastDoubleElement(
- LLoadKeyedFastDoubleElement* instr) {
- Register elements = ToRegister(instr->elements());
- bool key_is_constant = instr->key()->IsConstantOperand();
- Register key = no_reg;
- DoubleRegister result = ToDoubleRegister(instr->result());
- Register scratch = scratch0();
-
- int shift_size =
- ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
- int constant_key = 0;
- if (key_is_constant) {
- constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
- if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
- }
- } else {
- key = ToRegister(instr->key());
- }
-
- if (key_is_constant) {
- __ Addu(elements, elements, Operand(constant_key * (1 << shift_size) +
- FixedDoubleArray::kHeaderSize - kHeapObjectTag));
- } else {
- __ sll(scratch, key, shift_size);
- __ Addu(elements, elements, Operand(scratch));
- __ Addu(elements, elements,
- Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
- }
-
- __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
- DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
-
- __ ldc1(result, MemOperand(elements));
-}
-
-
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
- LLoadKeyedSpecializedArrayElement* instr) {
- Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
+ Register external_pointer = ToRegister(instr->elements());
Register key = no_reg;
ElementsKind elements_kind = instr->elements_kind();
bool key_is_constant = instr->key()->IsConstantOperand();
@@ -2565,36 +2637,33 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
} else {
key = ToRegister(instr->key());
}
- int shift_size = ElementsKindToShiftSize(elements_kind);
+ int element_size_shift = ElementsKindToShiftSize(elements_kind);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int additional_offset = instr->additional_index() << element_size_shift;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
FPURegister result = ToDoubleRegister(instr->result());
if (key_is_constant) {
- __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
+ __ Addu(scratch0(), external_pointer, constant_key << element_size_shift);
} else {
__ sll(scratch0(), key, shift_size);
__ Addu(scratch0(), scratch0(), external_pointer);
}
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
- __ lwc1(result, MemOperand(scratch0()));
+ __ lwc1(result, MemOperand(scratch0(), additional_offset));
__ cvt_d_s(result, result);
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ ldc1(result, MemOperand(scratch0()));
+ __ ldc1(result, MemOperand(scratch0(), additional_offset));
}
} else {
Register result = ToRegister(instr->result());
- Register scratch = scratch0();
- MemOperand mem_operand(zero_reg);
- if (key_is_constant) {
- mem_operand = MemOperand(external_pointer,
- constant_key * (1 << shift_size));
- } else {
- __ sll(scratch, key, shift_size);
- __ Addu(scratch, scratch, external_pointer);
- mem_operand = MemOperand(scratch);
- }
+ MemOperand mem_operand = PrepareKeyedOperand(
+ key, external_pointer, key_is_constant, constant_key,
+ element_size_shift, shift_size,
+ instr->additional_index(), additional_offset);
switch (elements_kind) {
case EXTERNAL_BYTE_ELEMENTS:
__ lb(result, mem_operand);
@@ -2614,17 +2683,19 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
break;
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
__ lw(result, mem_operand);
- // TODO(danno): we could be more clever here, perhaps having a special
- // version of the stub that detects if the overflow case actually
- // happens, and generate code that returns a double rather than int.
- DeoptimizeIf(Ugreater_equal, instr->environment(),
- result, Operand(0x80000000));
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
+ DeoptimizeIf(Ugreater_equal, instr->environment(),
+ result, Operand(0x80000000));
+ }
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2634,6 +2705,145 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
}
+void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
+ Register elements = ToRegister(instr->elements());
+ bool key_is_constant = instr->key()->IsConstantOperand();
+ Register key = no_reg;
+ DoubleRegister result = ToDoubleRegister(instr->result());
+ Register scratch = scratch0();
+
+ int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int constant_key = 0;
+ if (key_is_constant) {
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+ if (constant_key & 0xF0000000) {
+ Abort("array index constant value too big.");
+ }
+ } else {
+ key = ToRegister(instr->key());
+ }
+
+ if (key_is_constant) {
+ __ Addu(elements, elements,
+ Operand(((constant_key + instr->additional_index()) <<
+ element_size_shift) +
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ } else {
+ __ sll(scratch, key, shift_size);
+ __ Addu(elements, elements, Operand(scratch));
+ __ Addu(elements, elements,
+ Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+ (instr->additional_index() << element_size_shift)));
+ }
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+ DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
+ }
+
+ __ ldc1(result, MemOperand(elements));
+}
+
+
+void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
+ Register elements = ToRegister(instr->elements());
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+ Register store_base = scratch;
+ int offset = 0;
+
+ if (instr->key()->IsConstantOperand()) {
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+ offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+ instr->additional_index());
+ store_base = elements;
+ } else {
+ Register key = EmitLoadRegister(instr->key(), scratch0());
+ // Even though the HLoadKeyed instruction forces the input
+ // representation for the key to be an integer, the input gets replaced
+ // during bound check elimination with the index argument to the bounds
+ // check, which can be tagged, so that case must be handled here, too.
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
+ __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+ __ addu(scratch, elements, scratch);
+ } else {
+ __ sll(scratch, key, kPointerSizeLog2);
+ __ addu(scratch, elements, scratch);
+ }
+ offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+ }
+ __ lw(result, FieldMemOperand(store_base, offset));
+
+ // Check for the hole value.
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ And(scratch, result, Operand(kSmiTagMask));
+ DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
+ } else {
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
+ }
+ }
+}
+
+
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
+ if (instr->is_external()) {
+ DoLoadKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->representation().IsDouble()) {
+ DoLoadKeyedFixedDoubleArray(instr);
+ } else {
+ DoLoadKeyedFixedArray(instr);
+ }
+}
+
+
+MemOperand LCodeGen::PrepareKeyedOperand(Register key,
+ Register base,
+ bool key_is_constant,
+ int constant_key,
+ int element_size,
+ int shift_size,
+ int additional_index,
+ int additional_offset) {
+ if (additional_index != 0 && !key_is_constant) {
+ additional_index *= 1 << (element_size - shift_size);
+ __ Addu(scratch0(), key, Operand(additional_index));
+ }
+
+ if (key_is_constant) {
+ return MemOperand(base,
+ (constant_key << element_size) + additional_offset);
+ }
+
+ if (additional_index == 0) {
+ if (shift_size >= 0) {
+ __ sll(scratch0(), key, shift_size);
+ __ Addu(scratch0(), base, scratch0());
+ return MemOperand(scratch0());
+ } else {
+ ASSERT_EQ(-1, shift_size);
+ __ srl(scratch0(), key, 1);
+ __ Addu(scratch0(), base, scratch0());
+ return MemOperand(scratch0());
+ }
+ }
+
+ if (shift_size >= 0) {
+ __ sll(scratch0(), scratch0(), shift_size);
+ __ Addu(scratch0(), base, scratch0());
+ return MemOperand(scratch0());
+ } else {
+ ASSERT_EQ(-1, shift_size);
+ __ srl(scratch0(), scratch0(), 1);
+ __ Addu(scratch0(), base, scratch0());
+ return MemOperand(scratch0());
+ }
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(a1));
ASSERT(ToRegister(instr->key()).is(a0));
@@ -2666,7 +2876,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
- Register elem = ToRegister(instr->InputAt(0));
+ Register elem = ToRegister(instr->elements());
Register result = ToRegister(instr->result());
Label done;
@@ -2784,7 +2994,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
- LOperand* argument = instr->InputAt(0);
+ LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
Abort("DoPushArgument not implemented for double type.");
} else {
@@ -2801,7 +3011,7 @@ void LCodeGen::DoDrop(LDrop* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- __ LoadHeapObject(result, instr->hydrogen()->closure());
+ __ lw(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
@@ -2830,12 +3040,14 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register result = ToRegister(instr->result());
- __ lw(result, ContextOperand(cp, instr->qml_global()?Context::QML_GLOBAL_INDEX:Context::GLOBAL_INDEX));
+ __ lw(result, ContextOperand(cp, instr->qml_global()
+ ? Context::QML_GLOBAL_OBJECT_INDEX
+ : Context::GLOBAL_OBJECT_INDEX));
}
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
- Register global = ToRegister(instr->global());
+ Register global = ToRegister(instr->global_object());
Register result = ToRegister(instr->result());
__ lw(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
@@ -2857,14 +3069,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ LoadHeapObject(a1, function);
}
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
- }
+ // Change context.
+ __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Set r0 to arguments count if adaption is not needed. Assumes that r0
// is available to write to at this point.
@@ -2902,7 +3108,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
Register scratch = scratch0();
@@ -2967,7 +3173,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Label done;
@@ -2998,7 +3204,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsDouble()) {
- FPURegister input = ToDoubleRegister(instr->InputAt(0));
+ FPURegister input = ToDoubleRegister(instr->value());
FPURegister result = ToDoubleRegister(instr->result());
__ abs_d(result, input);
} else if (r.IsInteger32()) {
@@ -3006,8 +3212,8 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
} else {
// Representation is tagged.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
- Register input = ToRegister(instr->InputAt(0));
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
+ Register input = ToRegister(instr->value());
// Smi check.
__ JumpIfNotSmi(input, deferred->entry());
// If smi, handle it directly.
@@ -3018,11 +3224,11 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
Register result = ToRegister(instr->result());
FPURegister single_scratch = double_scratch0().low();
Register scratch1 = scratch0();
- Register except_flag = ToRegister(instr->TempAt(0));
+ Register except_flag = ToRegister(instr->temp());
__ EmitFPUTruncate(kRoundToMinusInf,
single_scratch,
@@ -3049,7 +3255,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
Register result = ToRegister(instr->result());
Register scratch = scratch0();
Label done, check_sign_on_zero;
@@ -3126,16 +3332,16 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
DoubleRegister result = ToDoubleRegister(instr->result());
__ sqrt_d(result, input);
}
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
- DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input = ToDoubleRegister(instr->value());
DoubleRegister result = ToDoubleRegister(instr->result());
- DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp = ToDoubleRegister(instr->temp());
ASSERT(!input.is(result));
@@ -3160,11 +3366,11 @@ void LCodeGen::DoPower(LPower* instr) {
Representation exponent_type = instr->hydrogen()->right()->representation();
// Having marked this as a call, we can use any registers.
// Just make sure that the input/output registers are the expected ones.
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
- ToDoubleRegister(instr->InputAt(1)).is(f4));
- ASSERT(!instr->InputAt(1)->IsRegister() ||
- ToRegister(instr->InputAt(1)).is(a2));
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(f2));
+ ASSERT(!instr->right()->IsDoubleRegister() ||
+ ToDoubleRegister(instr->right()).is(f4));
+ ASSERT(!instr->right()->IsRegister() ||
+ ToRegister(instr->right()).is(a2));
+ ASSERT(ToDoubleRegister(instr->left()).is(f2));
ASSERT(ToDoubleRegister(instr->result()).is(f0));
if (exponent_type.IsTagged()) {
@@ -3197,20 +3403,20 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(f0));
- ASSERT(ToRegister(instr->InputAt(0)).is(a0));
+ ASSERT(ToRegister(instr->global_object()).is(a0));
static const int kSeedSize = sizeof(uint32_t);
STATIC_ASSERT(kPointerSize == kSeedSize);
- __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
+ __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
static const int kRandomSeedOffset =
FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
__ lw(a2, FieldMemOperand(a0, kRandomSeedOffset));
- // a2: FixedArray of the global context's random seeds
+ // a2: FixedArray of the native context's random seeds
// Load state[0].
__ lw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
@@ -3411,7 +3617,7 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
void LCodeGen::DoCallNew(LCallNew* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(a1));
+ ASSERT(ToRegister(instr->constructor()).is(a1));
ASSERT(ToRegister(instr->result()).is(v0));
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
@@ -3436,6 +3642,18 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (!instr->transition().is_null()) {
__ li(scratch, Operand(instr->transition()));
__ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ Register temp = ToRegister(instr->temp());
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ scratch,
+ temp,
+ kRAHasBeenSaved,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3487,101 +3705,52 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
}
-void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- DeoptimizeIf(hs,
- instr->environment(),
- ToRegister(instr->index()),
- Operand(ToRegister(instr->length())));
-}
-
-
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
- Register value = ToRegister(instr->value());
- Register elements = ToRegister(instr->object());
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- Register scratch = scratch0();
-
- // Do the store.
- if (instr->key()->IsConstantOperand()) {
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
- int offset =
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
- __ sw(value, FieldMemOperand(elements, offset));
- } else {
- __ sll(scratch, key, kPointerSizeLog2);
- __ addu(scratch, elements, scratch);
- __ sw(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
- }
-
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- HType type = instr->hydrogen()->value()->type();
- SmiCheck check_needed =
- type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- // Compute address of modified element and store it into key register.
- __ Addu(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ RecordWrite(elements,
- key,
- value,
- kRAHasBeenSaved,
- kSaveFPRegs,
- EMIT_REMEMBERED_SET,
- check_needed);
+void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand) {
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
+ if (operand->IsRegister()) {
+ __ And(at, ToRegister(operand), Operand(kSmiTagMask));
+ DeoptimizeIf(ne, environment, at, Operand(zero_reg));
+ } else {
+ __ li(at, ToOperand(operand));
+ __ And(at, at, Operand(kSmiTagMask));
+ DeoptimizeIf(ne, environment, at, Operand(zero_reg));
+ }
}
}
-void LCodeGen::DoStoreKeyedFastDoubleElement(
- LStoreKeyedFastDoubleElement* instr) {
- DoubleRegister value = ToDoubleRegister(instr->value());
- Register elements = ToRegister(instr->elements());
- Register key = no_reg;
- Register scratch = scratch0();
- bool key_is_constant = instr->key()->IsConstantOperand();
- int constant_key = 0;
- Label not_nan;
-
- // Calculate the effective address of the slot in the array to store the
- // double value.
- if (key_is_constant) {
- constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
- if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->length(),
+ instr->length());
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->index(),
+ instr->index());
+ if (instr->index()->IsConstantOperand()) {
+ int constant_index =
+ ToInteger32(LConstantOperand::cast(instr->index()));
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
+ __ li(at, Operand(Smi::FromInt(constant_index)));
+ } else {
+ __ li(at, Operand(constant_index));
}
+ DeoptimizeIf(hs,
+ instr->environment(),
+ at,
+ Operand(ToRegister(instr->length())));
} else {
- key = ToRegister(instr->key());
- }
- int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
- if (key_is_constant) {
- __ Addu(scratch, elements, Operand(constant_key * (1 << shift_size) +
- FixedDoubleArray::kHeaderSize - kHeapObjectTag));
- } else {
- __ sll(scratch, key, shift_size);
- __ Addu(scratch, elements, Operand(scratch));
- __ Addu(scratch, scratch,
- Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
- }
-
- if (instr->NeedsCanonicalization()) {
- Label is_nan;
- // Check for NaN. All NaNs must be canonicalized.
- __ BranchF(NULL, &is_nan, eq, value, value);
- __ Branch(&not_nan);
-
- // Only load canonical NaN if the comparison above set the overflow.
- __ bind(&is_nan);
- __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+ DeoptimizeIf(hs,
+ instr->environment(),
+ ToRegister(instr->index()),
+ Operand(ToRegister(instr->length())));
}
-
- __ bind(&not_nan);
- __ sdc1(value, MemOperand(scratch));
}
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
- LStoreKeyedSpecializedArrayElement* instr) {
-
- Register external_pointer = ToRegister(instr->external_pointer());
+void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
+ Register external_pointer = ToRegister(instr->elements());
Register key = no_reg;
ElementsKind elements_kind = instr->elements_kind();
bool key_is_constant = instr->key()->IsConstantOperand();
@@ -3594,13 +3763,17 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
} else {
key = ToRegister(instr->key());
}
- int shift_size = ElementsKindToShiftSize(elements_kind);
+ int element_size_shift = ElementsKindToShiftSize(elements_kind);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ int additional_offset = instr->additional_index() << element_size_shift;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
FPURegister value(ToDoubleRegister(instr->value()));
if (key_is_constant) {
- __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
+ __ Addu(scratch0(), external_pointer, constant_key <<
+ element_size_shift);
} else {
__ sll(scratch0(), key, shift_size);
__ Addu(scratch0(), scratch0(), external_pointer);
@@ -3608,22 +3781,16 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ cvt_s_d(double_scratch0(), value);
- __ swc1(double_scratch0(), MemOperand(scratch0()));
+ __ swc1(double_scratch0(), MemOperand(scratch0(), additional_offset));
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ sdc1(value, MemOperand(scratch0()));
+ __ sdc1(value, MemOperand(scratch0(), additional_offset));
}
} else {
Register value(ToRegister(instr->value()));
- MemOperand mem_operand(zero_reg);
- Register scratch = scratch0();
- if (key_is_constant) {
- mem_operand = MemOperand(external_pointer,
- constant_key * (1 << shift_size));
- } else {
- __ sll(scratch, key, shift_size);
- __ Addu(scratch, scratch, external_pointer);
- mem_operand = MemOperand(scratch);
- }
+ MemOperand mem_operand = PrepareKeyedOperand(
+ key, external_pointer, key_is_constant, constant_key,
+ element_size_shift, shift_size,
+ instr->additional_index(), additional_offset);
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -3642,7 +3809,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3651,6 +3821,117 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
}
}
+
+void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
+ DoubleRegister value = ToDoubleRegister(instr->value());
+ Register elements = ToRegister(instr->elements());
+ Register key = no_reg;
+ Register scratch = scratch0();
+ bool key_is_constant = instr->key()->IsConstantOperand();
+ int constant_key = 0;
+ Label not_nan;
+
+ // Calculate the effective address of the slot in the array to store the
+ // double value.
+ if (key_is_constant) {
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+ if (constant_key & 0xF0000000) {
+ Abort("array index constant value too big.");
+ }
+ } else {
+ key = ToRegister(instr->key());
+ }
+ int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+ int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+ ? (element_size_shift - kSmiTagSize) : element_size_shift;
+ if (key_is_constant) {
+ __ Addu(scratch, elements, Operand((constant_key << element_size_shift) +
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ } else {
+ __ sll(scratch, key, shift_size);
+ __ Addu(scratch, elements, Operand(scratch));
+ __ Addu(scratch, scratch,
+ Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ }
+
+ if (instr->NeedsCanonicalization()) {
+ Label is_nan;
+ // Check for NaN. All NaNs must be canonicalized.
+ __ BranchF(NULL, &is_nan, eq, value, value);
+ __ Branch(&not_nan);
+
+ // Only load canonical NaN if the comparison above set the overflow.
+ __ bind(&is_nan);
+ __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+ }
+
+ __ bind(&not_nan);
+ __ sdc1(value, MemOperand(scratch, instr->additional_index() <<
+ element_size_shift));
+}
+
+
+void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
+ Register value = ToRegister(instr->value());
+ Register elements = ToRegister(instr->elements());
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
+ : no_reg;
+ Register scratch = scratch0();
+ Register store_base = scratch;
+ int offset = 0;
+
+ // Do the store.
+ if (instr->key()->IsConstantOperand()) {
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+ offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+ instr->additional_index());
+ store_base = elements;
+ } else {
+ // Even though the HLoadKeyed instruction forces the input
+ // representation for the key to be an integer, the input gets replaced
+ // during bound check elimination with the index argument to the bounds
+ // check, which can be tagged, so that case must be handled here, too.
+ if (instr->hydrogen()->key()->representation().IsTagged()) {
+ __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+ __ addu(scratch, elements, scratch);
+ } else {
+ __ sll(scratch, key, kPointerSizeLog2);
+ __ addu(scratch, elements, scratch);
+ }
+ offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+ }
+ __ sw(value, FieldMemOperand(store_base, offset));
+
+ if (instr->hydrogen()->NeedsWriteBarrier()) {
+ HType type = instr->hydrogen()->value()->type();
+ SmiCheck check_needed =
+ type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+ // Compute address of modified element and store it into key register.
+ __ Addu(key, store_base, Operand(offset - kHeapObjectTag));
+ __ RecordWrite(elements,
+ key,
+ value,
+ kRAHasBeenSaved,
+ kSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ check_needed);
+ }
+}
+
+
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+ // By cases: external, fast double
+ if (instr->is_external()) {
+ DoStoreKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+ DoStoreKeyedFixedDoubleArray(instr);
+ } else {
+ DoStoreKeyedFixedArray(instr);
+ }
+}
+
+
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(a2));
ASSERT(ToRegister(instr->key()).is(a1));
@@ -3665,7 +3946,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
Register object_reg = ToRegister(instr->object());
- Register new_map_reg = ToRegister(instr->new_map_reg());
+ Register new_map_reg = ToRegister(instr->new_map_temp());
Register scratch = scratch0();
Handle<Map> from_map = instr->original_map();
@@ -3680,21 +3961,22 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ Branch(&not_applicable, ne, scratch, Operand(from_map));
__ li(new_map_reg, Operand(to_map));
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastSmiElementsKind(from_kind) && IsFastObjectElementsKind(to_kind)) {
__ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
// Write barrier.
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
scratch, kRAHasBeenSaved, kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(a2));
ASSERT(new_map_reg.is(a3));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(a2));
ASSERT(new_map_reg.is(a3));
__ mov(fixed_object_reg, object_reg);
@@ -3727,7 +4009,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
ToRegister(instr->index()),
@@ -3761,9 +4043,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
__ push(index);
}
CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
- if (FLAG_debug_code) {
- __ AbortIfNotSmi(v0);
- }
+ __ AssertSmi(v0);
__ SmiUntag(v0);
__ StoreToSafepointRegisterSlot(v0, result);
}
@@ -3781,7 +4061,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3819,14 +4099,14 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->InputAt(0));
+ Register string = ToRegister(instr->string());
Register result = ToRegister(instr->result());
__ lw(result, FieldMemOperand(string, String::kLengthOffset));
}
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
LOperand* output = instr->result();
ASSERT(output->IsDoubleRegister());
@@ -3842,47 +4122,95 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
}
+void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
+ LOperand* input = instr->value();
+ LOperand* output = instr->result();
+
+ FPURegister dbl_scratch = double_scratch0();
+ __ mtc1(ToRegister(input), dbl_scratch);
+ __ Cvt_d_uw(ToDoubleRegister(output), dbl_scratch, f22);
+}
+
+
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
class DeferredNumberTagI: public LDeferredCode {
public:
DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
: LDeferredCode(codegen), instr_(instr) { }
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_,
+ instr_->value(),
+ SIGNED_INT32);
+ }
virtual LInstruction* instr() { return instr_; }
private:
LNumberTagI* instr_;
};
- Register src = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(instr->value());
Register dst = ToRegister(instr->result());
Register overflow = scratch0();
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTagCheckOverflow(dst, src, overflow);
__ BranchOnOverflow(deferred->entry(), overflow);
__ bind(deferred->exit());
}
-void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
+void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
+ class DeferredNumberTagU: public LDeferredCode {
+ public:
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagI(instr_,
+ instr_->value(),
+ UNSIGNED_INT32);
+ }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LNumberTagU* instr_;
+ };
+
+ LOperand* input = instr->value();
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register reg = ToRegister(input);
+
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
+ __ Branch(deferred->entry(), hi, reg, Operand(Smi::kMaxValue));
+ __ SmiTag(reg, reg);
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness) {
Label slow;
- Register src = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(value);
Register dst = ToRegister(instr->result());
FPURegister dbl_scratch = double_scratch0();
// Preserve the value of all registers.
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- // There was overflow, so bits 30 and 31 of the original integer
- // disagree. Try to allocate a heap number in new space and store
- // the value in there. If that fails, call the runtime system.
Label done;
- if (dst.is(src)) {
- __ SmiUntag(src, dst);
- __ Xor(src, src, Operand(0x80000000));
+ if (signedness == SIGNED_INT32) {
+ // There was overflow, so bits 30 and 31 of the original integer
+ // disagree. Try to allocate a heap number in new space and store
+ // the value in there. If that fails, call the runtime system.
+ if (dst.is(src)) {
+ __ SmiUntag(src, dst);
+ __ Xor(src, src, Operand(0x80000000));
+ }
+ __ mtc1(src, dbl_scratch);
+ __ cvt_d_w(dbl_scratch, dbl_scratch);
+ } else {
+ __ mtc1(src, dbl_scratch);
+ __ Cvt_d_uw(dbl_scratch, dbl_scratch, f22);
}
- __ mtc1(src, dbl_scratch);
- __ cvt_d_w(dbl_scratch, dbl_scratch);
+
if (FLAG_inline_new) {
__ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(t1, a3, t0, t2, &slow);
@@ -3919,13 +4247,13 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
LNumberTagD* instr_;
};
- DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ DoubleRegister input_reg = ToDoubleRegister(instr->value());
Register scratch = scratch0();
Register reg = ToRegister(instr->result());
- Register temp1 = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ Register temp1 = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
@@ -3952,13 +4280,13 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
- __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
+ __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value()));
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Register scratch = scratch0();
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
if (instr->needs_check()) {
STATIC_ASSERT(kHeapObjectTag == 1);
@@ -4023,9 +4351,9 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
Register scratch1 = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->temp());
DoubleRegister double_scratch = double_scratch0();
FPURegister single_scratch = double_scratch.low();
@@ -4042,8 +4370,8 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
// of the if.
if (instr->truncating()) {
- Register scratch3 = ToRegister(instr->TempAt(1));
- DoubleRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
+ Register scratch3 = ToRegister(instr->temp2());
+ DoubleRegister double_scratch2 = ToDoubleRegister(instr->temp3());
ASSERT(!scratch3.is(input_reg) &&
!scratch3.is(scratch1) &&
!scratch3.is(scratch2));
@@ -4113,13 +4441,13 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
LTaggedToI* instr_;
};
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
ASSERT(input->Equals(instr->result()));
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Let the deferred code handle the HeapObject case.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -4131,7 +4459,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
LOperand* result = instr->result();
ASSERT(result->IsDoubleRegister());
@@ -4149,12 +4477,12 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Register result_reg = ToRegister(instr->result());
Register scratch1 = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
- DoubleRegister double_input = ToDoubleRegister(instr->InputAt(0));
+ Register scratch2 = ToRegister(instr->temp());
+ DoubleRegister double_input = ToDoubleRegister(instr->value());
FPURegister single_scratch = double_scratch0().low();
if (instr->truncating()) {
- Register scratch3 = ToRegister(instr->TempAt(1));
+ Register scratch3 = ToRegister(instr->temp2());
__ EmitECMATruncate(result_reg,
double_input,
single_scratch,
@@ -4181,21 +4509,21 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ And(at, ToRegister(input), Operand(kSmiTagMask));
DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
__ And(at, ToRegister(input), Operand(kSmiTagMask));
DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register scratch = scratch0();
__ GetObjectType(input, scratch, scratch);
@@ -4265,7 +4593,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
Label success;
@@ -4284,7 +4612,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
Register result_reg = ToRegister(instr->result());
- DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp_reg = ToDoubleRegister(instr->temp());
__ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}
@@ -4300,7 +4628,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
Register scratch = scratch0();
Register input_reg = ToRegister(instr->unclamped());
Register result_reg = ToRegister(instr->result());
- DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
+ DoubleRegister temp_reg = ToDoubleRegister(instr->temp());
Label is_smi, done, heap_number;
// Both smi and heap number cases are handled.
@@ -4332,8 +4660,9 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- Register temp1 = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ ASSERT(instr->temp()->Equals(instr->result()));
+ Register temp1 = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
Handle<JSObject> holder = instr->holder();
Handle<JSObject> current_prototype = instr->prototype();
@@ -4370,11 +4699,12 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
- Register scratch2 = ToRegister(instr->TempAt(1));
+ Register scratch = ToRegister(instr->temp());
+ Register scratch2 = ToRegister(instr->temp2());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
Handle<Map> initial_map(constructor->initial_map());
int instance_size = initial_map->instance_size();
@@ -4442,14 +4772,15 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Heap* heap = isolate()->heap();
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object());
// Load map into a2.
__ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
@@ -4462,12 +4793,13 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
a2,
Operand(boilerplate_elements_kind));
}
- __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
+
+ // Set up the parameters to the stub/runtime call.
+ __ LoadHeapObject(a3, literals);
__ li(a2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
- __ li(a1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
+ __ li(a1, Operand(isolate()->factory()->empty_fixed_array()));
__ Push(a3, a2, a1);
// Pick the right runtime function or stub to call.
@@ -4562,8 +4894,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
for (int i = 0; i < elements_length; i++) {
int64_t value = double_array->get_representation(i);
// We only support little endian mode...
- int32_t value_low = value & 0xFFFFFFFF;
- int32_t value_high = value >> 32;
+ int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
+ int32_t value_high = static_cast<int32_t>(value >> 32);
int total_offset =
elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
__ li(a2, Operand(value_low));
@@ -4602,10 +4934,11 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate()->GetElementsKind();
- // Deopt if the literal boilerplate ElementsKind is of a type different than
- // the expected one. The check isn't necessary if the boilerplate has already
- // been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
// Load map into a2.
__ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
@@ -4667,7 +5000,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(a0));
+ ASSERT(ToRegister(instr->value()).is(a0));
ASSERT(ToRegister(instr->result()).is(v0));
__ push(a0);
CallRuntime(Runtime::kToFastProperties, 1, instr);
@@ -4677,15 +5010,13 @@ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Label materialized;
// Registers will be used as follows:
- // a3 = JS function.
// t3 = literals array.
// a1 = regexp literal.
// a0 = regexp literal clone.
// a2 and t0-t2 are used as temporaries.
- __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ lw(t3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
- int literal_offset = FixedArray::kHeaderSize +
- instr->hydrogen()->literal_index() * kPointerSize;
+ int literal_offset =
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
+ __ LoadHeapObject(t3, instr->hydrogen()->literals());
__ lw(a1, FieldMemOperand(t3, literal_offset));
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(&materialized, ne, a1, Operand(at));
@@ -4751,14 +5082,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
void LCodeGen::DoTypeof(LTypeof* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
__ push(input);
CallRuntime(Runtime::kTypeof, 1, instr);
}
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -4885,7 +5216,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
- Register temp1 = ToRegister(instr->TempAt(0));
+ Register temp1 = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -5013,7 +5344,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ LoadRoot(at, Heap::kStackLimitRootIndex);
__ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
EnsureSpaceForLazyDeopt();
@@ -5084,12 +5415,21 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
+ Label load_cache, done;
+ __ EnumLength(result, map);
+ __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0)));
+ __ li(result, Operand(isolate()->factory()->empty_fixed_array()));
+ __ jmp(&done);
+
+ __ bind(&load_cache);
__ LoadInstanceDescriptors(map, result);
__ lw(result,
- FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
__ lw(result,
FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
+
+ __ bind(&done);
}
diff --git a/src/3rdparty/v8/src/mips/lithium-codegen-mips.h b/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
index 94bb945..7363eb8 100644
--- a/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
+++ b/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
@@ -44,21 +44,24 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
- : chunk_(chunk),
+ : zone_(info->zone()),
+ chunk_(static_cast<LPlatformChunk*>(chunk)),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deopt_jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, info->zone()),
+ deopt_jump_table_(4, info->zone()),
+ deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(info->zone()),
+ deferred_(8, info->zone()),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(info->zone()),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -71,6 +74,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
// LOperand must be a register.
@@ -106,7 +110,12 @@ class LCodeGen BASE_EMBEDDED {
void FinishCode(Handle<Code> code);
void DoDeferredNumberTagD(LNumberTagD* instr);
- void DoDeferredNumberTagI(LNumberTagI* instr);
+
+ enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
+ void DoDeferredNumberTagI(LInstruction* instr,
+ LOperand* value,
+ IntegerSignedness signedness);
+
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
@@ -124,8 +133,20 @@ class LCodeGen BASE_EMBEDDED {
void DoParallelMove(LParallelMove* move);
void DoGap(LGap* instr);
+ MemOperand PrepareKeyedOperand(Register key,
+ Register base,
+ bool key_is_constant,
+ int constant_key,
+ int element_size,
+ int shift_size,
+ int additional_index,
+ int additional_offset);
+
// Emit frame translation commands for an environment.
- void WriteTranslation(LEnvironment* environment, Translation* translation);
+ void WriteTranslation(LEnvironment* environment,
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count);
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
@@ -149,7 +170,7 @@ class LCodeGen BASE_EMBEDDED {
return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -170,10 +191,10 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
int GetParameterCount() const { return scope()->num_parameters(); }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -239,7 +260,10 @@ class LCodeGen BASE_EMBEDDED {
void AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged);
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
@@ -294,6 +318,10 @@ class LCodeGen BASE_EMBEDDED {
bool deoptimize_on_minus_zero,
LEnvironment* env);
+ void DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand);
+
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
// true and false label should be made, to optimize fallthrough.
@@ -329,7 +357,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -347,8 +376,15 @@ class LCodeGen BASE_EMBEDDED {
};
void EnsureSpaceForLazyDeopt();
-
- LChunk* const chunk_;
+ void DoLoadKeyedExternalArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedArray(LLoadKeyed* instr);
+ void DoStoreKeyedExternalArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedArray(LStoreKeyed* instr);
+
+ Zone* zone_;
+ LPlatformChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc b/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
index 4a5fbe3..87efae5 100644
--- a/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
@@ -35,7 +35,7 @@ namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner),
- moves_(32),
+ moves_(32, owner->zone()),
root_index_(0),
in_cycle_(false),
saved_destination_(NULL) {}
@@ -80,7 +80,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
diff --git a/src/3rdparty/v8/src/mips/lithium-mips.cc b/src/3rdparty/v8/src/mips/lithium-mips.cc
index eab5945..7b71758 100644
--- a/src/3rdparty/v8/src/mips/lithium-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-mips.cc
@@ -194,22 +194,22 @@ void LGoto::PrintDataTo(StringStream* stream) {
void LBranch::PrintDataTo(StringStream* stream) {
stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ left()->PrintTo(stream);
stream->Add(" %s ", Token::String(op()));
- InputAt(1)->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(kind() == kStrictEquality ? " === " : " == ");
stream->Add(nil() == kNullValue ? "null" : "undefined");
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
@@ -218,57 +218,57 @@ void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_object(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_string(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_undetectable(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if string_compare(");
- InputAt(0)->PrintTo(stream);
- InputAt(1)->PrintTo(stream);
+ left()->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_cached_array_index(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if class_of_test(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(", \"%o\") then B%d else B%d",
*hydrogen()->class_name(),
true_block_id(),
@@ -278,7 +278,7 @@ void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if typeof ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(" == \"%s\" then B%d else B%d",
*hydrogen()->type_literal()->ToCString(),
true_block_id(), false_block_id());
@@ -292,26 +292,26 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
stream->Add("/%s ", hydrogen()->OpName());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
}
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d] <- ", slot_index());
- InputAt(1)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LInvokeFunction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ function()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -340,17 +340,15 @@ void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
void LCallNew::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ constructor()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
arguments()->PrintTo(stream);
-
stream->Add(" length ");
length()->PrintTo(stream);
-
stream->Add(" index ");
index()->PrintTo(stream);
}
@@ -374,16 +372,7 @@ void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
}
-void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add("[");
- key()->PrintTo(stream);
- stream->Add("] <- ");
- value()->PrintTo(stream);
-}
-
-
-void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
+void LStoreKeyed::PrintDataTo(StringStream* stream) {
elements()->PrintTo(stream);
stream->Add("[");
key()->PrintTo(stream);
@@ -407,146 +396,26 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
}
-LChunk::LChunk(CompilationInfo* info, HGraph* graph)
- : spill_slot_count_(0),
- info_(info),
- graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) {
-}
-
-
-int LChunk::GetNextSpillIndex(bool is_double) {
+int LPlatformChunk::GetNextSpillIndex(bool is_double) {
// Skip a slot if for a double-width slot.
if (is_double) spill_slot_count_++;
return spill_slot_count_++;
}
-LOperand* LChunk::GetNextSpillSlot(bool is_double) {
+LOperand* LPlatformChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
- } else {
- return LStackSlot::Create(index);
- }
-}
-
-
-void LChunk::MarkEmptyBlocks() {
- HPhase phase("L_Mark empty blocks", this);
- for (int i = 0; i < graph()->blocks()->length(); ++i) {
- HBasicBlock* block = graph()->blocks()->at(i);
- int first = block->first_instruction_index();
- int last = block->last_instruction_index();
- LInstruction* first_instr = instructions()->at(first);
- LInstruction* last_instr = instructions()->at(last);
-
- LLabel* label = LLabel::cast(first_instr);
- if (last_instr->IsGoto()) {
- LGoto* goto_instr = LGoto::cast(last_instr);
- if (label->IsRedundant() &&
- !label->is_loop_header()) {
- bool can_eliminate = true;
- for (int i = first + 1; i < last && can_eliminate; ++i) {
- LInstruction* cur = instructions()->at(i);
- if (cur->IsGap()) {
- LGap* gap = LGap::cast(cur);
- if (!gap->IsRedundant()) {
- can_eliminate = false;
- }
- } else {
- can_eliminate = false;
- }
- }
-
- if (can_eliminate) {
- label->set_replacement(GetLabel(goto_instr->block_id()));
- }
- }
- }
- }
-}
-
-
-void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
- int index = -1;
- if (instr->IsControl()) {
- instructions_.Add(gap);
- index = instructions_.length();
- instructions_.Add(instr);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
- }
- if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
- instr->pointer_map()->set_lithium_position(index);
+ return LStackSlot::Create(index, zone());
}
}
-LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
-}
-
-
-int LChunk::GetParameterStackSlot(int index) const {
- // The receiver is at index 0, the first parameter at index 1, so we
- // shift all parameter indexes down by the number of parameters, and
- // make sure they end up negative so they are distinguishable from
- // spill slots.
- int result = index - info()->scope()->num_parameters() - 1;
- ASSERT(result < 0);
- return result;
-}
-
-// A parameter relative to ebp in the arguments stub.
-int LChunk::ParameterAt(int index) {
- ASSERT(-1 <= index); // -1 is the receiver.
- return (1 + info()->scope()->num_parameters() - index) *
- kPointerSize;
-}
-
-
-LGap* LChunk::GetGapAt(int index) const {
- return LGap::cast(instructions_[index]);
-}
-
-
-bool LChunk::IsGapAt(int index) const {
- return instructions_[index]->IsGap();
-}
-
-
-int LChunk::NearestGapPos(int index) const {
- while (!IsGapAt(index)) index--;
- return index;
-}
-
-
-void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
-}
-
-
-Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
- return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
-}
-
-
-Representation LChunk::LookupLiteralRepresentation(
- LConstantOperand* operand) const {
- return graph_->LookupValue(operand->index())->representation();
-}
-
-
-LChunk* LChunkBuilder::Build() {
+LPlatformChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new(zone()) LChunk(info(), graph());
+ chunk_ = new(zone()) LPlatformChunk(info(), graph());
HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@@ -561,17 +430,8 @@ LChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LChunk building in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LCodeGen::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -740,7 +600,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
@@ -762,7 +622,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -835,13 +695,16 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
// Shift operations can only deoptimize if we do a logical shift
// by 0 and the result cannot be truncated to int32.
- bool may_deopt = (op == Token::SHR && constant_value == 0);
bool does_deopt = false;
- if (may_deopt) {
- for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
- if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
- does_deopt = true;
- break;
+ if (op == Token::SHR && constant_value == 0) {
+ if (FLAG_opt_safe_uint32_operations) {
+ does_deopt = !instr->CheckFlag(HInstruction::kUint32);
+ } else {
+ for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
+ does_deopt = true;
+ break;
+ }
}
}
}
@@ -974,8 +837,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
- int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber ||
+ BailoutId ast_id = hydrogen_env->ast_id();
+ ASSERT(!ast_id.IsNone() ||
hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result = new(zone()) LEnvironment(
@@ -985,7 +848,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ hydrogen_env->entry(),
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -999,7 +864,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
} else {
op = UseAny(value);
}
- result->AddValue(op, value->representation());
+ result->AddValue(op,
+ value->representation(),
+ value->CheckFlag(HInstruction::kUint32));
}
if (hydrogen_env->frame_type() == JS_FUNCTION) {
@@ -1415,6 +1282,25 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
}
+LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
+ LOperand* left = NULL;
+ LOperand* right = NULL;
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ left = UseRegisterAtStart(instr->LeastConstantOperand());
+ right = UseOrConstantAtStart(instr->MostConstantOperand());
+ } else {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ left = UseRegisterAtStart(instr->left());
+ right = UseRegisterAtStart(instr->right());
+ }
+ return DefineAsRegister(new(zone()) LMathMinMax(left, right));
+}
+
+
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
ASSERT(instr->representation().IsDouble());
// We call a C function for double power. It can't trigger a GC.
@@ -1580,6 +1466,12 @@ LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
}
+LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
+ LOperand* map = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LMapEnumLength(map));
+}
+
+
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LElementsKind(object));
@@ -1595,13 +1487,14 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), a0);
- LDateField* result = new LDateField(object, FixedTemp(a1), instr->index());
- return MarkAsCall(DefineFixed(result, v0), instr);
+ LDateField* result =
+ new(zone()) LDateField(object, FixedTemp(a1), instr->index());
+ return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1644,10 +1537,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegisterAtStart(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
- if (!needs_check) {
- res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
+ if (instr->value()->type().IsSmi()) {
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
@@ -1686,7 +1578,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (to.IsTagged()) {
HValue* val = instr->value();
LOperand* value = UseRegisterAtStart(val);
- if (val->HasRange() && val->range()->IsInSmiRange()) {
+ if (val->CheckFlag(HInstruction::kUint32)) {
+ LNumberTagU* result = new(zone()) LNumberTagU(value);
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ } else if (val->HasRange() && val->range()->IsInSmiRange()) {
return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
LNumberTagI* result = new(zone()) LNumberTagI(value);
@@ -1694,8 +1589,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
}
} else {
ASSERT(to.IsDouble());
- LOperand* value = Use(instr->value());
- return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
+ if (instr->value()->CheckFlag(HInstruction::kUint32)) {
+ return DefineAsRegister(
+ new(zone()) LUint32ToDouble(UseRegister(instr->value())));
+ } else {
+ return DefineAsRegister(
+ new(zone()) LInteger32ToDouble(Use(instr->value())));
+ }
}
}
UNREACHABLE();
@@ -1717,10 +1617,10 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LOperand* temp1 = TempRegister();
+ LUnallocated* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
- return AssignEnvironment(result);
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
+ return AssignEnvironment(Define(result, temp1));
}
@@ -1889,51 +1789,40 @@ LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
}
-LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
- HLoadKeyedFastElement* instr) {
- ASSERT(instr->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
- if (instr->RequiresHoleCheck()) AssignEnvironment(result);
- return DefineAsRegister(result);
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
- HLoadKeyedFastDoubleElement* instr) {
- ASSERT(instr->representation().IsDouble());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* elements = UseTempRegister(instr->elements());
+LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
+ ASSERT(instr->key()->representation().IsInteger32() ||
+ instr->key()->representation().IsTagged());
+ ElementsKind elements_kind = instr->elements_kind();
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastDoubleElement* result =
- new(zone()) LLoadKeyedFastDoubleElement(elements, key);
- return AssignEnvironment(DefineAsRegister(result));
-}
+ LLoadKeyed* result = NULL;
+ if (!instr->is_external()) {
+ LOperand* obj = NULL;
+ if (instr->representation().IsDouble()) {
+ obj = UseTempRegister(instr->elements());
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ obj = UseRegisterAtStart(instr->elements());
+ }
+ result = new(zone()) LLoadKeyed(obj, key);
+ } else {
+ ASSERT(
+ (instr->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ LOperand* external_pointer = UseRegister(instr->elements());
+ result = new(zone()) LLoadKeyed(external_pointer, key);
+ }
-LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
- HLoadKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- Representation representation(instr->representation());
- ASSERT(
- (representation.IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- LOperand* key = UseRegisterOrConstant(instr->key());
- LLoadKeyedSpecializedArrayElement* result =
- new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
- LInstruction* load_instr = DefineAsRegister(result);
+ DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
- return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
- AssignEnvironment(load_instr) : load_instr;
+ bool can_deoptimize = instr->RequiresHoleCheck() ||
+ (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
+ return can_deoptimize ? AssignEnvironment(result) : result;
}
@@ -1947,64 +1836,47 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
- HStoreKeyedFastElement* instr) {
+LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
+ ElementsKind elements_kind = instr->elements_kind();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- ASSERT(instr->value()->representation().IsTagged());
- ASSERT(instr->object()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* obj = UseTempRegister(instr->object());
- LOperand* val = needs_write_barrier
- ? UseTempRegister(instr->value())
- : UseRegisterAtStart(instr->value());
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
- return new(zone()) LStoreKeyedFastElement(obj, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
- HStoreKeyedFastDoubleElement* instr) {
- ASSERT(instr->value()->representation().IsDouble());
- ASSERT(instr->elements()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* val = UseTempRegister(instr->value());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
-
- return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
- HStoreKeyedSpecializedArrayElement* instr) {
- Representation representation(instr->value()->representation());
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (representation.IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->external_pointer()->representation().IsExternal());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* external_pointer = UseRegister(instr->external_pointer());
bool val_is_temp_register =
elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
elements_kind == EXTERNAL_FLOAT_ELEMENTS;
- LOperand* val = val_is_temp_register
+ LOperand* val = val_is_temp_register || needs_write_barrier
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- LOperand* key = UseRegisterOrConstant(instr->key());
+ LStoreKeyed* result = NULL;
+ if (!instr->is_external()) {
+ ASSERT(instr->elements()->representation().IsTagged());
+
+ LOperand* object = NULL;
+ if (instr->value()->representation().IsDouble()) {
+ object = UseRegisterAtStart(instr->elements());
+ } else {
+ ASSERT(instr->value()->representation().IsTagged());
+ object = UseTempRegister(instr->elements());
+ }
+
+ result = new(zone()) LStoreKeyed(object, key, val);
+ } else {
+ ASSERT(
+ (instr->value()->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->value()->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ ASSERT(instr->elements()->representation().IsExternal());
+
+ LOperand* external_pointer = UseRegister(instr->elements());
+ result = new(zone()) LStoreKeyed(external_pointer, key, val);
+ }
- return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ ASSERT(result != NULL);
+ return result;
}
@@ -2023,8 +1895,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
@@ -2045,16 +1918,28 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new(zone()) LStoreNamedField(obj, val);
+ // We need a temporary register for write barrier of the map field.
+ LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
+
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2097,8 +1982,8 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new(zone()) LAllocateObject(
- TempRegister(), TempRegister());
+ LAllocateObject* result =
+ new(zone()) LAllocateObject(TempRegister(), TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2137,6 +2022,7 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
+ ASSERT(argument_count_ == 0);
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
return AssignEnvironment(new(zone()) LOsrEntry);
@@ -2175,12 +2061,10 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
- LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* args = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result =
- new(zone()) LAccessArgumentsAt(arguments, length, index);
- return AssignEnvironment(DefineAsRegister(result));
+ return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
@@ -2234,7 +2118,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
+ pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
@@ -2260,10 +2144,11 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->function(),
undefined,
instr->call_kind(),
- instr->is_construct());
+ instr->inlining_kind());
if (instr->arguments_var() != NULL) {
inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
+ inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2275,7 +2160,7 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
HEnvironment* env = current_block_->last_environment();
- if (instr->arguments_pushed()) {
+ if (env->entry()->arguments_pushed()) {
int argument_count = env->arguments_environment()->parameter_count();
pop = new(zone()) LDrop(argument_count);
argument_count_ -= argument_count;
@@ -2306,8 +2191,7 @@ LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
- return AssignEnvironment(DefineAsRegister(
- new(zone()) LForInCacheArray(map)));
+ return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
}
diff --git a/src/3rdparty/v8/src/mips/lithium-mips.h b/src/3rdparty/v8/src/mips/lithium-mips.h
index 62c5398..00e21fd 100644
--- a/src/3rdparty/v8/src/mips/lithium-mips.h
+++ b/src/3rdparty/v8/src/mips/lithium-mips.h
@@ -108,6 +108,7 @@ class LCodeGen;
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
+ V(Uint32ToDouble) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
@@ -115,7 +116,6 @@ class LCodeGen;
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
- V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -125,17 +125,18 @@ class LCodeGen;
V(LoadFunctionPrototype) \
V(LoadGlobalCell) \
V(LoadGlobalGeneric) \
- V(LoadKeyedFastDoubleElement) \
- V(LoadKeyedFastElement) \
+ V(LoadKeyed) \
V(LoadKeyedGeneric) \
- V(LoadKeyedSpecializedArrayElement) \
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MapEnumLength) \
+ V(MathMinMax) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
V(NumberTagI) \
+ V(NumberTagU) \
V(NumberUntagD) \
V(ObjectLiteral) \
V(OsrEntry) \
@@ -153,15 +154,14 @@ class LCodeGen;
V(StoreContextSlot) \
V(StoreGlobalCell) \
V(StoreGlobalGeneric) \
- V(StoreKeyedFastDoubleElement) \
- V(StoreKeyedFastElement) \
+ V(StoreKeyed) \
V(StoreKeyedGeneric) \
- V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
+ V(StringCompareAndBranch) \
V(StringLength) \
V(SubI) \
V(TaggedToI) \
@@ -255,11 +255,6 @@ class LInstruction: public ZoneObject {
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
- virtual int InputCount() = 0;
- virtual LOperand* InputAt(int i) = 0;
- virtual int TempCount() = 0;
- virtual LOperand* TempAt(int i) = 0;
-
LOperand* FirstInput() { return InputAt(0); }
LOperand* Output() { return HasResult() ? result() : NULL; }
@@ -268,6 +263,15 @@ class LInstruction: public ZoneObject {
#endif
private:
+ // Iterator interface.
+ friend class InputIterator;
+ virtual int InputCount() = 0;
+ virtual LOperand* InputAt(int i) = 0;
+
+ friend class TempIterator;
+ virtual int TempCount() = 0;
+ virtual LOperand* TempAt(int i) = 0;
+
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
@@ -288,16 +292,17 @@ class LTemplateInstruction: public LInstruction {
void set_result(LOperand* operand) { results_[0] = operand; }
LOperand* result() { return results_[0]; }
- int InputCount() { return I; }
- LOperand* InputAt(int i) { return inputs_[i]; }
-
- int TempCount() { return T; }
- LOperand* TempAt(int i) { return temps_[i]; }
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
EmbeddedContainer<LOperand*, T> temps_;
+
+ private:
+ virtual int InputCount() { return I; }
+ virtual LOperand* InputAt(int i) { return inputs_[i]; }
+
+ virtual int TempCount() { return T; }
+ virtual LOperand* TempAt(int i) { return temps_[i]; }
};
@@ -332,8 +337,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -513,6 +520,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = elements;
}
+ LOperand* elements() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ArgumentsLength, "arguments-length")
};
@@ -539,16 +548,22 @@ class LModI: public LTemplateInstruction<1, 2, 3> {
// Used for the standard case.
LModI(LOperand* left,
LOperand* right,
- LOperand* temp1,
+ LOperand* temp,
LOperand* temp2,
LOperand* temp3) {
inputs_[0] = left;
inputs_[1] = right;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
temps_[2] = temp3;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+ LOperand* temp3() { return temps_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(ModI, "mod-i")
DECLARE_HYDROGEN_ACCESSOR(Mod)
};
@@ -561,6 +576,9 @@ class LDivI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i")
DECLARE_HYDROGEN_ACCESSOR(Div)
};
@@ -574,6 +592,10 @@ class LMulI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(MulI, "mul-i")
DECLARE_HYDROGEN_ACCESSOR(Mul)
};
@@ -586,6 +608,9 @@ class LCmpIDAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpIDAndBranch, "cmp-id-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareIDAndBranch)
@@ -605,6 +630,9 @@ class LUnaryMathOperation: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(UnaryMathOperation, "unary-math-operation")
DECLARE_HYDROGEN_ACCESSOR(UnaryMathOperation)
@@ -620,6 +648,9 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
"cmp-object-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareObjectEqAndBranch)
@@ -632,6 +663,8 @@ class LCmpConstantEqAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = left;
}
+ LOperand* left() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpConstantEqAndBranch,
"cmp-constant-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareConstantEqAndBranch)
@@ -644,6 +677,8 @@ class LIsNilAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsNilAndBranch, "is-nil-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNilAndBranch)
@@ -661,6 +696,9 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsObjectAndBranch, "is-object-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsObjectAndBranch)
@@ -675,6 +713,9 @@ class LIsStringAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
@@ -688,6 +729,8 @@ class LIsSmiAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsSmiAndBranch, "is-smi-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsSmiAndBranch)
@@ -702,6 +745,9 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsUndetectableAndBranch,
"is-undetectable-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsUndetectableAndBranch)
@@ -717,6 +763,9 @@ class LStringCompareAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
"string-compare-and-branch")
DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
@@ -733,6 +782,8 @@ class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasInstanceTypeAndBranch,
"has-instance-type-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInstanceTypeAndBranch)
@@ -747,6 +798,8 @@ class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
};
@@ -758,6 +811,8 @@ class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasCachedArrayIndexAndBranch,
"has-cached-array-index-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasCachedArrayIndexAndBranch)
@@ -773,6 +828,9 @@ class LClassOfTestAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ClassOfTestAndBranch,
"class-of-test-and-branch")
DECLARE_HYDROGEN_ACCESSOR(ClassOfTestAndBranch)
@@ -788,6 +846,9 @@ class LCmpT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpT, "cmp-t")
DECLARE_HYDROGEN_ACCESSOR(CompareGeneric)
@@ -802,6 +863,9 @@ class LInstanceOf: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
@@ -813,6 +877,9 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
@@ -841,6 +908,7 @@ class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
LOperand* length() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck, "bounds-check")
+ DECLARE_HYDROGEN_ACCESSOR(BoundsCheck)
};
@@ -851,6 +919,9 @@ class LBitI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
Token::Value op() const { return hydrogen()->op(); }
DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i")
@@ -867,7 +938,8 @@ class LShiftI: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
-
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
bool can_deopt() const { return can_deopt_; }
DECLARE_CONCRETE_INSTRUCTION(ShiftI, "shift-i")
@@ -885,6 +957,9 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(SubI, "sub-i")
DECLARE_HYDROGEN_ACCESSOR(Sub)
};
@@ -923,6 +998,8 @@ class LBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")
DECLARE_HYDROGEN_ACCESSOR(Branch)
@@ -937,6 +1014,9 @@ class LCmpMapAndBranch: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareMap)
@@ -958,6 +1038,8 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength, "js-array-length")
DECLARE_HYDROGEN_ACCESSOR(JSArrayLength)
};
@@ -969,18 +1051,34 @@ class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
"fixed-array-base-length")
DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
+class LMapEnumLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LMapEnumLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MapEnumLength, "map-enum-length")
+};
+
+
class LElementsKind: public LTemplateInstruction<1, 1, 0> {
public:
explicit LElementsKind(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ElementsKind, "elements-kind")
DECLARE_HYDROGEN_ACCESSOR(ElementsKind)
};
@@ -993,6 +1091,9 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "value-of")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
};
@@ -1005,9 +1106,12 @@ class LDateField: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* date() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ Smi* index() const { return index_; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
- Smi* index() const { return index_; }
private:
Smi* index_;
@@ -1020,6 +1124,8 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Throw, "throw")
};
@@ -1030,6 +1136,8 @@ class LBitNotI: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
};
@@ -1041,11 +1149,29 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
DECLARE_HYDROGEN_ACCESSOR(Add)
};
+class LMathMinMax: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LMathMinMax(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathMinMax, "min-max")
+ DECLARE_HYDROGEN_ACCESSOR(MathMinMax)
+};
+
+
class LPower: public LTemplateInstruction<1, 2, 0> {
public:
LPower(LOperand* left, LOperand* right) {
@@ -1053,6 +1179,9 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(Power, "power")
DECLARE_HYDROGEN_ACCESSOR(Power)
};
@@ -1064,6 +1193,8 @@ class LRandom: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Random, "random")
DECLARE_HYDROGEN_ACCESSOR(Random)
};
@@ -1078,6 +1209,8 @@ class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
virtual Opcode opcode() const { return LInstruction::kArithmeticD; }
virtual void CompileToNative(LCodeGen* generator);
@@ -1096,12 +1229,14 @@ class LArithmeticT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ Token::Value op() const { return op_; }
+
virtual Opcode opcode() const { return LInstruction::kArithmeticT; }
virtual void CompileToNative(LCodeGen* generator);
virtual const char* Mnemonic() const;
- Token::Value op() const { return op_; }
-
private:
Token::Value op_;
};
@@ -1113,6 +1248,8 @@ class LReturn: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
};
@@ -1123,6 +1260,8 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedField)
};
@@ -1134,10 +1273,10 @@ class LLoadNamedFieldPolymorphic: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field-polymorphic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedFieldPolymorphic)
-
- LOperand* object() { return inputs_[0]; }
};
@@ -1147,10 +1286,11 @@ class LLoadNamedGeneric: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric, "load-named-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedGeneric)
- LOperand* object() { return inputs_[0]; }
Handle<Object> name() const { return hydrogen()->name(); }
};
@@ -1161,10 +1301,10 @@ class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadFunctionPrototype, "load-function-prototype")
DECLARE_HYDROGEN_ACCESSOR(LoadFunctionPrototype)
-
- LOperand* function() { return inputs_[0]; }
};
@@ -1174,6 +1314,8 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
};
@@ -1184,73 +1326,47 @@ class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadExternalArrayPointer,
"load-external-array-pointer")
};
-class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyed: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
+ LLoadKeyed(LOperand* elements, LOperand* key) {
inputs_[0] = elements;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement, "load-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastElement)
-
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
-};
-
-
-class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
- inputs_[0] = elements;
- inputs_[1] = key;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
}
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement,
- "load-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastDoubleElement)
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
-};
-
-
-class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
+ bool is_external() const {
+ return hydrogen()->is_external();
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
- "load-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(LoadKeyed)
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
- inputs_[0] = obj;
+ LLoadKeyedGeneric(LOperand* object, LOperand* key) {
+ inputs_[0] = object;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
};
@@ -1267,10 +1383,11 @@ class LLoadGlobalGeneric: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadGlobalGeneric, "load-global-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadGlobalGeneric)
- LOperand* global_object() { return inputs_[0]; }
Handle<Object> name() const { return hydrogen()->name(); }
bool for_typeof() const { return hydrogen()->for_typeof(); }
};
@@ -1283,10 +1400,11 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
-
- LOperand* value() { return inputs_[0]; }
};
@@ -1298,12 +1416,13 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* global_object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalGeneric, "store-global-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalGeneric)
- LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
- LOperand* value() { return InputAt(1); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1314,10 +1433,11 @@ class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
- LOperand* context() { return InputAt(0); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1331,11 +1451,12 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot, "store-context-slot")
DECLARE_HYDROGEN_ACCESSOR(StoreContextSlot)
- LOperand* context() { return InputAt(0); }
- LOperand* value() { return InputAt(1); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1348,6 +1469,8 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(PushArgument, "push-argument")
};
@@ -1384,9 +1507,9 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
- DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
+ LOperand* context() { return inputs_[0]; }
- LOperand* context() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
};
@@ -1404,10 +1527,12 @@ class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
qml_global_ = qml_global;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
- LOperand* context() { return InputAt(0); }
bool qml_global() { return qml_global_; }
+
private:
bool qml_global_;
};
@@ -1419,9 +1544,9 @@ class LGlobalReceiver: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
- DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
+ LOperand* global_object() { return inputs_[0]; }
- LOperand* global() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
};
@@ -1443,11 +1568,11 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
- LOperand* function() { return inputs_[0]; }
-
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1461,6 +1586,8 @@ class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = key;
}
+ LOperand* key() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call-keyed")
DECLARE_HYDROGEN_ACCESSOR(CallKeyed)
@@ -1489,10 +1616,11 @@ class LCallFunction: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- LOperand* function() { return inputs_[0]; }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1533,6 +1661,8 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = constructor;
}
+ LOperand* constructor() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
DECLARE_HYDROGEN_ACCESSOR(CallNew)
@@ -1558,28 +1688,60 @@ class LInteger32ToDouble: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Integer32ToDouble, "int32-to-double")
};
+class LUint32ToDouble: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LUint32ToDouble(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Uint32ToDouble, "uint32-to-double")
+};
+
+
class LNumberTagI: public LTemplateInstruction<1, 1, 0> {
public:
explicit LNumberTagI(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagI, "number-tag-i")
};
+class LNumberTagU: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LNumberTagU(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(NumberTagU, "number-tag-u")
+};
+
+
class LNumberTagD: public LTemplateInstruction<1, 1, 2> {
public:
- LNumberTagD(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ LNumberTagD(LOperand* value, LOperand* temp, LOperand* temp2) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagD, "number-tag-d")
};
@@ -1587,12 +1749,16 @@ class LNumberTagD: public LTemplateInstruction<1, 1, 2> {
// Sometimes truncating conversion from a tagged value to an int32.
class LDoubleToI: public LTemplateInstruction<1, 1, 2> {
public:
- LDoubleToI(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ LDoubleToI(LOperand* value, LOperand* temp, LOperand* temp2) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(DoubleToI, "double-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1604,15 +1770,20 @@ class LDoubleToI: public LTemplateInstruction<1, 1, 2> {
class LTaggedToI: public LTemplateInstruction<1, 1, 3> {
public:
LTaggedToI(LOperand* value,
- LOperand* temp1,
+ LOperand* temp,
LOperand* temp2,
LOperand* temp3) {
inputs_[0] = value;
- temps_[0] = temp1;
+ temps_[0] = temp;
temps_[1] = temp2;
temps_[2] = temp3;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+ LOperand* temp3() { return temps_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(TaggedToI, "tagged-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1626,6 +1797,8 @@ class LSmiTag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(SmiTag, "smi-tag")
};
@@ -1636,6 +1809,8 @@ class LNumberUntagD: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
DECLARE_HYDROGEN_ACCESSOR(Change)
};
@@ -1648,30 +1823,33 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
-
+ LOperand* value() { return inputs_[0]; }
bool needs_check() const { return needs_check_; }
+ DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
+
private:
bool needs_check_;
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val) {
- inputs_[0] = obj;
- inputs_[1] = val;
+ LStoreNamedField(LOperand* object, LOperand* value, LOperand* temp) {
+ inputs_[0] = object;
+ inputs_[1] = value;
+ temps_[0] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedField)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
-
Handle<Object> name() const { return hydrogen()->name(); }
bool is_in_object() { return hydrogen()->is_in_object(); }
int offset() { return hydrogen()->offset(); }
@@ -1681,106 +1859,67 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
public:
- LStoreNamedGeneric(LOperand* obj, LOperand* val) {
- inputs_[0] = obj;
- inputs_[1] = val;
+ LStoreNamedGeneric(LOperand* object, LOperand* value) {
+ inputs_[0] = object;
+ inputs_[1] = value;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
-class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyed: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedFastElement(LOperand* obj, LOperand* key, LOperand* val) {
- inputs_[0] = obj;
+ LStoreKeyed(LOperand* object, LOperand* key, LOperand* value) {
+ inputs_[0] = object;
inputs_[1] = key;
- inputs_[2] = val;
+ inputs_[2] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement,
- "store-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastElement)
-
- virtual void PrintDataTo(StringStream* stream);
-
- LOperand* object() { return inputs_[0]; }
+ bool is_external() const { return hydrogen()->is_external(); }
+ LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
-};
-
-
-class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedFastDoubleElement(LOperand* elements,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = elements;
- inputs_[1] = key;
- inputs_[2] = val;
+ ElementsKind elements_kind() const {
+ return hydrogen()->elements_kind();
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement,
- "store-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastDoubleElement)
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyed, "store-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyed)
virtual void PrintDataTo(StringStream* stream);
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
-
bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedGeneric(LOperand* obj, LOperand* key, LOperand* val) {
+ LStoreKeyedGeneric(LOperand* obj, LOperand* key, LOperand* value) {
inputs_[0] = obj;
inputs_[1] = key;
- inputs_[2] = val;
+ inputs_[2] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
- virtual void PrintDataTo(StringStream* stream);
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
-};
-class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- inputs_[2] = val;
- }
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
- "store-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)
+ virtual void PrintDataTo(StringStream* stream);
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1788,21 +1927,22 @@ class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
public:
LTransitionElementsKind(LOperand* object,
LOperand* new_map_temp,
- LOperand* temp_reg) {
+ LOperand* temp) {
inputs_[0] = object;
temps_[0] = new_map_temp;
- temps_[1] = temp_reg;
+ temps_[1] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* new_map_temp() { return temps_[0]; }
+ LOperand* temp() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
"transition-elements-kind")
DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* new_map_reg() { return temps_[0]; }
- LOperand* temp_reg() { return temps_[1]; }
Handle<Map> original_map() { return hydrogen()->original_map(); }
Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
};
@@ -1815,11 +1955,11 @@ class LStringAdd: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+ DECLARE_HYDROGEN_ACCESSOR(StringAdd)
};
@@ -1831,11 +1971,11 @@ class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
- DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
-
LOperand* string() { return inputs_[0]; }
LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
+ DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
};
@@ -1845,10 +1985,10 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = char_code;
}
+ LOperand* char_code() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
-
- LOperand* char_code() { return inputs_[0]; }
};
@@ -1858,10 +1998,10 @@ class LStringLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = string;
}
+ LOperand* string() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
DECLARE_HYDROGEN_ACCESSOR(StringLength)
-
- LOperand* string() { return inputs_[0]; }
};
@@ -1871,7 +2011,7 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
- LOperand* value() { return InputAt(0); }
+ LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
@@ -1884,6 +2024,8 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckInstanceType, "check-instance-type")
DECLARE_HYDROGEN_ACCESSOR(CheckInstanceType)
};
@@ -1895,18 +2037,23 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
+class LCheckPrototypeMaps: public LTemplateInstruction<1, 0, 2> {
public:
- LCheckPrototypeMaps(LOperand* temp1, LOperand* temp2) {
- temps_[0] = temp1;
+ LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
@@ -1921,6 +2068,8 @@ class LCheckSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckSmi, "check-smi")
};
@@ -1931,18 +2080,21 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check-non-smi")
};
class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
public:
- LClampDToUint8(LOperand* value, LOperand* temp) {
- inputs_[0] = value;
+ LClampDToUint8(LOperand* unclamped, LOperand* temp) {
+ inputs_[0] = unclamped;
temps_[0] = temp;
}
LOperand* unclamped() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(ClampDToUint8, "clamp-d-to-uint8")
};
@@ -1950,8 +2102,8 @@ class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LClampIToUint8(LOperand* value) {
- inputs_[0] = value;
+ explicit LClampIToUint8(LOperand* unclamped) {
+ inputs_[0] = unclamped;
}
LOperand* unclamped() { return inputs_[0]; }
@@ -1962,12 +2114,13 @@ class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
public:
- LClampTToUint8(LOperand* value, LOperand* temp) {
- inputs_[0] = value;
+ LClampTToUint8(LOperand* unclamped, LOperand* temp) {
+ inputs_[0] = unclamped;
temps_[0] = temp;
}
LOperand* unclamped() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(ClampTToUint8, "clamp-t-to-uint8")
};
@@ -1975,11 +2128,14 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
public:
- LAllocateObject(LOperand* temp1, LOperand* temp2) {
- temps_[0] = temp1;
+ LAllocateObject(LOperand* temp, LOperand* temp2) {
+ temps_[0] = temp;
temps_[1] = temp2;
}
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
};
@@ -2028,6 +2184,8 @@ class LToFastProperties: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ToFastProperties, "to-fast-properties")
DECLARE_HYDROGEN_ACCESSOR(ToFastProperties)
};
@@ -2039,6 +2197,8 @@ class LTypeof: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Typeof, "typeof")
};
@@ -2049,6 +2209,8 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TypeofIsAndBranch, "typeof-is-and-branch")
DECLARE_HYDROGEN_ACCESSOR(TypeofIsAndBranch)
@@ -2064,6 +2226,8 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
"is-construct-call-and-branch")
};
@@ -2071,15 +2235,15 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
class LDeleteProperty: public LTemplateInstruction<1, 2, 0> {
public:
- LDeleteProperty(LOperand* obj, LOperand* key) {
- inputs_[0] = obj;
+ LDeleteProperty(LOperand* object, LOperand* key) {
+ inputs_[0] = object;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
};
@@ -2189,63 +2353,13 @@ class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
class LChunkBuilder;
-class LChunk: public ZoneObject {
+class LPlatformChunk: public LChunk {
public:
- explicit LChunk(CompilationInfo* info, HGraph* graph);
-
- void AddInstruction(LInstruction* instruction, HBasicBlock* block);
- LConstantOperand* DefineConstantOperand(HConstant* constant);
- Handle<Object> LookupLiteral(LConstantOperand* operand) const;
- Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
+ LPlatformChunk(CompilationInfo* info, HGraph* graph)
+ : LChunk(info, graph) { }
int GetNextSpillIndex(bool is_double);
LOperand* GetNextSpillSlot(bool is_double);
-
- int ParameterAt(int index);
- int GetParameterStackSlot(int index) const;
- int spill_slot_count() const { return spill_slot_count_; }
- CompilationInfo* info() const { return info_; }
- HGraph* graph() const { return graph_; }
- const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
- void AddGapMove(int index, LOperand* from, LOperand* to);
- LGap* GetGapAt(int index) const;
- bool IsGapAt(int index) const;
- int NearestGapPos(int index) const;
- void MarkEmptyBlocks();
- const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
- LLabel* GetLabel(int block_id) const {
- HBasicBlock* block = graph_->blocks()->at(block_id);
- int first_instruction = block->first_instruction_index();
- return LLabel::cast(instructions_[first_instruction]);
- }
- int LookupDestination(int block_id) const {
- LLabel* cur = GetLabel(block_id);
- while (cur->replacement() != NULL) {
- cur = cur->replacement();
- }
- return cur->block_id();
- }
- Label* GetAssemblyLabel(int block_id) const {
- LLabel* label = GetLabel(block_id);
- ASSERT(!label->HasReplacement());
- return label->label();
- }
-
- const ZoneList<Handle<JSFunction> >* inlined_closures() const {
- return &inlined_closures_;
- }
-
- void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
- }
-
- private:
- int spill_slot_count_;
- CompilationInfo* info_;
- HGraph* const graph_;
- ZoneList<LInstruction*> instructions_;
- ZoneList<LPointerMap*> pointer_maps_;
- ZoneList<Handle<JSFunction> > inlined_closures_;
};
@@ -2255,7 +2369,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2264,10 +2378,10 @@ class LChunkBuilder BASE_EMBEDDED {
allocator_(allocator),
position_(RelocInfo::kNoPosition),
instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(AstNode::kNoNumber) { }
+ pending_deoptimization_ast_id_(BailoutId::None()) { }
// Build the sequence for the graph.
- LChunk* Build();
+ LPlatformChunk* Build();
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) LInstruction* Do##type(H##type* node);
@@ -2282,7 +2396,7 @@ class LChunkBuilder BASE_EMBEDDED {
ABORTED
};
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
Zone* zone() const { return zone_; }
@@ -2292,7 +2406,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
@@ -2383,7 +2497,7 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* DoArithmeticT(Token::Value op,
HArithmeticBinaryOperation* instr);
- LChunk* chunk_;
+ LPlatformChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Zone* zone_;
@@ -2395,7 +2509,7 @@ class LChunkBuilder BASE_EMBEDDED {
LAllocator* allocator_;
int position_;
LInstruction* instruction_pending_deoptimization_environment_;
- int pending_deoptimization_ast_id_;
+ BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/src/3rdparty/v8/src/mips/macro-assembler-mips.cc b/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
index 2c2445b..aebfe73 100644
--- a/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
@@ -361,28 +361,29 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
scratch, Operand(zero_reg));
#endif
- // Load the global context of the current context.
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ // Load the native context of the current context.
+ int offset =
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
lw(scratch, FieldMemOperand(scratch, offset));
- lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
// TODO(119): Avoid push(holder_reg)/pop(holder_reg).
push(holder_reg); // Temporarily save holder on the stack.
- // Read the first word and compare to the global_context_map.
+ // Read the first word and compare to the native_context_map.
lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
- LoadRoot(at, Heap::kGlobalContextMapRootIndex);
- Check(eq, "JSGlobalObject::global_context should be a global context.",
+ LoadRoot(at, Heap::kNativeContextMapRootIndex);
+ Check(eq, "JSGlobalObject::native_context should be a native context.",
holder_reg, Operand(at));
pop(holder_reg); // Restore holder.
}
// Check if both contexts are the same.
- lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
Branch(&same_contexts, eq, scratch, Operand(at));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
// TODO(119): Avoid push(holder_reg)/pop(holder_reg).
push(holder_reg); // Temporarily save holder on the stack.
@@ -392,13 +393,13 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
holder_reg, Operand(at));
lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
- LoadRoot(at, Heap::kGlobalContextMapRootIndex);
- Check(eq, "JSGlobalObject::global_context should be a global context.",
+ LoadRoot(at, Heap::kNativeContextMapRootIndex);
+ Check(eq, "JSGlobalObject::native_context should be a native context.",
holder_reg, Operand(at));
// Restore at is not needed. at is reloaded below.
pop(holder_reg); // Restore holder.
// Restore at to holder's context.
- lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
}
// Check that the security token in the calling global object is
@@ -2559,7 +2560,7 @@ void MacroAssembler::Call(Address target,
int MacroAssembler::CallSize(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id,
+ TypeFeedbackId ast_id,
Condition cond,
Register rs,
const Operand& rt,
@@ -2571,7 +2572,7 @@ int MacroAssembler::CallSize(Handle<Code> code,
void MacroAssembler::Call(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id,
+ TypeFeedbackId ast_id,
Condition cond,
Register rs,
const Operand& rt,
@@ -2580,7 +2581,7 @@ void MacroAssembler::Call(Handle<Code> code,
Label start;
bind(&start);
ASSERT(RelocInfo::IsCodeTarget(rmode));
- if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
+ if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
SetRecordedAstId(ast_id);
rmode = RelocInfo::CODE_TARGET_WITH_ID;
}
@@ -3341,33 +3342,39 @@ void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
void MacroAssembler::CheckFastElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- Branch(fail, hi, scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ Branch(fail, hi, scratch,
+ Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
void MacroAssembler::CheckFastObjectElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Branch(fail, ls, scratch,
- Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Branch(fail, hi, scratch,
- Operand(Map::kMaximumBitField2FastElementValue));
+ Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Branch(fail, hi, scratch,
- Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}
@@ -3438,7 +3445,7 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
destination = FloatingPointHelper::kCoreRegisters;
}
- Register untagged_value = receiver_reg;
+ Register untagged_value = elements_reg;
SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(this,
untagged_value,
@@ -3467,28 +3474,33 @@ void MacroAssembler::CompareMapAndBranch(Register obj,
Label* branch_to,
CompareMapMode mode) {
lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ CompareMapAndBranch(scratch, map, early_success, cond, branch_to, mode);
+}
+
+
+void MacroAssembler::CompareMapAndBranch(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode) {
Operand right = Operand(map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- Branch(early_success, eq, scratch, right);
- right = Operand(Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- Branch(early_success, eq, scratch, right);
- right = Operand(Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ Branch(early_success, eq, obj_map, right);
+ right = Operand(Handle<Map>(current_map));
+ }
}
}
- Branch(branch_to, cond, scratch, right);
+ Branch(branch_to, cond, obj_map, right);
}
@@ -3900,7 +3912,8 @@ void MacroAssembler::CallStub(CodeStub* stub,
const Operand& r2,
BranchDelaySlot bd) {
ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
- Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2, bd);
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, TypeFeedbackId::None(),
+ cond, r1, r2, bd);
}
@@ -4270,7 +4283,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the builtins object into target register.
- lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
// Load the JavaScript builtin function from the builtins object.
lw(target, FieldMemOperand(target,
@@ -4439,31 +4452,43 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
- lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+ lw(scratch,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
+ lw(scratch,
+ MemOperand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ lw(at, FieldMemOperand(scratch, offset));
Branch(no_map_match, ne, map_in_out, Operand(at));
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ lw(map_in_out, FieldMemOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
lw(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -4474,11 +4499,12 @@ void MacroAssembler::LoadInitialArrayMap(
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
- lw(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
+ lw(function,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
lw(function, FieldMemOperand(function,
- GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
lw(function, MemOperand(function, Context::SlotOffset(index)));
}
@@ -4772,38 +4798,46 @@ void MacroAssembler::JumpIfEitherSmi(Register reg1,
}
-void MacroAssembler::AbortIfSmi(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- andi(at, object, kSmiTagMask);
- Assert(ne, "Operand is a smi", at, Operand(zero_reg));
+void MacroAssembler::AssertNotSmi(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ andi(at, object, kSmiTagMask);
+ Check(ne, "Operand is a smi", at, Operand(zero_reg));
+ }
}
-void MacroAssembler::AbortIfNotSmi(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- andi(at, object, kSmiTagMask);
- Assert(eq, "Operand is a smi", at, Operand(zero_reg));
+void MacroAssembler::AssertSmi(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ andi(at, object, kSmiTagMask);
+ Check(eq, "Operand is a smi", at, Operand(zero_reg));
+ }
}
-void MacroAssembler::AbortIfNotString(Register object) {
- STATIC_ASSERT(kSmiTag == 0);
- And(t0, object, Operand(kSmiTagMask));
- Assert(ne, "Operand is not a string", t0, Operand(zero_reg));
- push(object);
- lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
- lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Assert(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
- pop(object);
+void MacroAssembler::AssertString(Register object) {
+ if (emit_debug_code()) {
+ STATIC_ASSERT(kSmiTag == 0);
+ And(t0, object, Operand(kSmiTagMask));
+ Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
+ push(object);
+ lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
+ lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
+ Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
+ pop(object);
+ }
}
-void MacroAssembler::AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message) {
- ASSERT(!src.is(at));
- LoadRoot(at, root_value_index);
- Assert(eq, message, src, Operand(at));
+void MacroAssembler::AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message) {
+ if (emit_debug_code()) {
+ ASSERT(!src.is(at));
+ LoadRoot(at, root_value_index);
+ Check(eq, message, src, Operand(at));
+ }
}
@@ -5238,7 +5272,7 @@ void MacroAssembler::EnsureNotWhite(
// For ASCII (char-size of 1) we shift the smi tag away to get the length.
// For UC16 (char-size of 2) we just leave the smi tag in place, thereby
// getting the length multiplied by 2.
- ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4);
+ ASSERT(kOneByteStringTag == 4 && kStringEncodingMask == 4);
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
lw(t9, FieldMemOperand(value, String::kLengthOffset));
And(t8, instance_type, Operand(kStringEncodingMask));
@@ -5269,54 +5303,54 @@ void MacroAssembler::EnsureNotWhite(
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- lw(descriptors,
- FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
- Label not_smi;
- JumpIfNotSmi(descriptors, &not_smi);
- LoadRoot(descriptors, Heap::kEmptyDescriptorArrayRootIndex);
- bind(&not_smi);
+ lw(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
+}
+
+
+void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
+ lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
+ DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
+}
+
+
+void MacroAssembler::EnumLength(Register dst, Register map) {
+ STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
+ lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
+ And(dst, dst, Operand(Smi::FromInt(Map::EnumLengthBits::kMask)));
}
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
- Label next;
- // Preload a couple of values used in the loop.
Register empty_fixed_array_value = t2;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = t3;
- LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- mov(a1, a0);
- bind(&next);
+ Label next, start;
+ mov(a2, a0);
- // Check that there are no elements. Register a1 contains the
- // current JS object we've reached through the prototype chain.
- lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
- Branch(call_runtime, ne, a2, Operand(empty_fixed_array_value));
+ // Check if the enum length field is properly initialized, indicating that
+ // there is an enum cache.
+ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
+
+ EnumLength(a3, a1);
+ Branch(call_runtime, eq, a3, Operand(Smi::FromInt(Map::kInvalidEnumCache)));
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in a2 for the subsequent
- // prototype load.
- lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
- lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOrBitField3Offset));
- JumpIfSmi(a3, call_runtime);
+ jmp(&start);
- // Check that there is an enum cache in the non-empty instance
- // descriptors (a3). This is the case if the next enumeration
- // index field does not contain a smi.
- lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset));
- JumpIfSmi(a3, call_runtime);
+ bind(&next);
+ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
// For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- Branch(&check_prototype, eq, a1, Operand(a0));
- lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset));
- Branch(call_runtime, ne, a3, Operand(empty_fixed_array_value));
+ EnumLength(a3, a1);
+ Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));
+
+ bind(&start);
+
+ // Check that there are no elements. Register r2 contains the current JS
+ // object we've reached through the prototype chain.
+ lw(a2, FieldMemOperand(a2, JSObject::kElementsOffset));
+ Branch(call_runtime, ne, a2, Operand(empty_fixed_array_value));
- // Load the prototype from the map and loop if non-null.
- bind(&check_prototype);
- lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset));
- Branch(&next, ne, a1, Operand(null_value));
+ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
+ Branch(&next, ne, a2, Operand(null_value));
}
@@ -5357,7 +5391,7 @@ void MacroAssembler::ClampDoubleToUint8(Register result_reg,
// In 0-255 range, round and truncate.
bind(&in_bounds);
- round_w_d(temp_double_reg, input_reg);
+ cvt_w_d(temp_double_reg, input_reg);
mfc1(result_reg, temp_double_reg);
bind(&done);
}
diff --git a/src/3rdparty/v8/src/mips/macro-assembler-mips.h b/src/3rdparty/v8/src/mips/macro-assembler-mips.h
index b5897e4..8b7d7c1 100644
--- a/src/3rdparty/v8/src/mips/macro-assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/macro-assembler-mips.h
@@ -108,12 +108,12 @@ inline MemOperand ContextOperand(Register context, int index) {
inline MemOperand GlobalObjectOperand() {
- return ContextOperand(cp, Context::GLOBAL_INDEX);
+ return ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX);
}
static inline MemOperand QmlGlobalObjectOperand() {
- return ContextOperand(cp, Context::QML_GLOBAL_INDEX);
+ return ContextOperand(cp, Context::QML_GLOBAL_OBJECT_INDEX);
}
@@ -187,11 +187,11 @@ class MacroAssembler: public Assembler {
void Call(Address target, RelocInfo::Mode rmode, COND_ARGS);
static int CallSize(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId,
+ TypeFeedbackId ast_id = TypeFeedbackId::None(),
COND_ARGS);
void Call(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId,
+ TypeFeedbackId ast_id = TypeFeedbackId::None(),
COND_ARGS);
void Ret(COND_ARGS);
inline void Ret(BranchDelaySlot bd, Condition cond = al,
@@ -811,8 +811,8 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
// Conditionally load the cached Array transitioned map of type
- // transitioned_kind from the global context if the map in register
- // map_in_out is the cached Array map in the global context of
+ // transitioned_kind from the native context if the map in register
+ // map_in_out is the cached Array map in the native context of
// expected_kind.
void LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
@@ -824,7 +824,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
void LoadGlobalFunction(int index, Register function);
@@ -966,9 +967,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail);
+ void CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
@@ -977,6 +978,7 @@ class MacroAssembler: public Assembler {
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,
+ // All regs below here overwritten.
Register elements_reg,
Register scratch1,
Register scratch2,
@@ -996,6 +998,15 @@ class MacroAssembler: public Assembler {
Label* branch_to,
CompareMapMode mode = REQUIRE_EXACT_MAP);
+ // As above, but the map of the object is already loaded into the register
+ // which is preserved by the code generated.
+ void CompareMapAndBranch(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
// Check if the map of an object is equal to a specified map and branch to
// label if not. Skip the smi check if not required (object is known to be a
// heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
@@ -1330,17 +1341,18 @@ class MacroAssembler: public Assembler {
// Jump if either of the registers contain a smi.
void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);
- // Abort execution if argument is a smi. Used in debug code.
- void AbortIfSmi(Register object);
- void AbortIfNotSmi(Register object);
+ // Abort execution if argument is a smi, enabled via --debug-code.
+ void AssertNotSmi(Register object);
+ void AssertSmi(Register object);
- // Abort execution if argument is a string. Used in debug code.
- void AbortIfNotString(Register object);
+ // Abort execution if argument is not a string, enabled via --debug-code.
+ void AssertString(Register object);
- // Abort execution if argument is not the root value with the given index.
- void AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message);
+ // Abort execution if argument is not the root value with the given index,
+ // enabled via --debug-code.
+ void AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message);
// ---------------------------------------------------------------------------
// HeapNumber utilities.
@@ -1392,7 +1404,16 @@ class MacroAssembler: public Assembler {
void LoadInstanceDescriptors(Register map, Register descriptors);
-
+ void EnumLength(Register dst, Register map);
+ void NumberOfOwnDescriptors(Register dst, Register map);
+
+ template<typename Field>
+ void DecodeField(Register reg) {
+ static const int shift = Field::kShift;
+ static const int mask = (Field::kMask >> shift) << kSmiTagSize;
+ srl(reg, reg, shift);
+ And(reg, reg, Operand(mask));
+ }
// Activation support.
void EnterFrame(StackFrame::Type type);
diff --git a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
index c48bcc4..672ba0e 100644
--- a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -43,44 +43,49 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
+ * - t7 : Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
* - t1 : Pointer to current code object (Code*) including heap object tag.
* - t2 : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - t3 : Currently loaded character. Must be loaded using
* LoadCurrentCharacter before using any of the dispatch methods.
- * - t4 : points to tip of backtrack stack
+ * - t4 : Points to tip of backtrack stack
* - t5 : Unused.
* - t6 : End of input (points to byte after last character in input).
* - fp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - sp : points to tip of C stack.
+ * - sp : Points to tip of C stack.
*
* The remaining registers are free for computations.
* Each call to a public method should retain this convention.
*
* The stack will have the following structure:
*
- * - fp[56] direct_call (if 1, direct call from JavaScript code,
+ * - fp[64] Isolate* isolate (address of the current isolate)
+ * - fp[60] direct_call (if 1, direct call from JavaScript code,
* if 0, call through the runtime system).
- * - fp[52] stack_area_base (High end of the memory area to use as
+ * - fp[56] stack_area_base (High end of the memory area to use as
* backtracking stack).
+ * - fp[52] capture array size (may fit multiple sets of matches)
* - fp[48] int* capture_array (int[num_saved_registers_], for output).
* - fp[44] secondary link/return address used by native call.
* --- sp when called ---
- * - fp[40] return address (lr).
- * - fp[36] old frame pointer (r11).
+ * - fp[40] return address (lr).
+ * - fp[36] old frame pointer (r11).
* - fp[0..32] backup of registers s0..s7.
* --- frame pointer ----
- * - fp[-4] end of input (Address of end of string).
- * - fp[-8] start of input (Address of first character in string).
+ * - fp[-4] end of input (address of end of string).
+ * - fp[-8] start of input (address of first character in string).
* - fp[-12] start index (character index of start).
* - fp[-16] void* input_string (location of a handle containing the string).
- * - fp[-20] Offset of location before start of input (effectively character
+ * - fp[-20] success counter (only for global regexps to count matches).
+ * - fp[-24] Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a
* non-position.
- * - fp[-24] At start (if 1, we are starting at the start of the
+ * - fp[-28] At start (if 1, we are starting at the start of the
* string, otherwise 0)
- * - fp[-28] register 0 (Only positions must be stored in the first
+ * - fp[-32] register 0 (Only positions must be stored in the first
* - register 1 num_saved_registers_ registers)
* - ...
* - register num_registers-1
@@ -114,8 +119,10 @@ namespace internal {
RegExpMacroAssemblerMIPS::RegExpMacroAssemblerMIPS(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -201,8 +208,8 @@ void RegExpMacroAssemblerMIPS::CheckCharacterGT(uc16 limit, Label* on_greater) {
void RegExpMacroAssemblerMIPS::CheckAtStart(Label* on_at_start) {
Label not_at_start;
// Did we start the match at the start of the string at all?
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- BranchOrBacktrack(&not_at_start, eq, a0, Operand(zero_reg));
+ __ lw(a0, MemOperand(frame_pointer(), kStartIndex));
+ BranchOrBacktrack(&not_at_start, ne, a0, Operand(zero_reg));
// If we did, are we still at the start of the input?
__ lw(a1, MemOperand(frame_pointer(), kInputStart));
@@ -214,8 +221,8 @@ void RegExpMacroAssemblerMIPS::CheckAtStart(Label* on_at_start) {
void RegExpMacroAssemblerMIPS::CheckNotAtStart(Label* on_not_at_start) {
// Did we start the match at the start of the string at all?
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- BranchOrBacktrack(on_not_at_start, eq, a0, Operand(zero_reg));
+ __ lw(a0, MemOperand(frame_pointer(), kStartIndex));
+ BranchOrBacktrack(on_not_at_start, ne, a0, Operand(zero_reg));
// If we did, are we still at the start of the input?
__ lw(a1, MemOperand(frame_pointer(), kInputStart));
__ Addu(a0, end_of_input_address(), Operand(current_input_offset()));
@@ -444,13 +451,6 @@ void RegExpMacroAssemblerMIPS::CheckNotBackReference(
}
-void RegExpMacroAssemblerMIPS::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- UNIMPLEMENTED_MIPS();
-}
-
-
void RegExpMacroAssemblerMIPS::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
BranchOrBacktrack(on_not_equal, ne, current_character(), Operand(c));
@@ -640,6 +640,7 @@ void RegExpMacroAssemblerMIPS::Fail() {
Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
+ Label return_v0;
if (masm_->has_exception()) {
// If the code gets corrupted due to long regular expressions and lack of
// space on trampolines, an internal exception flag is set. If this case
@@ -669,8 +670,9 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Set frame pointer in space for it if this is not a direct call
// from generated code.
__ Addu(frame_pointer(), sp, Operand(4 * kPointerSize));
+ __ mov(a0, zero_reg);
+ __ push(a0); // Make room for success counter and initialize it to 0.
__ push(a0); // Make room for "position - 1" constant (value irrelevant).
- __ push(a0); // Make room for "at start" constant (value irrelevant).
// Check if we have space on the stack for registers.
Label stack_limit_hit;
@@ -689,12 +691,12 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ li(v0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_v0);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(a0);
// If returned value is non-zero, we exit with the returned value as result.
- __ Branch(&exit_label_, ne, v0, Operand(zero_reg));
+ __ Branch(&return_v0, ne, v0, Operand(zero_reg));
__ bind(&stack_ok);
// Allocate space on stack for registers.
@@ -715,39 +717,44 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// position registers.
__ sw(a0, MemOperand(frame_pointer(), kInputStartMinusOne));
- // Determine whether the start index is zero, that is at the start of the
- // string, and store that value in a local variable.
- __ mov(t5, a1);
- __ li(a1, Operand(1));
- __ Movn(a1, zero_reg, t5);
- __ sw(a1, MemOperand(frame_pointer(), kAtStart));
+ // Initialize code pointer register
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ Branch(&load_char_start_regexp, ne, a1, Operand(zero_reg));
+ __ li(current_character(), Operand('\n'));
+ __ jmp(&start_regexp);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+ // Initialize on-stack registers.
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1.
-
- // Address of register 0.
- __ Addu(a1, frame_pointer(), Operand(kRegisterZero));
- __ li(a2, Operand(num_saved_registers_));
- Label init_loop;
- __ bind(&init_loop);
- __ sw(a0, MemOperand(a1));
- __ Addu(a1, a1, Operand(-kPointerSize));
- __ Subu(a2, a2, Operand(1));
- __ Branch(&init_loop, ne, a2, Operand(zero_reg));
+ if (num_saved_registers_ > 8) {
+ // Address of register 0.
+ __ Addu(a1, frame_pointer(), Operand(kRegisterZero));
+ __ li(a2, Operand(num_saved_registers_));
+ Label init_loop;
+ __ bind(&init_loop);
+ __ sw(a0, MemOperand(a1));
+ __ Addu(a1, a1, Operand(-kPointerSize));
+ __ Subu(a2, a2, Operand(1));
+ __ Branch(&init_loop, ne, a2, Operand(zero_reg));
+ } else {
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ sw(a0, register_location(i));
+ }
+ }
}
// Initialize backtrack stack pointer.
__ lw(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackHighEnd));
- // Initialize code pointer register
- __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
- // Load previous char as initial value of current character register.
- Label at_start;
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- __ Branch(&at_start, ne, a0, Operand(zero_reg));
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ li(current_character(), Operand('\n'));
+
__ jmp(&start_label_);
@@ -776,6 +783,10 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
for (int i = 0; i < num_saved_registers_; i += 2) {
__ lw(a2, register_location(i));
__ lw(a3, register_location(i + 1));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in a4 for the zero-length check later.
+ __ mov(t7, a2);
+ }
if (mode_ == UC16) {
__ sra(a2, a2, 1);
__ Addu(a2, a2, a1);
@@ -791,10 +802,57 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ Addu(a0, a0, kPointerSize);
}
}
- __ li(v0, Operand(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ __ lw(a0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ __ lw(a1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ __ lw(a2, MemOperand(frame_pointer(), kRegisterOutput));
+ // Increment success counter.
+ __ Addu(a0, a0, 1);
+ __ sw(a0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ Subu(a1, a1, num_saved_registers_);
+ // Check whether we have enough room for another set of capture results.
+ __ mov(v0, a0);
+ __ Branch(&return_v0, lt, a1, Operand(num_saved_registers_));
+
+ __ sw(a1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ // Advance the location for output.
+ __ Addu(a2, a2, num_saved_registers_ * kPointerSize);
+ __ sw(a2, MemOperand(frame_pointer(), kRegisterOutput));
+
+ // Prepare a0 to initialize registers with its value in the next run.
+ __ lw(a0, MemOperand(frame_pointer(), kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // t7: capture start index
+ // Not a zero-length match, restart.
+ __ Branch(
+ &load_char_start_regexp, ne, current_input_offset(), Operand(t7));
+ // Offset from the end is zero if we already reached the end.
+ __ Branch(&exit_label_, eq, current_input_offset(),
+ Operand(zero_reg));
+ // Advance current position after a zero-length match.
+ __ Addu(current_input_offset(),
+ current_input_offset(),
+ Operand((mode_ == UC16) ? 2 : 1));
+ }
+
+ __ Branch(&load_char_start_regexp);
+ } else {
+ __ li(v0, Operand(SUCCESS));
+ }
}
// Exit and return v0.
__ bind(&exit_label_);
+ if (global()) {
+ __ lw(v0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ }
+
+ __ bind(&return_v0);
// Skip sp past regexp registers and local variables..
__ mov(sp, frame_pointer());
// Restore registers s0..s7 and return (restoring ra to pc).
@@ -820,7 +878,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ MultiPop(regexp_registers_to_retain);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ Branch(&exit_label_, ne, v0, Operand(zero_reg));
+ __ Branch(&return_v0, ne, v0, Operand(zero_reg));
// String might have moved: Reload end of string from frame.
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
@@ -864,7 +922,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ li(v0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_v0);
}
}
@@ -1012,8 +1070,9 @@ void RegExpMacroAssemblerMIPS::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerMIPS::Succeed() {
+bool RegExpMacroAssemblerMIPS::Succeed() {
__ jmp(&success_label_);
+ return global();
}
@@ -1044,6 +1103,11 @@ void RegExpMacroAssemblerMIPS::WriteStackPointerToRegister(int reg) {
}
+bool RegExpMacroAssemblerMIPS::CanReadUnaligned() {
+ return false;
+}
+
+
// Private methods:
void RegExpMacroAssemblerMIPS::CallCheckStackGuardState(Register scratch) {
@@ -1280,8 +1344,9 @@ void RegExpMacroAssemblerMIPS::LoadCurrentCharacterUnchecked(int cp_offset,
int characters) {
Register offset = current_input_offset();
if (cp_offset != 0) {
- __ Addu(a0, current_input_offset(), Operand(cp_offset * char_size()));
- offset = a0;
+ // t7 is not being used to store the capture start index at this point.
+ __ Addu(t7, current_input_offset(), Operand(cp_offset * char_size()));
+ offset = t7;
}
// We assume that we cannot do unaligned loads on MIPS, so this function
// must only be used to load a single character at a time.
diff --git a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
index d167f62..8dd52a4 100644
--- a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
@@ -38,16 +38,10 @@
namespace v8 {
namespace internal {
-#ifdef V8_INTERPRETED_REGEXP
-class RegExpMacroAssemblerMIPS: public RegExpMacroAssembler {
- public:
- RegExpMacroAssemblerMIPS();
- virtual ~RegExpMacroAssemblerMIPS();
-};
-#else // V8_INTERPRETED_REGEXP
+#ifndef V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerMIPS(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerMIPS(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerMIPS();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -72,7 +66,6 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -115,10 +108,11 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
+ virtual bool CanReadUnaligned();
// Called from RegExp if the stack-guard is triggered.
// If the code object is relocated, the return address is fixed before
@@ -141,7 +135,8 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
static const int kStackFrameHeader = kReturnAddress + kPointerSize;
// Stack parameters placed by caller.
static const int kRegisterOutput = kStackFrameHeader + 20;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -153,10 +148,10 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
static const int kInputString = kStartIndex - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne = kInputString - kPointerSize;
- static const int kAtStart = kInputStartMinusOne - kPointerSize;
+ static const int kSuccessfulCaptures = kInputString - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
- static const int kRegisterZero = kAtStart - kPointerSize;
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
// Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024;
diff --git a/src/3rdparty/v8/src/mips/simulator-mips.cc b/src/3rdparty/v8/src/mips/simulator-mips.cc
index f31ce7e..cf87f93 100644
--- a/src/3rdparty/v8/src/mips/simulator-mips.cc
+++ b/src/3rdparty/v8/src/mips/simulator-mips.cc
@@ -1502,10 +1502,15 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
break;
}
}
- double result = target(arg0, arg1, arg2, arg3);
if (redirection->type() != ExternalReference::BUILTIN_COMPARE_CALL) {
- SetFpResult(result);
+ SimulatorRuntimeFPCall target =
+ reinterpret_cast<SimulatorRuntimeFPCall>(external);
+ double result = target(arg0, arg1, arg2, arg3);
+ SetFpResult(result);
} else {
+ SimulatorRuntimeCall target =
+ reinterpret_cast<SimulatorRuntimeCall>(external);
+ uint64_t result = target(arg0, arg1, arg2, arg3, arg4, arg5);
int32_t gpreg_pair[2];
memcpy(&gpreg_pair[0], &result, 2 * sizeof(int32_t));
set_register(v0, gpreg_pair[0]);
@@ -2063,10 +2068,15 @@ void Simulator::DecodeTypeRegister(Instruction* instr) {
// Rounding modes are not yet supported.
ASSERT((FCSR_ & 3) == 0);
// In rounding mode 0 it should behave like ROUND.
- case ROUND_W_D: // Round double to word.
+ case ROUND_W_D: // Round double to word (round half to even).
{
- double rounded = fs > 0 ? floor(fs + 0.5) : ceil(fs - 0.5);
+ double rounded = floor(fs + 0.5);
int32_t result = static_cast<int32_t>(rounded);
+ if ((result & 1) != 0 && result - fs == 0.5) {
+ // If the number is halfway between two integers,
+ // round to the even one.
+ result--;
+ }
set_fpu_register(fd_reg, result);
if (set_fcsr_round_error(fs, rounded)) {
set_fpu_register(fd_reg, kFPUInvalidResult);
diff --git a/src/3rdparty/v8/src/mips/simulator-mips.h b/src/3rdparty/v8/src/mips/simulator-mips.h
index 1e72939..776badc 100644
--- a/src/3rdparty/v8/src/mips/simulator-mips.h
+++ b/src/3rdparty/v8/src/mips/simulator-mips.h
@@ -50,16 +50,16 @@ namespace internal {
entry(p0, p1, p2, p3, p4)
typedef int (*mips_regexp_matcher)(String*, int, const byte*, const byte*,
- void*, int*, Address, int, Isolate*);
+ void*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address
// should act as a function matching the type arm_regexp_matcher.
// The fifth argument is a dummy that reserves the space used for
// the return address added by the ExitFrame in native calls.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
(FUNCTION_CAST<mips_regexp_matcher>(entry)( \
- p0, p1, p2, p3, NULL, p4, p5, p6, p7))
+ p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address)
@@ -403,9 +403,9 @@ class Simulator {
reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
Simulator::current(Isolate::Current())->Call( \
- entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
+ entry, 10, p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == NULL ? \
diff --git a/src/3rdparty/v8/src/mips/stub-cache-mips.cc b/src/3rdparty/v8/src/mips/stub-cache-mips.cc
index 18a5f5f..bd15775 100644
--- a/src/3rdparty/v8/src/mips/stub-cache-mips.cc
+++ b/src/3rdparty/v8/src/mips/stub-cache-mips.cc
@@ -270,11 +270,12 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
int index,
Register prototype) {
// Load the global or builtins object from the current context.
- __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
__ lw(prototype,
- FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
+ __ lw(prototype,
+ FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
__ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
// Load the initial map. The global functions all have initial maps.
__ lw(prototype,
@@ -291,13 +292,14 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
Label* miss) {
Isolate* isolate = masm->isolate();
// Check we're still in the same context.
- __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(prototype,
+ MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
ASSERT(!prototype.is(at));
- __ li(at, isolate->global());
+ __ li(at, isolate->global_object());
__ Branch(miss, ne, prototype, Operand(at));
// Get the global function with the given index.
Handle<JSFunction> function(
- JSFunction::cast(isolate->global_context()->get(index)));
+ JSFunction::cast(isolate->native_context()->get(index)));
// Load its initial map. The global functions all have initial maps.
__ li(prototype, Handle<Map>(function->initial_map()));
// Load the prototype from the initial map.
@@ -422,21 +424,59 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
// a0 : value.
Label exit;
+
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
- __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
DO_SMI_CHECK, mode);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register, push
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -458,10 +498,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ li(t0, Operand(transition));
- __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ // Update the map of the object.
+ __ li(scratch1, Operand(transition));
+ __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -475,7 +525,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ sw(a0, FieldMemOperand(receiver_reg, offset));
// Skip updating write barrier if storing a smi.
- __ JumpIfSmi(a0, &exit, scratch);
+ __ JumpIfSmi(a0, &exit, scratch1);
// Update the write barrier for the array address.
// Pass the now unused name_reg as a scratch register.
@@ -483,15 +533,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kRAHasNotBeenSaved,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array.
- __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ sw(a0, FieldMemOperand(scratch, offset));
+ __ lw(scratch1,
+ FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ sw(a0, FieldMemOperand(scratch1, offset));
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(a0, &exit);
@@ -499,7 +550,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Update the write barrier for the array address.
// Ok to clobber receiver_reg and name_reg, since we return.
__ mov(name_reg, a0);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -1185,6 +1236,44 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
}
+void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss) {
+ ASSERT(!receiver.is(scratch1));
+ ASSERT(!receiver.is(scratch2));
+ ASSERT(!receiver.is(scratch3));
+
+ // Load the properties dictionary.
+ Register dictionary = scratch1;
+ __ lw(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+
+ // Probe the dictionary.
+ Label probe_done;
+ StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
+ miss,
+ &probe_done,
+ dictionary,
+ name_reg,
+ scratch2,
+ scratch3);
+ __ bind(&probe_done);
+
+ // If probing finds an entry in the dictionary, scratch3 contains the
+ // pointer into the dictionary. Check that the value is the callback.
+ Register pointer = scratch3;
+ const int kElementsStartOffset = StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ lw(scratch2, FieldMemOperand(pointer, kValueOffset));
+ __ Branch(miss, ne, scratch2, Operand(callback));
+}
+
+
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Handle<JSObject> holder,
Register receiver,
@@ -1192,6 +1281,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
+ Register scratch4,
Handle<AccessorInfo> callback,
Handle<String> name,
Label* miss) {
@@ -1202,6 +1292,11 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register reg = CheckPrototypes(object, receiver, holder, scratch1,
scratch2, scratch3, name, miss);
+ if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
+ GenerateDictionaryLoadCallback(
+ reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
+ }
+
// Build AccessorInfo::args_ list on the stack and push property name below
// the exit frame to make GC aware of them and store pointers to them.
__ push(receiver);
@@ -1269,12 +1364,13 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1341,7 +1437,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), v0, holder_reg,
@@ -1481,7 +1577,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -1585,16 +1681,29 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
+ __ CheckFastSmiElements(a3, t3, &call_builtin);
// edx: receiver
// r3: map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
a3,
t3,
+ &try_holey_map);
+ __ mov(a2, receiver);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ a3,
+ t3,
&call_builtin);
__ mov(a2, receiver);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(a3, a3, &call_builtin);
@@ -2015,7 +2124,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2149,7 +2258,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2250,7 +2359,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2471,7 +2580,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2530,7 +2639,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
GenerateMissBranch();
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2547,21 +2656,30 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Name register might be clobbered.
- GenerateStoreField(masm(), object, index, transition, a1, a2, a3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ a1, a2, a3, t0,
+ &miss);
__ bind(&miss);
__ li(a2, Operand(Handle<String>(name))); // Restore name.
Handle<Code> ic = masm()->isolate()->builtins()->Builtins::StoreIC_Miss();
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<JSObject> object,
- Handle<AccessorInfo> callback,
- Handle<String> name) {
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
// -- a0 : value
// -- a1 : receiver
@@ -2569,19 +2687,13 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -- ra : return address
// -----------------------------------
Label miss;
-
- // Check that the map of the object hasn't changed.
- __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
- DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(a1, a3, &miss);
- }
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(a1, &miss, a3);
+ CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);
// Stub never generated for non-global objects that require access
// checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
__ push(a1); // Receiver.
__ li(a3, Operand(callback)); // Callback info.
@@ -2599,7 +2711,81 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void StoreStubCompiler::GenerateStoreViaSetter(
+ MacroAssembler* masm,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- a0 : value
+ // -- a1 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(a0);
+
+ if (!setter.is_null()) {
+ // Call the JavaScript setter with receiver and value on the stack.
+ __ push(a1);
+ __ push(a0);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(v0);
+
+ // Restore context register.
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- a0 : value
+ // -- a1 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(a1, &miss);
+ CheckPrototypes(receiver, a1, holder, a3, t0, t1, name, &miss);
+
+ GenerateStoreViaSetter(masm(), setter);
+
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2644,7 +2830,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2689,7 +2875,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2723,7 +2909,7 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NONEXISTENT, factory()->empty_string());
+ return GetCode(Code::NONEXISTENT, factory()->empty_string());
}
@@ -2745,7 +2931,7 @@ Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2760,13 +2946,76 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
// -- ra : return address
// -----------------------------------
Label miss;
- GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, callback, name,
+ GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, t1, callback, name,
&miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- a0 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ if (!getter.is_null()) {
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(a0);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // Restore context register.
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- a0 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(a0, &miss);
+ CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);
+
+ GenerateLoadViaGetter(masm(), getter);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2786,7 +3035,7 @@ Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2809,7 +3058,7 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2850,7 +3099,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2872,7 +3121,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2891,12 +3140,12 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
// Check the key is the cached one.
__ Branch(&miss, ne, a0, Operand(name));
- GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, callback, name,
- &miss);
+ GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, t1, callback,
+ name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2920,7 +3169,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2945,7 +3194,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2965,7 +3214,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
__ bind(&miss);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2990,7 +3239,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3014,7 +3263,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
__ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -3034,7 +3283,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -3061,7 +3310,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
__ Jump(miss_ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -3086,7 +3335,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// a3 is used as scratch register. a1 and a2 keep their values if a jump to
// the miss label is generated.
- GenerateStoreField(masm(), object, index, transition, a2, a1, a3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ a2, a1, a3, t0,
+ &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
@@ -3094,7 +3349,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
@@ -3118,7 +3375,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -3156,7 +3413,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
__ Jump(miss_ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -3196,7 +3453,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// t7: undefined
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
__ Check(ne, "Function constructed by construct stub.",
- a3, Operand(JS_FUNCTION_TYPE));
+ a3, Operand(JS_FUNCTION_TYPE));
#endif
// Now allocate the JSObject in new space.
@@ -3204,7 +3461,13 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// a1: constructor function
// a2: initial map
// t7: undefined
+ ASSERT(function->has_initial_map());
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
+#ifdef DEBUG
+ int instance_size = function->initial_map()->instance_size();
+ __ Check(eq, "Instance size of initial map changed.",
+ a3, Operand(instance_size >> kPointerSizeLog2));
+#endif
__ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to initial
@@ -3267,7 +3530,6 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
}
// Fill the unused in-object property fields with undefined.
- ASSERT(function->has_initial_map());
for (int i = shared->this_property_assignments_count();
i < function->initial_map()->inobject_properties();
i++) {
@@ -3372,9 +3634,12 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3508,8 +3773,11 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3676,7 +3944,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ li(t0, 0x7ff);
__ Xor(t1, t5, Operand(0xFF));
__ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
- __ Branch(&exponent_rebiased, eq, t0, Operand(0xff));
+ __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));
// Rebias exponent.
__ Addu(t5,
@@ -3869,8 +4137,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3934,8 +4205,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3970,7 +4244,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ xor_(t1, t6, t5);
__ li(t2, kBinary32ExponentMask);
__ Movz(t6, t2, t1); // Only if t6 is equal to t5.
- __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
+ __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));
// Rebias exponent.
__ srl(t6, t6, HeapNumber::kExponentShift);
@@ -4001,7 +4275,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ bind(&done);
__ sll(t9, key, 1);
- __ addu(t9, a2, t9);
+ __ addu(t9, a3, t9);
__ sw(t3, MemOperand(t9, 0));
// Entry registers are intact, a0 holds the value which is the return
@@ -4019,7 +4293,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ or_(t3, t6, t4);
__ Branch(&done);
} else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- __ sll(t8, t0, 3);
+ __ sll(t8, key, 2);
__ addu(t8, a3, t8);
// t8: effective address of destination element.
__ sw(t4, MemOperand(t8, 0));
@@ -4106,8 +4380,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4286,7 +4563,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi or a heap number convertible to a smi.
GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(value_reg, &transition_elements_kind);
}
@@ -4314,7 +4591,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ Addu(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4323,7 +4600,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ Addu(scratch, scratch, scratch2);
__ sw(value_reg, MemOperand(scratch));
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ Addu(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4332,7 +4609,6 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ Addu(scratch, scratch, scratch2);
__ sw(value_reg, MemOperand(scratch));
__ mov(receiver_reg, value_reg);
- ASSERT(elements_kind == FAST_ELEMENTS);
__ RecordWrite(elements_reg, // Object.
scratch, // Address.
receiver_reg, // Value.
@@ -4477,6 +4753,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ StoreNumberToDoubleElements(value_reg,
key_reg,
receiver_reg,
+ // All registers after this are overwritten.
elements_reg,
scratch1,
scratch2,
diff --git a/src/3rdparty/v8/src/mirror-debugger.js b/src/3rdparty/v8/src/mirror-debugger.js
index c7f0dcc..a5331a0 100644
--- a/src/3rdparty/v8/src/mirror-debugger.js
+++ b/src/3rdparty/v8/src/mirror-debugger.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2006-2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -154,6 +154,7 @@ var FUNCTION_TYPE = 'function';
var REGEXP_TYPE = 'regexp';
var ERROR_TYPE = 'error';
var PROPERTY_TYPE = 'property';
+var INTERNAL_PROPERTY_TYPE = 'internalProperty';
var FRAME_TYPE = 'frame';
var SCRIPT_TYPE = 'script';
var CONTEXT_TYPE = 'context';
@@ -176,10 +177,8 @@ PropertyType.ConstantFunction = 2;
PropertyType.Callbacks = 3;
PropertyType.Handler = 4;
PropertyType.Interceptor = 5;
-PropertyType.MapTransition = 6;
-PropertyType.ExternalArrayTransition = 7;
-PropertyType.ConstantTransition = 8;
-PropertyType.NullDescriptor = 9;
+PropertyType.Transition = 6;
+PropertyType.Nonexistent = 7;
// Different attributes for a property.
@@ -214,6 +213,7 @@ var ScopeType = { Global: 0,
// - RegExpMirror
// - ErrorMirror
// - PropertyMirror
+// - InternalPropertyMirror
// - FrameMirror
// - ScriptMirror
@@ -360,6 +360,15 @@ Mirror.prototype.isProperty = function() {
/**
+ * Check whether the mirror reflects an internal property.
+ * @returns {boolean} True if the mirror reflects an internal property
+ */
+Mirror.prototype.isInternalProperty = function() {
+ return this instanceof InternalPropertyMirror;
+};
+
+
+/**
* Check whether the mirror reflects a stack frame.
* @returns {boolean} True if the mirror reflects a stack frame
*/
@@ -596,23 +605,6 @@ ObjectMirror.prototype.protoObject = function() {
};
-/**
- * Return the primitive value if this is object of Boolean, Number or String
- * type (but not Date). Otherwise return undefined.
- */
-ObjectMirror.prototype.primitiveValue = function() {
- if (!IS_STRING_WRAPPER(this.value_) && !IS_NUMBER_WRAPPER(this.value_) &&
- !IS_BOOLEAN_WRAPPER(this.value_)) {
- return void 0;
- }
- var primitiveValue = %_ValueOf(this.value_);
- if (IS_UNDEFINED(primitiveValue)) {
- return void 0;
- }
- return MakeMirror(primitiveValue);
-};
-
-
ObjectMirror.prototype.hasNamedInterceptor = function() {
// Get information on interceptors for this object.
var x = %GetInterceptorInfo(this.value_);
@@ -703,7 +695,7 @@ ObjectMirror.prototype.propertyNames = function(kind, limit) {
* Return the properties for this object as an array of PropertyMirror objects.
* @param {number} kind Indicate whether named, indexed or both kinds of
* properties are requested
- * @param {number} limit Limit the number of properties returend to the
+ * @param {number} limit Limit the number of properties returned to the
specified value
* @return {Array} Property mirrors for this object
*/
@@ -718,6 +710,16 @@ ObjectMirror.prototype.properties = function(kind, limit) {
};
+/**
+ * Return the internal properties for this object as an array of
+ * InternalPropertyMirror objects.
+ * @return {Array} Property mirrors for this object
+ */
+ObjectMirror.prototype.internalProperties = function() {
+ return ObjectMirror.GetInternalProperties(this.value_);
+}
+
+
ObjectMirror.prototype.property = function(name) {
var details = %DebugGetPropertyDetails(this.value_, %ToString(name));
if (details) {
@@ -792,6 +794,37 @@ ObjectMirror.prototype.toText = function() {
/**
+ * Return the internal properties of the value, such as [[PrimitiveValue]] of
+ * scalar wrapper objects and properties of the bound function.
+ * This method is done static to be accessible from Debug API with the bare
+ * values without mirrors.
+ * @return {Array} array (possibly empty) of InternalProperty instances
+ */
+ObjectMirror.GetInternalProperties = function(value) {
+ if (IS_STRING_WRAPPER(value) || IS_NUMBER_WRAPPER(value) ||
+ IS_BOOLEAN_WRAPPER(value)) {
+ var primitiveValue = %_ValueOf(value);
+ return [new InternalPropertyMirror("[[PrimitiveValue]]", primitiveValue)];
+ } else if (IS_FUNCTION(value)) {
+ var bindings = %BoundFunctionGetBindings(value);
+ var result = [];
+ if (bindings && IS_ARRAY(bindings)) {
+ result.push(new InternalPropertyMirror("[[TargetFunction]]",
+ bindings[0]));
+ result.push(new InternalPropertyMirror("[[BoundThis]]", bindings[1]));
+ var boundArgs = [];
+ for (var i = 2; i < bindings.length; i++) {
+ boundArgs.push(bindings[i]);
+ }
+ result.push(new InternalPropertyMirror("[[BoundArgs]]", boundArgs));
+ }
+ return result;
+ }
+ return [];
+}
+
+
+/**
* Mirror object for functions.
* @param {function} value The function object reflected by this mirror.
* @constructor
@@ -1270,6 +1303,33 @@ PropertyMirror.prototype.isNative = function() {
};
+/**
+ * Mirror object for internal properties. Internal property reflects properties
+ * not accessible from user code such as [[BoundThis]] in bound function.
+ * Their names are merely symbolic.
+ * @param {string} name The name of the property
+ * @param {value} property value
+ * @constructor
+ * @extends Mirror
+ */
+function InternalPropertyMirror(name, value) {
+ %_CallFunction(this, INTERNAL_PROPERTY_TYPE, Mirror);
+ this.name_ = name;
+ this.value_ = value;
+}
+inherits(InternalPropertyMirror, Mirror);
+
+
+InternalPropertyMirror.prototype.name = function() {
+ return this.name_;
+};
+
+
+InternalPropertyMirror.prototype.value = function() {
+ return MakeMirror(this.value_, false);
+};
+
+
var kFrameDetailsFrameIdIndex = 0;
var kFrameDetailsReceiverIndex = 1;
var kFrameDetailsFunctionIndex = 2;
@@ -1750,6 +1810,15 @@ FrameMirror.prototype.localsText = function() {
};
+FrameMirror.prototype.restart = function() {
+ var result = %LiveEditRestartFrame(this.break_id_, this.index_);
+ if (IS_UNDEFINED(result)) {
+ result = "Failed to find requested frame";
+ }
+ return result;
+};
+
+
FrameMirror.prototype.toText = function(opt_locals) {
var result = '';
result += '#' + (this.index() <= 9 ? '0' : '') + this.index();
@@ -2195,7 +2264,8 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
break;
case PROPERTY_TYPE:
- throw new Error('PropertyMirror cannot be serialized independeltly');
+ case INTERNAL_PROPERTY_TYPE:
+ throw new Error('PropertyMirror cannot be serialized independently');
break;
case FRAME_TYPE:
@@ -2271,7 +2341,8 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
* "prototypeObject":{"ref":<number>},
* "namedInterceptor":<boolean>,
* "indexedInterceptor":<boolean>,
- * "properties":[<properties>]}
+ * "properties":[<properties>],
+ * "internalProperties":[<internal properties>]}
*/
JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
details) {
@@ -2282,11 +2353,6 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
content.protoObject = this.serializeReference(mirror.protoObject());
content.prototypeObject = this.serializeReference(mirror.prototypeObject());
- var primitiveValue = mirror.primitiveValue();
- if (!IS_UNDEFINED(primitiveValue)) {
- content.primitiveValue = this.serializeReference(primitiveValue);
- }
-
// Add flags to indicate whether there are interceptors.
if (mirror.hasNamedInterceptor()) {
content.namedInterceptor = true;
@@ -2348,6 +2414,15 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
}
}
content.properties = p;
+
+ var internalProperties = mirror.internalProperties();
+ if (internalProperties.length > 0) {
+ var ip = [];
+ for (var i = 0; i < internalProperties.length; i++) {
+ ip.push(this.serializeInternalProperty_(internalProperties[i]));
+ }
+ content.internalProperties = ip;
+ }
};
@@ -2415,6 +2490,33 @@ JSONProtocolSerializer.prototype.serializeProperty_ = function(propertyMirror) {
};
+/**
+ * Serialize internal property information to the following JSON format for
+ * building the array of properties.
+ *
+ * {"name":"<property name>",
+ * "ref":<number>}
+ *
+ * {"name":"[[BoundThis]]","ref":117}
+ *
+ * @param {InternalPropertyMirror} propertyMirror The property to serialize.
+ * @returns {Object} Protocol object representing the property.
+ */
+JSONProtocolSerializer.prototype.serializeInternalProperty_ =
+ function(propertyMirror) {
+ var result = {};
+
+ result.name = propertyMirror.name();
+ var propertyValue = propertyMirror.value();
+ if (this.inlineRefs_() && propertyValue.isValue()) {
+ result.value = this.serializeReferenceWithDisplayData_(propertyValue);
+ } else {
+ result.ref = propertyValue.handle();
+ }
+ return result;
+};
+
+
JSONProtocolSerializer.prototype.serializeFrame_ = function(mirror, content) {
content.index = mirror.index();
content.receiver = this.serializeReference(mirror.receiver());
diff --git a/src/3rdparty/v8/src/misc-intrinsics.h b/src/3rdparty/v8/src/misc-intrinsics.h
index 5393de2..c1da8a9 100644
--- a/src/3rdparty/v8/src/misc-intrinsics.h
+++ b/src/3rdparty/v8/src/misc-intrinsics.h
@@ -45,7 +45,7 @@ inline int IntegerLog2(uint32_t value) {
return 31 - __builtin_clz(value);
}
-#elif defined(_MSC_VER)
+#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
#pragma intrinsic(_BitScanReverse)
diff --git a/src/3rdparty/v8/src/mksnapshot.cc b/src/3rdparty/v8/src/mksnapshot.cc
index d1620bf..d777551 100644
--- a/src/3rdparty/v8/src/mksnapshot.cc
+++ b/src/3rdparty/v8/src/mksnapshot.cc
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#include <errno.h>
+#include <stdio.h>
#ifdef COMPRESS_STARTUP_DATA_BZ2
#include <bzlib.h>
#endif
@@ -33,6 +35,7 @@
#include "v8.h"
#include "bootstrapper.h"
+#include "flags.h"
#include "natives.h"
#include "platform.h"
#include "serialize.h"
@@ -163,30 +166,37 @@ class CppByteSink : public PartialSnapshotSink {
}
void WriteSpaceUsed(
+ const char* prefix,
int new_space_used,
int pointer_space_used,
int data_space_used,
int code_space_used,
int map_space_used,
- int cell_space_used,
- int large_space_used) {
- fprintf(fp_, "const int Snapshot::new_space_used_ = %d;\n", new_space_used);
+ int cell_space_used) {
fprintf(fp_,
- "const int Snapshot::pointer_space_used_ = %d;\n",
+ "const int Snapshot::%snew_space_used_ = %d;\n",
+ prefix,
+ new_space_used);
+ fprintf(fp_,
+ "const int Snapshot::%spointer_space_used_ = %d;\n",
+ prefix,
pointer_space_used);
fprintf(fp_,
- "const int Snapshot::data_space_used_ = %d;\n",
+ "const int Snapshot::%sdata_space_used_ = %d;\n",
+ prefix,
data_space_used);
fprintf(fp_,
- "const int Snapshot::code_space_used_ = %d;\n",
+ "const int Snapshot::%scode_space_used_ = %d;\n",
+ prefix,
code_space_used);
- fprintf(fp_, "const int Snapshot::map_space_used_ = %d;\n", map_space_used);
fprintf(fp_,
- "const int Snapshot::cell_space_used_ = %d;\n",
- cell_space_used);
+ "const int Snapshot::%smap_space_used_ = %d;\n",
+ prefix,
+ map_space_used);
fprintf(fp_,
- "const int Snapshot::large_space_used_ = %d;\n",
- large_space_used);
+ "const int Snapshot::%scell_space_used_ = %d;\n",
+ prefix,
+ cell_space_used);
}
void WritePartialSnapshot() {
@@ -303,7 +313,67 @@ int main(int argc, char** argv) {
#endif
i::Serializer::Enable();
Persistent<Context> context = v8::Context::New();
- ASSERT(!context.IsEmpty());
+ if (context.IsEmpty()) {
+ fprintf(stderr,
+ "\nException thrown while compiling natives - see above.\n\n");
+ exit(1);
+ }
+ if (i::FLAG_extra_code != NULL) {
+ context->Enter();
+ // Capture 100 frames if anything happens.
+ V8::SetCaptureStackTraceForUncaughtExceptions(true, 100);
+ HandleScope scope;
+ const char* name = i::FLAG_extra_code;
+ FILE* file = i::OS::FOpen(name, "rb");
+ if (file == NULL) {
+ fprintf(stderr, "Failed to open '%s': errno %d\n", name, errno);
+ exit(1);
+ }
+
+ fseek(file, 0, SEEK_END);
+ int size = ftell(file);
+ rewind(file);
+
+ char* chars = new char[size + 1];
+ chars[size] = '\0';
+ for (int i = 0; i < size;) {
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
+ if (read < 0) {
+ fprintf(stderr, "Failed to read '%s': errno %d\n", name, errno);
+ exit(1);
+ }
+ i += read;
+ }
+ fclose(file);
+ Local<String> source = String::New(chars);
+ TryCatch try_catch;
+ Local<Script> script = Script::Compile(source);
+ if (try_catch.HasCaught()) {
+ fprintf(stderr, "Failure compiling '%s' (see above)\n", name);
+ exit(1);
+ }
+ script->Run();
+ if (try_catch.HasCaught()) {
+ fprintf(stderr, "Failure running '%s'\n", name);
+ Local<Message> message = try_catch.Message();
+ Local<String> message_string = message->Get();
+ Local<String> message_line = message->GetSourceLine();
+ int len = 2 + message_string->Utf8Length() + message_line->Utf8Length();
+ char* buf = new char(len);
+ message_string->WriteUtf8(buf);
+ fprintf(stderr, "%s at line %d\n", buf, message->GetLineNumber());
+ message_line->WriteUtf8(buf);
+ fprintf(stderr, "%s\n", buf);
+ int from = message->GetStartColumn();
+ int to = message->GetEndColumn();
+ int i;
+ for (i = 0; i < from; i++) fprintf(stderr, " ");
+ for ( ; i <= to; i++) fprintf(stderr, "^");
+ fprintf(stderr, "\n");
+ exit(1);
+ }
+ context->Exit();
+ }
// Make sure all builtin scripts are cached.
{ HandleScope scope;
for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
@@ -337,12 +407,20 @@ int main(int argc, char** argv) {
sink.WritePartialSnapshot();
sink.WriteSpaceUsed(
+ "context_",
partial_ser.CurrentAllocationAddress(i::NEW_SPACE),
partial_ser.CurrentAllocationAddress(i::OLD_POINTER_SPACE),
partial_ser.CurrentAllocationAddress(i::OLD_DATA_SPACE),
partial_ser.CurrentAllocationAddress(i::CODE_SPACE),
partial_ser.CurrentAllocationAddress(i::MAP_SPACE),
- partial_ser.CurrentAllocationAddress(i::CELL_SPACE),
- partial_ser.CurrentAllocationAddress(i::LO_SPACE));
+ partial_ser.CurrentAllocationAddress(i::CELL_SPACE));
+ sink.WriteSpaceUsed(
+ "",
+ ser.CurrentAllocationAddress(i::NEW_SPACE),
+ ser.CurrentAllocationAddress(i::OLD_POINTER_SPACE),
+ ser.CurrentAllocationAddress(i::OLD_DATA_SPACE),
+ ser.CurrentAllocationAddress(i::CODE_SPACE),
+ ser.CurrentAllocationAddress(i::MAP_SPACE),
+ ser.CurrentAllocationAddress(i::CELL_SPACE));
return 0;
}
diff --git a/src/3rdparty/v8/src/object-observe.js b/src/3rdparty/v8/src/object-observe.js
new file mode 100644
index 0000000..28aa1f4
--- /dev/null
+++ b/src/3rdparty/v8/src/object-observe.js
@@ -0,0 +1,240 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"use strict";
+
+var InternalObjectIsFrozen = $Object.isFrozen;
+var InternalObjectFreeze = $Object.freeze;
+
+var observationState = %GetObservationState();
+if (IS_UNDEFINED(observationState.observerInfoMap)) {
+ observationState.observerInfoMap = %CreateObjectHashTable();
+ observationState.objectInfoMap = %CreateObjectHashTable();
+ observationState.notifierTargetMap = %CreateObjectHashTable();
+ observationState.activeObservers = new InternalArray;
+ observationState.observerPriority = 0;
+}
+
+function InternalObjectHashTable(tableName) {
+ this.tableName = tableName;
+}
+
+InternalObjectHashTable.prototype = {
+ get: function(key) {
+ return %ObjectHashTableGet(observationState[this.tableName], key);
+ },
+ set: function(key, value) {
+ observationState[this.tableName] =
+ %ObjectHashTableSet(observationState[this.tableName], key, value);
+ },
+ has: function(key) {
+ return %ObjectHashTableHas(observationState[this.tableName], key);
+ }
+};
+
+var observerInfoMap = new InternalObjectHashTable('observerInfoMap');
+var objectInfoMap = new InternalObjectHashTable('objectInfoMap');
+var notifierTargetMap = new InternalObjectHashTable('notifierTargetMap');
+
+function CreateObjectInfo(object) {
+ var info = {
+ changeObservers: new InternalArray,
+ notifier: null,
+ };
+ objectInfoMap.set(object, info);
+ return info;
+}
+
+function ObjectObserve(object, callback) {
+ if (!IS_SPEC_OBJECT(object))
+ throw MakeTypeError("observe_non_object", ["observe"]);
+ if (!IS_SPEC_FUNCTION(callback))
+ throw MakeTypeError("observe_non_function", ["observe"]);
+ if (InternalObjectIsFrozen(callback))
+ throw MakeTypeError("observe_callback_frozen");
+
+ if (!observerInfoMap.has(callback)) {
+ observerInfoMap.set(callback, {
+ pendingChangeRecords: null,
+ priority: observationState.observerPriority++,
+ });
+ }
+
+ var objectInfo = objectInfoMap.get(object);
+ if (IS_UNDEFINED(objectInfo)) {
+ objectInfo = CreateObjectInfo(object);
+ %SetIsObserved(object, true);
+ }
+
+ var changeObservers = objectInfo.changeObservers;
+ if (changeObservers.indexOf(callback) >= 0)
+ return;
+
+ changeObservers.push(callback);
+}
+
+function ObjectUnobserve(object, callback) {
+ if (!IS_SPEC_OBJECT(object))
+ throw MakeTypeError("observe_non_object", ["unobserve"]);
+
+ var objectInfo = objectInfoMap.get(object);
+ if (IS_UNDEFINED(objectInfo))
+ return;
+
+ var changeObservers = objectInfo.changeObservers;
+ var index = changeObservers.indexOf(callback);
+ if (index < 0)
+ return;
+
+ changeObservers.splice(index, 1);
+}
+
+function EnqueueChangeRecord(changeRecord, observers) {
+ for (var i = 0; i < observers.length; i++) {
+ var observer = observers[i];
+ var observerInfo = observerInfoMap.get(observer);
+ observationState.activeObservers[observerInfo.priority] = observer;
+ %SetObserverDeliveryPending();
+ if (IS_NULL(observerInfo.pendingChangeRecords)) {
+ observerInfo.pendingChangeRecords = new InternalArray(changeRecord);
+ } else {
+ observerInfo.pendingChangeRecords.push(changeRecord);
+ }
+ }
+}
+
+function NotifyChange(type, object, name, oldValue) {
+ var objectInfo = objectInfoMap.get(object);
+ var changeRecord = (arguments.length < 4) ?
+ { type: type, object: object, name: name } :
+ { type: type, object: object, name: name, oldValue: oldValue };
+ InternalObjectFreeze(changeRecord);
+ EnqueueChangeRecord(changeRecord, objectInfo.changeObservers);
+}
+
+var notifierPrototype = {};
+
+function ObjectNotifierNotify(changeRecord) {
+ if (!IS_SPEC_OBJECT(this))
+ throw MakeTypeError("called_on_non_object", ["notify"]);
+
+ var target = notifierTargetMap.get(this);
+ if (IS_UNDEFINED(target))
+ throw MakeTypeError("observe_notify_non_notifier");
+
+ if (!IS_STRING(changeRecord.type))
+ throw MakeTypeError("observe_type_non_string");
+
+ var objectInfo = objectInfoMap.get(target);
+ if (IS_UNDEFINED(objectInfo))
+ return;
+
+ if (!objectInfo.changeObservers.length)
+ return;
+
+ var newRecord = {
+ object: target
+ };
+ for (var prop in changeRecord) {
+ if (prop === 'object')
+ continue;
+ newRecord[prop] = changeRecord[prop];
+ }
+ InternalObjectFreeze(newRecord);
+
+ EnqueueChangeRecord(newRecord, objectInfo.changeObservers);
+}
+
+function ObjectGetNotifier(object) {
+ if (!IS_SPEC_OBJECT(object))
+ throw MakeTypeError("observe_non_object", ["getNotifier"]);
+
+ if (InternalObjectIsFrozen(object))
+ return null;
+
+ var objectInfo = objectInfoMap.get(object);
+ if (IS_UNDEFINED(objectInfo))
+ objectInfo = CreateObjectInfo(object);
+
+ if (IS_NULL(objectInfo.notifier)) {
+ objectInfo.notifier = {
+ __proto__: notifierPrototype
+ };
+ notifierTargetMap.set(objectInfo.notifier, object);
+ }
+
+ return objectInfo.notifier;
+}
+
+function DeliverChangeRecordsForObserver(observer) {
+ var observerInfo = observerInfoMap.get(observer);
+ if (IS_UNDEFINED(observerInfo))
+ return;
+
+ var pendingChangeRecords = observerInfo.pendingChangeRecords;
+ if (IS_NULL(pendingChangeRecords))
+ return;
+
+ observerInfo.pendingChangeRecords = null;
+ var delivered = [];
+ %MoveArrayContents(pendingChangeRecords, delivered);
+ try {
+ %Call(void 0, delivered, observer);
+ } catch (ex) {}
+}
+
+function ObjectDeliverChangeRecords(callback) {
+ if (!IS_SPEC_FUNCTION(callback))
+ throw MakeTypeError("observe_non_function", ["deliverChangeRecords"]);
+
+ DeliverChangeRecordsForObserver(callback);
+}
+
+function DeliverChangeRecords() {
+ while (observationState.activeObservers.length) {
+ var activeObservers = observationState.activeObservers;
+ observationState.activeObservers = new InternalArray;
+ for (var i in activeObservers) {
+ DeliverChangeRecordsForObserver(activeObservers[i]);
+ }
+ }
+}
+
+function SetupObjectObserve() {
+ %CheckIsBootstrapping();
+ InstallFunctions($Object, DONT_ENUM, $Array(
+ "deliverChangeRecords", ObjectDeliverChangeRecords,
+ "getNotifier", ObjectGetNotifier,
+ "observe", ObjectObserve,
+ "unobserve", ObjectUnobserve
+ ));
+ InstallFunctions(notifierPrototype, DONT_ENUM, $Array(
+ "notify", ObjectNotifierNotify
+ ));
+}
+
+SetupObjectObserve();
diff --git a/src/3rdparty/v8/src/objects-debug.cc b/src/3rdparty/v8/src/objects-debug.cc
index 9006abd..c2f64d4 100644
--- a/src/3rdparty/v8/src/objects-debug.cc
+++ b/src/3rdparty/v8/src/objects-debug.cc
@@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void MaybeObject::Verify() {
Object* this_as_object;
@@ -55,18 +55,18 @@ void Object::VerifyPointer(Object* p) {
if (p->IsHeapObject()) {
HeapObject::VerifyHeapPointer(p);
} else {
- ASSERT(p->IsSmi());
+ CHECK(p->IsSmi());
}
}
void Smi::SmiVerify() {
- ASSERT(IsSmi());
+ CHECK(IsSmi());
}
void Failure::FailureVerify() {
- ASSERT(IsFailure());
+ CHECK(IsFailure());
}
@@ -207,68 +207,68 @@ void HeapObject::HeapObjectVerify() {
void HeapObject::VerifyHeapPointer(Object* p) {
- ASSERT(p->IsHeapObject());
- ASSERT(HEAP->Contains(HeapObject::cast(p)));
+ CHECK(p->IsHeapObject());
+ CHECK(HEAP->Contains(HeapObject::cast(p)));
}
void HeapNumber::HeapNumberVerify() {
- ASSERT(IsHeapNumber());
+ CHECK(IsHeapNumber());
}
void ByteArray::ByteArrayVerify() {
- ASSERT(IsByteArray());
+ CHECK(IsByteArray());
}
void FreeSpace::FreeSpaceVerify() {
- ASSERT(IsFreeSpace());
+ CHECK(IsFreeSpace());
}
void ExternalPixelArray::ExternalPixelArrayVerify() {
- ASSERT(IsExternalPixelArray());
+ CHECK(IsExternalPixelArray());
}
void ExternalByteArray::ExternalByteArrayVerify() {
- ASSERT(IsExternalByteArray());
+ CHECK(IsExternalByteArray());
}
void ExternalUnsignedByteArray::ExternalUnsignedByteArrayVerify() {
- ASSERT(IsExternalUnsignedByteArray());
+ CHECK(IsExternalUnsignedByteArray());
}
void ExternalShortArray::ExternalShortArrayVerify() {
- ASSERT(IsExternalShortArray());
+ CHECK(IsExternalShortArray());
}
void ExternalUnsignedShortArray::ExternalUnsignedShortArrayVerify() {
- ASSERT(IsExternalUnsignedShortArray());
+ CHECK(IsExternalUnsignedShortArray());
}
void ExternalIntArray::ExternalIntArrayVerify() {
- ASSERT(IsExternalIntArray());
+ CHECK(IsExternalIntArray());
}
void ExternalUnsignedIntArray::ExternalUnsignedIntArrayVerify() {
- ASSERT(IsExternalUnsignedIntArray());
+ CHECK(IsExternalUnsignedIntArray());
}
void ExternalFloatArray::ExternalFloatArrayVerify() {
- ASSERT(IsExternalFloatArray());
+ CHECK(IsExternalFloatArray());
}
void ExternalDoubleArray::ExternalDoubleArrayVerify() {
- ASSERT(IsExternalDoubleArray());
+ CHECK(IsExternalDoubleArray());
}
@@ -277,8 +277,8 @@ void JSObject::JSObjectVerify() {
VerifyHeapPointer(elements());
if (GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS) {
- ASSERT(this->elements()->IsFixedArray());
- ASSERT(this->elements()->length() >= 2);
+ CHECK(this->elements()->IsFixedArray());
+ CHECK_GE(this->elements()->length(), 2);
}
if (HasFastProperties()) {
@@ -286,35 +286,41 @@ void JSObject::JSObjectVerify() {
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
- ASSERT_EQ((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ CHECK_EQ((map()->has_fast_smi_or_object_elements() ||
(elements() == GetHeap()->empty_fixed_array())),
(elements()->map() == GetHeap()->fixed_array_map() ||
elements()->map() == GetHeap()->fixed_cow_array_map()));
- ASSERT(map()->has_fast_elements() == HasFastElements());
+ CHECK(map()->has_fast_object_elements() == HasFastObjectElements());
}
void Map::MapVerify() {
- ASSERT(!HEAP->InNewSpace(this));
- ASSERT(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
- ASSERT(instance_size() == kVariableSizeSentinel ||
+ CHECK(!HEAP->InNewSpace(this));
+ CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
+ CHECK(instance_size() == kVariableSizeSentinel ||
(kPointerSize <= instance_size() &&
instance_size() < HEAP->Capacity()));
VerifyHeapPointer(prototype());
VerifyHeapPointer(instance_descriptors());
+ DescriptorArray* descriptors = instance_descriptors();
+ for (int i = 0; i < NumberOfOwnDescriptors(); ++i) {
+ CHECK_EQ(i, descriptors->GetDetails(i).descriptor_index() - 1);
+ }
SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
- SLOW_ASSERT(instance_descriptors()->IsConsistentWithBackPointers(this));
+ if (HasTransitionArray()) {
+ SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
+ SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
+ }
}
void Map::SharedMapVerify() {
MapVerify();
- ASSERT(is_shared());
- ASSERT(instance_descriptors()->IsEmpty());
- ASSERT_EQ(0, pre_allocated_property_fields());
- ASSERT_EQ(0, unused_property_fields());
- ASSERT_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
+ CHECK(is_shared());
+ CHECK(instance_descriptors()->IsEmpty());
+ CHECK_EQ(0, pre_allocated_property_fields());
+ CHECK_EQ(0, unused_property_fields());
+ CHECK_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
visitor_id());
}
@@ -322,21 +328,21 @@ void Map::SharedMapVerify() {
void CodeCache::CodeCacheVerify() {
VerifyHeapPointer(default_cache());
VerifyHeapPointer(normal_type_cache());
- ASSERT(default_cache()->IsFixedArray());
- ASSERT(normal_type_cache()->IsUndefined()
+ CHECK(default_cache()->IsFixedArray());
+ CHECK(normal_type_cache()->IsUndefined()
|| normal_type_cache()->IsCodeCacheHashTable());
}
void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
VerifyHeapPointer(cache());
- ASSERT(cache()->IsUndefined() || cache()->IsPolymorphicCodeCacheHashTable());
+ CHECK(cache()->IsUndefined() || cache()->IsPolymorphicCodeCacheHashTable());
}
void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
- VerifyObjectField(kIcTotalCountOffset);
- VerifyObjectField(kIcWithTypeinfoCountOffset);
+ VerifyObjectField(kStorage1Offset);
+ VerifyObjectField(kStorage2Offset);
VerifyHeapPointer(type_feedback_cells());
}
@@ -362,7 +368,7 @@ void FixedDoubleArray::FixedDoubleArrayVerify() {
for (int i = 0; i < length(); i++) {
if (!is_the_hole(i)) {
double value = get_scalar(i);
- ASSERT(!isnan(value) ||
+ CHECK(!isnan(value) ||
(BitCast<uint64_t>(value) ==
BitCast<uint64_t>(canonical_not_the_hole_nan_as_double())) ||
((BitCast<uint64_t>(value) & Double::kSignMask) != 0));
@@ -372,11 +378,10 @@ void FixedDoubleArray::FixedDoubleArrayVerify() {
void JSModule::JSModuleVerify() {
- Object* v = context();
- if (v->IsHeapObject()) {
- VerifyHeapPointer(v);
- }
- CHECK(v->IsUndefined() || v->IsModuleContext());
+ VerifyObjectField(kContextOffset);
+ VerifyObjectField(kScopeInfoOffset);
+ CHECK(context()->IsUndefined() ||
+ Context::cast(context())->IsModuleContext());
}
@@ -458,10 +463,17 @@ void String::StringVerify() {
ConsString::cast(this)->ConsStringVerify();
} else if (IsSlicedString()) {
SlicedString::cast(this)->SlicedStringVerify();
+ } else if (IsSeqAsciiString()) {
+ SeqAsciiString::cast(this)->SeqAsciiStringVerify();
}
}
+void SeqAsciiString::SeqAsciiStringVerify() {
+ CHECK(String::IsAscii(GetChars(), length()));
+}
+
+
void ConsString::ConsStringVerify() {
CHECK(this->first()->IsString());
CHECK(this->second() == GetHeap()->empty_string() ||
@@ -487,7 +499,8 @@ void JSFunction::JSFunctionVerify() {
VerifyObjectField(kPrototypeOrInitialMapOffset);
VerifyObjectField(kNextFunctionLinkOffset);
CHECK(code()->IsCode());
- CHECK(next_function_link()->IsUndefined() ||
+ CHECK(next_function_link() == NULL ||
+ next_function_link()->IsUndefined() ||
next_function_link()->IsJSFunction());
}
@@ -496,6 +509,7 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() {
CHECK(IsSharedFunctionInfo());
VerifyObjectField(kNameOffset);
VerifyObjectField(kCodeOffset);
+ VerifyObjectField(kOptimizedCodeMapOffset);
VerifyObjectField(kScopeInfoOffset);
VerifyObjectField(kInstanceClassNameOffset);
VerifyObjectField(kFunctionDataOffset);
@@ -507,10 +521,10 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() {
void JSGlobalProxy::JSGlobalProxyVerify() {
CHECK(IsJSGlobalProxy());
JSObjectVerify();
- VerifyObjectField(JSGlobalProxy::kContextOffset);
+ VerifyObjectField(JSGlobalProxy::kNativeContextOffset);
// Make sure that this object has no properties, elements.
CHECK_EQ(0, properties()->length());
- CHECK(HasFastElements());
+ CHECK(HasFastObjectElements());
CHECK_EQ(0, FixedArray::cast(elements())->length());
}
@@ -542,14 +556,14 @@ void Oddball::OddballVerify() {
VerifyHeapPointer(to_string());
Object* number = to_number();
if (number->IsHeapObject()) {
- ASSERT(number == HEAP->nan_value());
+ CHECK(number == HEAP->nan_value());
} else {
- ASSERT(number->IsSmi());
+ CHECK(number->IsSmi());
int value = Smi::cast(number)->value();
// Hidden oddballs have negative smis.
const int kLeastHiddenOddballNumber = -4;
- ASSERT(value <= 1);
- ASSERT(value >= kLeastHiddenOddballNumber);
+ CHECK_LE(value, 1);
+ CHECK(value >= kLeastHiddenOddballNumber);
}
}
@@ -578,8 +592,8 @@ void Code::CodeVerify() {
void JSArray::JSArrayVerify() {
JSObjectVerify();
- ASSERT(length()->IsNumber() || length()->IsUndefined());
- ASSERT(elements()->IsUndefined() ||
+ CHECK(length()->IsNumber() || length()->IsUndefined());
+ CHECK(elements()->IsUndefined() ||
elements()->IsFixedArray() ||
elements()->IsFixedDoubleArray());
}
@@ -589,7 +603,7 @@ void JSSet::JSSetVerify() {
CHECK(IsJSSet());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
@@ -597,7 +611,7 @@ void JSMap::JSMapVerify() {
CHECK(IsJSMap());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
@@ -605,17 +619,17 @@ void JSWeakMap::JSWeakMapVerify() {
CHECK(IsJSWeakMap());
JSObjectVerify();
VerifyHeapPointer(table());
- ASSERT(table()->IsHashTable() || table()->IsUndefined());
+ CHECK(table()->IsHashTable() || table()->IsUndefined());
}
void JSRegExp::JSRegExpVerify() {
JSObjectVerify();
- ASSERT(data()->IsUndefined() || data()->IsFixedArray());
+ CHECK(data()->IsUndefined() || data()->IsFixedArray());
switch (TypeTag()) {
case JSRegExp::ATOM: {
FixedArray* arr = FixedArray::cast(data());
- ASSERT(arr->get(JSRegExp::kAtomPatternIndex)->IsString());
+ CHECK(arr->get(JSRegExp::kAtomPatternIndex)->IsString());
break;
}
case JSRegExp::IRREGEXP: {
@@ -626,26 +640,26 @@ void JSRegExp::JSRegExpVerify() {
// Smi : Not compiled yet (-1) or code prepared for flushing.
// JSObject: Compilation error.
// Code/ByteArray: Compiled code.
- ASSERT(ascii_data->IsSmi() ||
+ CHECK(ascii_data->IsSmi() ||
(is_native ? ascii_data->IsCode() : ascii_data->IsByteArray()));
Object* uc16_data = arr->get(JSRegExp::kIrregexpUC16CodeIndex);
- ASSERT(uc16_data->IsSmi() ||
+ CHECK(uc16_data->IsSmi() ||
(is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
Object* ascii_saved = arr->get(JSRegExp::kIrregexpASCIICodeSavedIndex);
- ASSERT(ascii_saved->IsSmi() || ascii_saved->IsString() ||
+ CHECK(ascii_saved->IsSmi() || ascii_saved->IsString() ||
ascii_saved->IsCode());
Object* uc16_saved = arr->get(JSRegExp::kIrregexpUC16CodeSavedIndex);
- ASSERT(uc16_saved->IsSmi() || uc16_saved->IsString() ||
+ CHECK(uc16_saved->IsSmi() || uc16_saved->IsString() ||
uc16_saved->IsCode());
- ASSERT(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
- ASSERT(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
+ CHECK(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
+ CHECK(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
break;
}
default:
- ASSERT_EQ(JSRegExp::NOT_COMPILED, TypeTag());
- ASSERT(data()->IsUndefined());
+ CHECK_EQ(JSRegExp::NOT_COMPILED, TypeTag());
+ CHECK(data()->IsUndefined());
break;
}
}
@@ -654,7 +668,7 @@ void JSRegExp::JSRegExpVerify() {
void JSProxy::JSProxyVerify() {
CHECK(IsJSProxy());
VerifyPointer(handler());
- ASSERT(hash()->IsSmi() || hash()->IsUndefined());
+ CHECK(hash()->IsSmi() || hash()->IsUndefined());
}
@@ -667,7 +681,7 @@ void JSFunctionProxy::JSFunctionProxyVerify() {
void Foreign::ForeignVerify() {
- ASSERT(IsForeign());
+ CHECK(IsForeign());
}
@@ -678,6 +692,7 @@ void AccessorInfo::AccessorInfoVerify() {
VerifyPointer(name());
VerifyPointer(data());
VerifyPointer(flag());
+ VerifyPointer(expected_receiver_type());
}
@@ -770,6 +785,47 @@ void Script::ScriptVerify() {
}
+void JSFunctionResultCache::JSFunctionResultCacheVerify() {
+ JSFunction::cast(get(kFactoryIndex))->Verify();
+
+ int size = Smi::cast(get(kCacheSizeIndex))->value();
+ CHECK(kEntriesIndex <= size);
+ CHECK(size <= length());
+ CHECK_EQ(0, size % kEntrySize);
+
+ int finger = Smi::cast(get(kFingerIndex))->value();
+ CHECK(kEntriesIndex <= finger);
+ CHECK((finger < size) || (finger == kEntriesIndex && finger == size));
+ CHECK_EQ(0, finger % kEntrySize);
+
+ if (FLAG_enable_slow_asserts) {
+ for (int i = kEntriesIndex; i < size; i++) {
+ CHECK(!get(i)->IsTheHole());
+ get(i)->Verify();
+ }
+ for (int i = size; i < length(); i++) {
+ CHECK(get(i)->IsTheHole());
+ get(i)->Verify();
+ }
+ }
+}
+
+
+void NormalizedMapCache::NormalizedMapCacheVerify() {
+ FixedArray::cast(this)->Verify();
+ if (FLAG_enable_slow_asserts) {
+ for (int i = 0; i < length(); i++) {
+ Object* e = get(i);
+ if (e->IsMap()) {
+ Map::cast(e)->SharedMapVerify();
+ } else {
+ CHECK(e->IsUndefined());
+ }
+ }
+ }
+}
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
void DebugInfo::DebugInfoVerify() {
CHECK(IsDebugInfo());
@@ -788,7 +844,9 @@ void BreakPointInfo::BreakPointInfoVerify() {
VerifyPointer(break_point_objects());
}
#endif // ENABLE_DEBUGGER_SUPPORT
+#endif // VERIFY_HEAP
+#ifdef DEBUG
void JSObject::IncrementSpillStatistics(SpillInformation* info) {
info->number_of_objects_++;
@@ -805,6 +863,11 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
}
// Indexed properties
switch (GetElementsKind()) {
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
info->number_of_objects_with_fast_elements_++;
int holes = 0;
@@ -818,6 +881,14 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
info->number_of_fast_unused_elements_ += holes;
break;
}
+ case EXTERNAL_BYTE_ELEMENTS:
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case EXTERNAL_SHORT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case EXTERNAL_INT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case EXTERNAL_FLOAT_ELEMENTS:
+ case EXTERNAL_DOUBLE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS: {
info->number_of_objects_with_fast_elements_++;
ExternalPixelArray* e = ExternalPixelArray::cast(elements());
@@ -831,8 +902,7 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
dict->Capacity() - dict->NumberOfElements();
break;
}
- default:
- UNREACHABLE();
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
break;
}
}
@@ -875,17 +945,18 @@ void JSObject::SpillInformation::Print() {
}
-bool DescriptorArray::IsSortedNoDuplicates() {
+bool DescriptorArray::IsSortedNoDuplicates(int valid_entries) {
+ if (valid_entries == -1) valid_entries = number_of_descriptors();
String* current_key = NULL;
uint32_t current = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
- String* key = GetKey(i);
+ String* key = GetSortedKey(i);
if (key == current_key) {
PrintDescriptors();
return false;
}
current_key = key;
- uint32_t hash = GetKey(i)->Hash();
+ uint32_t hash = GetSortedKey(i)->Hash();
if (hash < current) {
PrintDescriptors();
return false;
@@ -896,121 +967,42 @@ bool DescriptorArray::IsSortedNoDuplicates() {
}
-static bool CheckOneBackPointer(Map* current_map, Object* target) {
- return !target->IsMap() || Map::cast(target)->GetBackPointer() == current_map;
-}
-
-
-bool DescriptorArray::IsConsistentWithBackPointers(Map* current_map) {
- for (int i = 0; i < number_of_descriptors(); ++i) {
- switch (GetType(i)) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- if (!CheckOneBackPointer(current_map, GetValue(i))) {
- return false;
- }
- break;
- case ELEMENTS_TRANSITION: {
- Object* object = GetValue(i);
- if (!CheckOneBackPointer(current_map, object)) {
- return false;
- }
- if (object->IsFixedArray()) {
- FixedArray* array = FixedArray::cast(object);
- for (int i = 0; i < array->length(); ++i) {
- if (!CheckOneBackPointer(current_map, array->get(i))) {
- return false;
- }
- }
- }
- break;
- }
- case CALLBACKS: {
- Object* object = GetValue(i);
- if (object->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(object);
- if (!CheckOneBackPointer(current_map, accessors->getter())) {
- return false;
- }
- if (!CheckOneBackPointer(current_map, accessors->setter())) {
- return false;
- }
- }
- break;
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- break;
+bool TransitionArray::IsSortedNoDuplicates(int valid_entries) {
+ ASSERT(valid_entries == -1);
+ String* current_key = NULL;
+ uint32_t current = 0;
+ for (int i = 0; i < number_of_transitions(); i++) {
+ String* key = GetSortedKey(i);
+ if (key == current_key) {
+ PrintTransitions();
+ return false;
+ }
+ current_key = key;
+ uint32_t hash = GetSortedKey(i)->Hash();
+ if (hash < current) {
+ PrintTransitions();
+ return false;
}
+ current = hash;
}
return true;
}
-void JSFunctionResultCache::JSFunctionResultCacheVerify() {
- JSFunction::cast(get(kFactoryIndex))->Verify();
-
- int size = Smi::cast(get(kCacheSizeIndex))->value();
- ASSERT(kEntriesIndex <= size);
- ASSERT(size <= length());
- ASSERT_EQ(0, size % kEntrySize);
-
- int finger = Smi::cast(get(kFingerIndex))->value();
- ASSERT(kEntriesIndex <= finger);
- ASSERT((finger < size) || (finger == kEntriesIndex && finger == size));
- ASSERT_EQ(0, finger % kEntrySize);
-
- if (FLAG_enable_slow_asserts) {
- for (int i = kEntriesIndex; i < size; i++) {
- ASSERT(!get(i)->IsTheHole());
- get(i)->Verify();
- }
- for (int i = size; i < length(); i++) {
- ASSERT(get(i)->IsTheHole());
- get(i)->Verify();
- }
- }
+static bool CheckOneBackPointer(Map* current_map, Object* target) {
+ return !target->IsMap() || Map::cast(target)->GetBackPointer() == current_map;
}
-void NormalizedMapCache::NormalizedMapCacheVerify() {
- FixedArray::cast(this)->Verify();
- if (FLAG_enable_slow_asserts) {
- for (int i = 0; i < length(); i++) {
- Object* e = get(i);
- if (e->IsMap()) {
- Map::cast(e)->SharedMapVerify();
- } else {
- ASSERT(e->IsUndefined());
- }
- }
+bool TransitionArray::IsConsistentWithBackPointers(Map* current_map) {
+ if (HasElementsTransition() &&
+ !CheckOneBackPointer(current_map, elements_transition())) {
+ return false;
}
-}
-
-
-void Map::ZapInstanceDescriptors() {
- DescriptorArray* descriptors = instance_descriptors();
- if (descriptors == GetHeap()->empty_descriptor_array()) return;
- FixedArray* contents = FixedArray::cast(
- descriptors->get(DescriptorArray::kContentArrayIndex));
- MemsetPointer(descriptors->data_start(),
- GetHeap()->the_hole_value(),
- descriptors->length());
- MemsetPointer(contents->data_start(),
- GetHeap()->the_hole_value(),
- contents->length());
-}
-
-
-void Map::ZapPrototypeTransitions() {
- FixedArray* proto_transitions = prototype_transitions();
- MemsetPointer(proto_transitions->data_start(),
- GetHeap()->the_hole_value(),
- proto_transitions->length());
+ for (int i = 0; i < number_of_transitions(); ++i) {
+ if (!CheckOneBackPointer(current_map, GetTarget(i))) return false;
+ }
+ return true;
}
diff --git a/src/3rdparty/v8/src/objects-inl.h b/src/3rdparty/v8/src/objects-inl.h
index 74e63f2..b45b4d0 100644
--- a/src/3rdparty/v8/src/objects-inl.h
+++ b/src/3rdparty/v8/src/objects-inl.h
@@ -47,6 +47,7 @@
#include "v8memory.h"
#include "factory.h"
#include "incremental-marking.h"
+#include "transitions-inl.h"
namespace v8 {
namespace internal {
@@ -128,18 +129,6 @@ PropertyDetails PropertyDetails::AsDeleted() {
}
-bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
- ElementsKind to_kind) {
- if (to_kind == FAST_ELEMENTS) {
- return from_kind == FAST_SMI_ONLY_ELEMENTS ||
- from_kind == FAST_DOUBLE_ELEMENTS;
- } else {
- return to_kind == FAST_DOUBLE_ELEMENTS &&
- from_kind == FAST_SMI_ONLY_ELEMENTS;
- }
-}
-
-
bool Object::IsFixedArrayBase() {
return IsFixedArray() || IsFixedDoubleArray();
}
@@ -300,7 +289,7 @@ bool StringShape::IsSymbol() {
bool String::IsAsciiRepresentation() {
uint32_t type = map()->instance_type();
- return (type & kStringEncodingMask) == kAsciiStringTag;
+ return (type & kStringEncodingMask) == kOneByteStringTag;
}
@@ -316,7 +305,7 @@ bool String::IsAsciiRepresentationUnderneath() {
STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
ASSERT(IsFlat());
switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
- case kAsciiStringTag:
+ case kOneByteStringTag:
return true;
case kTwoByteStringTag:
return false;
@@ -332,7 +321,7 @@ bool String::IsTwoByteRepresentationUnderneath() {
STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
ASSERT(IsFlat());
switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
- case kAsciiStringTag:
+ case kOneByteStringTag:
return false;
case kTwoByteStringTag:
return true;
@@ -344,7 +333,7 @@ bool String::IsTwoByteRepresentationUnderneath() {
bool String::HasOnlyAsciiChars() {
uint32_t type = map()->instance_type();
- return (type & kStringEncodingMask) == kAsciiStringTag ||
+ return (type & kStringEncodingMask) == kOneByteStringTag ||
(type & kAsciiDataHintMask) == kAsciiDataHintTag;
}
@@ -393,9 +382,12 @@ uint32_t StringShape::full_representation_tag() {
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
Internals::kFullStringRepresentationMask);
+STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
+ Internals::kStringEncodingMask);
+
bool StringShape::IsSequentialAscii() {
- return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
+ return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}
@@ -405,10 +397,16 @@ bool StringShape::IsSequentialTwoByte() {
bool StringShape::IsExternalAscii() {
- return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
+ return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}
+STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
+ Internals::kExternalAsciiRepresentationTag);
+
+STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);
+
+
bool StringShape::IsExternalTwoByte() {
return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}
@@ -417,6 +415,7 @@ bool StringShape::IsExternalTwoByte() {
STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
Internals::kExternalTwoByteRepresentationTag);
+STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
uc32 FlatStringReader::Get(int index) {
ASSERT(0 <= index && index <= length_);
@@ -536,6 +535,11 @@ bool Object::IsDescriptorArray() {
}
+bool Object::IsTransitionArray() {
+ return IsFixedArray();
+}
+
+
bool Object::IsDeoptimizationInputData() {
// Must be a fixed array.
if (!IsFixedArray()) return false;
@@ -574,31 +578,23 @@ bool Object::IsTypeFeedbackCells() {
bool Object::IsContext() {
- if (Object::IsHeapObject()) {
- Map* map = HeapObject::cast(this)->map();
- Heap* heap = map->GetHeap();
- return (map == heap->function_context_map() ||
- map == heap->catch_context_map() ||
- map == heap->with_context_map() ||
- map == heap->global_context_map() ||
- map == heap->block_context_map() ||
- map == heap->module_context_map());
- }
- return false;
-}
-
-
-bool Object::IsGlobalContext() {
- return Object::IsHeapObject() &&
- HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->global_context_map();
+ if (!Object::IsHeapObject()) return false;
+ Map* map = HeapObject::cast(this)->map();
+ Heap* heap = map->GetHeap();
+ return (map == heap->function_context_map() ||
+ map == heap->catch_context_map() ||
+ map == heap->with_context_map() ||
+ map == heap->native_context_map() ||
+ map == heap->block_context_map() ||
+ map == heap->module_context_map() ||
+ map == heap->global_context_map());
}
-bool Object::IsModuleContext() {
+bool Object::IsNativeContext() {
return Object::IsHeapObject() &&
HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->module_context_map();
+ HeapObject::cast(this)->GetHeap()->native_context_map();
}
@@ -664,8 +660,8 @@ bool Object::IsDictionary() {
bool Object::IsSymbolTable() {
- return IsHashTable() && this ==
- HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
+ return IsHashTable() &&
+ this == HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}
@@ -678,7 +674,7 @@ bool Object::IsJSFunctionResultCache() {
% JSFunctionResultCache::kEntrySize != 0) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<JSFunctionResultCache*>(this)->
JSFunctionResultCacheVerify();
@@ -693,7 +689,7 @@ bool Object::IsNormalizedMapCache() {
if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
return false;
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
}
@@ -722,6 +718,11 @@ bool Object::IsMapCache() {
}
+bool Object::IsObjectHashTable() {
+ return IsHashTable();
+}
+
+
bool Object::IsPrimitive() {
return IsOddball() || IsNumber() || IsString();
}
@@ -1114,13 +1115,13 @@ HeapObject* MapWord::ToForwardingAddress() {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
VerifyPointer(READ_FIELD(this, offset));
}
void HeapObject::VerifySmiField(int offset) {
- ASSERT(READ_FIELD(this, offset)->IsSmi());
+ CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
@@ -1244,35 +1245,26 @@ FixedArrayBase* JSObject::elements() {
return static_cast<FixedArrayBase*>(array);
}
-void JSObject::ValidateSmiOnlyElements() {
+
+void JSObject::ValidateElements() {
#if DEBUG
- if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
- Heap* heap = GetHeap();
- // Don't use elements, since integrity checks will fail if there
- // are filler pointers in the array.
- FixedArray* fixed_array =
- reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
- Map* map = fixed_array->map();
- // Arrays that have been shifted in place can't be verified.
- if (map != heap->raw_unchecked_one_pointer_filler_map() &&
- map != heap->raw_unchecked_two_pointer_filler_map() &&
- map != heap->free_space_map()) {
- for (int i = 0; i < fixed_array->length(); i++) {
- Object* current = fixed_array->get(i);
- ASSERT(current->IsSmi() || current->IsTheHole());
- }
- }
+ if (FLAG_enable_slow_asserts) {
+ ElementsAccessor* accessor = GetElementsAccessor();
+ accessor->Validate(this);
}
#endif
}
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
-#if DEBUG
- ValidateSmiOnlyElements();
-#endif
- if ((map()->elements_kind() != FAST_ELEMENTS)) {
- return TransitionElementsKind(FAST_ELEMENTS);
+ ValidateElements();
+ ElementsKind elements_kind = map()->elements_kind();
+ if (!IsFastObjectElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ return TransitionElementsKind(FAST_HOLEY_ELEMENTS);
+ } else {
+ return TransitionElementsKind(FAST_ELEMENTS);
+ }
}
return this;
}
@@ -1284,20 +1276,29 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
ElementsKind current_kind = map()->elements_kind();
ElementsKind target_kind = current_kind;
ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (current_kind == FAST_ELEMENTS) return this;
-
+ bool is_holey = IsFastHoleyElementsKind(current_kind);
+ if (current_kind == FAST_HOLEY_ELEMENTS) return this;
Heap* heap = GetHeap();
Object* the_hole = heap->the_hole_value();
- Object* heap_number_map = heap->heap_number_map();
for (uint32_t i = 0; i < count; ++i) {
Object* current = *objects++;
- if (!current->IsSmi() && current != the_hole) {
- if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
- HeapObject::cast(current)->map() == heap_number_map) {
- target_kind = FAST_DOUBLE_ELEMENTS;
+ if (current == the_hole) {
+ is_holey = true;
+ target_kind = GetHoleyElementsKind(target_kind);
+ } else if (!current->IsSmi()) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
+ if (IsFastSmiElementsKind(target_kind)) {
+ if (is_holey) {
+ target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ }
+ }
+ } else if (is_holey) {
+ target_kind = FAST_HOLEY_ELEMENTS;
+ break;
} else {
target_kind = FAST_ELEMENTS;
- break;
}
}
}
@@ -1310,6 +1311,7 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
+ uint32_t length,
EnsureElementsMode mode) {
if (elements->map() != GetHeap()->fixed_double_array_map()) {
ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
@@ -1318,11 +1320,19 @@ MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
mode = DONT_ALLOW_DOUBLE_ELEMENTS;
}
Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
- return EnsureCanContainElements(objects, elements->length(), mode);
+ return EnsureCanContainElements(objects, length, mode);
}
ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
+ if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
+ FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
+ for (uint32_t i = 0; i < length; ++i) {
+ if (double_array->is_the_hole(i)) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ }
+ }
return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
}
@@ -1334,21 +1344,20 @@ MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
ElementsKind to_kind) {
Map* current_map = map();
ElementsKind from_kind = current_map->elements_kind();
-
if (from_kind == to_kind) return current_map;
- Context* global_context = isolate->context()->global_context();
- if (current_map == global_context->smi_js_array_map()) {
- if (to_kind == FAST_ELEMENTS) {
- return global_context->object_js_array_map();
- } else {
- if (to_kind == FAST_DOUBLE_ELEMENTS) {
- return global_context->double_js_array_map();
- } else {
- ASSERT(to_kind == DICTIONARY_ELEMENTS);
+ Context* native_context = isolate->context()->native_context();
+ Object* maybe_array_maps = native_context->js_array_maps();
+ if (maybe_array_maps->IsFixedArray()) {
+ FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
+ if (array_maps->get(from_kind) == current_map) {
+ Object* maybe_transitioned_map = array_maps->get(to_kind);
+ if (maybe_transitioned_map->IsMap()) {
+ return Map::cast(maybe_transitioned_map);
}
}
}
+
return GetElementsTransitionMapSlow(to_kind);
}
@@ -1357,9 +1366,6 @@ void JSObject::set_map_and_elements(Map* new_map,
FixedArrayBase* value,
WriteBarrierMode mode) {
ASSERT(value->HasValidElements());
-#ifdef DEBUG
- ValidateSmiOnlyElements();
-#endif
if (new_map != NULL) {
if (mode == UPDATE_WRITE_BARRIER) {
set_map(new_map);
@@ -1368,8 +1374,7 @@ void JSObject::set_map_and_elements(Map* new_map,
set_map_no_write_barrier(new_map);
}
}
- ASSERT((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT((map()->has_fast_smi_or_object_elements() ||
(value == GetHeap()->empty_fixed_array())) ==
(value->map() == GetHeap()->fixed_array_map() ||
value->map() == GetHeap()->fixed_cow_array_map()));
@@ -1392,8 +1397,7 @@ void JSObject::initialize_properties() {
void JSObject::initialize_elements() {
- ASSERT(map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT(map()->has_fast_smi_or_object_elements() ||
map()->has_fast_double_elements());
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
@@ -1402,9 +1406,10 @@ void JSObject::initialize_elements() {
MaybeObject* JSObject::ResetElements() {
Object* obj;
- ElementsKind elements_kind = FLAG_smi_only_arrays
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
+ ElementsKind elements_kind = GetInitialFastElementsKind();
+ if (!FLAG_smi_only_arrays) {
+ elements_kind = FastSmiToObjectElementsKind(elements_kind);
+ }
MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
elements_kind);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -1414,6 +1419,43 @@ MaybeObject* JSObject::ResetElements() {
}
+MaybeObject* JSObject::AddFastPropertyUsingMap(Map* map) {
+ ASSERT(this->map()->NumberOfOwnDescriptors() + 1 ==
+ map->NumberOfOwnDescriptors());
+ if (this->map()->unused_property_fields() == 0) {
+ int new_size = properties()->length() + map->unused_property_fields() + 1;
+ FixedArray* new_properties;
+ MaybeObject* maybe_properties = properties()->CopySize(new_size);
+ if (!maybe_properties->To(&new_properties)) return maybe_properties;
+ set_properties(new_properties);
+ }
+ set_map(map);
+ return this;
+}
+
+
+bool JSObject::TryTransitionToField(Handle<JSObject> object,
+ Handle<String> key) {
+ if (!object->map()->HasTransitionArray()) return false;
+ Handle<TransitionArray> transitions(object->map()->transitions());
+ int transition = transitions->Search(*key);
+ if (transition == TransitionArray::kNotFound) return false;
+ PropertyDetails target_details = transitions->GetTargetDetails(transition);
+ if (target_details.type() != FIELD) return false;
+ if (target_details.attributes() != NONE) return false;
+ Handle<Map> target(transitions->GetTarget(transition));
+ JSObject::AddFastPropertyUsingMap(object, target);
+ return true;
+}
+
+
+int JSObject::LastAddedFieldIndex() {
+ Map* map = this->map();
+ int last_added = map->LastAdded();
+ return map->instance_descriptors()->GetFieldIndex(last_added);
+}
+
+
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
@@ -1627,17 +1669,27 @@ void JSObject::InitializeBody(Map* map,
bool JSObject::HasFastProperties() {
+ ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
return !properties()->IsDictionary();
}
-int JSObject::MaxFastProperties() {
+bool JSObject::TooManyFastProperties(int properties,
+ JSObject::StoreFromKeyed store_mode) {
// Allow extra fast properties if the object has more than
- // kMaxFastProperties in-object properties. When this is the case,
+ // kFastPropertiesSoftLimit in-object properties. When this is the case,
// it is very unlikely that the object is being used as a dictionary
// and there is a good chance that allowing more map transitions
// will be worth it.
- return Max(map()->inobject_properties(), kMaxFastProperties);
+ int inobject = map()->inobject_properties();
+
+ int limit;
+ if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
+ limit = Max(inobject, kMaxFastProperties);
+ } else {
+ limit = Max(inobject, kFastPropertiesSoftLimit);
+ }
+ return properties > limit;
}
@@ -1681,6 +1733,23 @@ bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
}
+
+void Object::VerifyApiCallResultType() {
+#if ENABLE_EXTRA_CHECKS
+ if (!(IsSmi() ||
+ IsString() ||
+ IsSpecObject() ||
+ IsHeapNumber() ||
+ IsUndefined() ||
+ IsTrue() ||
+ IsFalse() ||
+ IsNull())) {
+ FATAL("API call returned invalid object");
+ }
+#endif // ENABLE_EXTRA_CHECKS
+}
+
+
FixedArrayBase* FixedArrayBase::cast(Object* object) {
ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
return reinterpret_cast<FixedArrayBase*>(object);
@@ -1693,6 +1762,11 @@ Object* FixedArray::get(int index) {
}
+bool FixedArray::is_the_hole(int index) {
+ return get(index) == GetHeap()->the_hole_value();
+}
+
+
void FixedArray::set(int index, Smi* value) {
ASSERT(map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < this->length());
@@ -1798,7 +1872,7 @@ void FixedArray::set(int index,
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
- ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(array->map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < array->length());
int offset = kHeaderSize + index * kPointerSize;
WRITE_FIELD(array, offset, value);
@@ -1812,7 +1886,7 @@ void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
void FixedArray::NoWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
- ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(array->map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < array->length());
ASSERT(!HEAP->InNewSpace(value));
WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
@@ -1874,7 +1948,7 @@ void FixedArray::set_unchecked(Heap* heap,
void FixedArray::set_null_unchecked(Heap* heap, int index) {
ASSERT(index >= 0 && index < this->length());
- ASSERT(!HEAP->InNewSpace(heap->null_value()));
+ ASSERT(!heap->InNewSpace(heap->null_value()));
WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
@@ -1885,76 +1959,198 @@ Object** FixedArray::data_start() {
bool DescriptorArray::IsEmpty() {
- ASSERT(this->IsSmi() ||
- this->length() > kFirstIndex ||
+ ASSERT(length() >= kFirstIndex ||
this == HEAP->empty_descriptor_array());
- return this->IsSmi() || length() <= kFirstIndex;
+ return length() < kFirstIndex;
}
-int DescriptorArray::bit_field3_storage() {
- Object* storage = READ_FIELD(this, kBitField3StorageOffset);
- return Smi::cast(storage)->value();
+void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
+ WRITE_FIELD(
+ this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
-void DescriptorArray::set_bit_field3_storage(int value) {
- ASSERT(!IsEmpty());
- WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
+
+// Perform a binary search in a fixed array. Low and high are entry indices. If
+// there are three entries in this array it should be called with low=0 and
+// high=2.
+template<SearchMode search_mode, typename T>
+int BinarySearch(T* array, String* name, int low, int high, int valid_entries) {
+ uint32_t hash = name->Hash();
+ int limit = high;
+
+ ASSERT(low <= high);
+
+ while (low != high) {
+ int mid = (low + high) / 2;
+ String* mid_name = array->GetSortedKey(mid);
+ uint32_t mid_hash = mid_name->Hash();
+
+ if (mid_hash >= hash) {
+ high = mid;
+ } else {
+ low = mid + 1;
+ }
+ }
+
+ for (; low <= limit; ++low) {
+ int sort_index = array->GetSortedKeyIndex(low);
+ String* entry = array->GetKey(sort_index);
+ if (entry->Hash() != hash) break;
+ if (entry->Equals(name)) {
+ if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
+ return sort_index;
+ }
+ return T::kNotFound;
+ }
+ }
+
+ return T::kNotFound;
}
-void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
- int first,
- int second) {
- Object* tmp = array->get(first);
- NoIncrementalWriteBarrierSet(array, first, array->get(second));
- NoIncrementalWriteBarrierSet(array, second, tmp);
+// Perform a linear search in this fixed array. len is the number of entry
+// indices that are valid.
+template<SearchMode search_mode, typename T>
+int LinearSearch(T* array, String* name, int len, int valid_entries) {
+ uint32_t hash = name->Hash();
+ if (search_mode == ALL_ENTRIES) {
+ for (int number = 0; number < len; number++) {
+ int sorted_index = array->GetSortedKeyIndex(number);
+ String* entry = array->GetKey(sorted_index);
+ uint32_t current_hash = entry->Hash();
+ if (current_hash > hash) break;
+ if (current_hash == hash && entry->Equals(name)) return sorted_index;
+ }
+ } else {
+ ASSERT(len >= valid_entries);
+ for (int number = 0; number < valid_entries; number++) {
+ String* entry = array->GetKey(number);
+ uint32_t current_hash = entry->Hash();
+ if (current_hash == hash && entry->Equals(name)) return number;
+ }
+ }
+ return T::kNotFound;
}
-int DescriptorArray::Search(String* name) {
- SLOW_ASSERT(IsSortedNoDuplicates());
+template<SearchMode search_mode, typename T>
+int Search(T* array, String* name, int valid_entries) {
+ if (search_mode == VALID_ENTRIES) {
+ SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
+ } else {
+ SLOW_ASSERT(array->IsSortedNoDuplicates());
+ }
- // Check for empty descriptor array.
- int nof = number_of_descriptors();
- if (nof == 0) return kNotFound;
+ int nof = array->number_of_entries();
+ if (nof == 0) return T::kNotFound;
// Fast case: do linear search for small arrays.
const int kMaxElementsForLinearSearch = 8;
- if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
- return LinearSearch(name, nof);
+ if ((search_mode == ALL_ENTRIES &&
+ nof <= kMaxElementsForLinearSearch) ||
+ (search_mode == VALID_ENTRIES &&
+ valid_entries <= (kMaxElementsForLinearSearch * 3))) {
+ return LinearSearch<search_mode>(array, name, nof, valid_entries);
}
// Slow case: perform binary search.
- return BinarySearch(name, 0, nof - 1);
+ return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
+}
+
+
+int DescriptorArray::Search(String* name, int valid_descriptors) {
+ return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}
-int DescriptorArray::SearchWithCache(String* name) {
- int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
+int DescriptorArray::SearchWithCache(String* name, Map* map) {
+ int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ if (number_of_own_descriptors == 0) return kNotFound;
+
+ DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
+ int number = cache->Lookup(map, name);
+
if (number == DescriptorLookupCache::kAbsent) {
- number = Search(name);
- GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
+ number = Search(name, number_of_own_descriptors);
+ cache->Update(map, name, number);
}
+
return number;
}
+void Map::LookupDescriptor(JSObject* holder,
+ String* name,
+ LookupResult* result) {
+ DescriptorArray* descriptors = this->instance_descriptors();
+ int number = descriptors->SearchWithCache(name, this);
+ if (number == DescriptorArray::kNotFound) return result->NotFound();
+ result->DescriptorResult(holder, descriptors->GetDetails(number), number);
+}
+
+
+void Map::LookupTransition(JSObject* holder,
+ String* name,
+ LookupResult* result) {
+ if (HasTransitionArray()) {
+ TransitionArray* transition_array = transitions();
+ int number = transition_array->Search(name);
+ if (number != TransitionArray::kNotFound) {
+ return result->TransitionResult(holder, number);
+ }
+ }
+ result->NotFound();
+}
+
+
+Object** DescriptorArray::GetKeySlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToKeyIndex(descriptor_number)));
+}
+
+
String* DescriptorArray::GetKey(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
return String::cast(get(ToKeyIndex(descriptor_number)));
}
+int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
+ return GetDetails(descriptor_number).pointer();
+}
+
+
+String* DescriptorArray::GetSortedKey(int descriptor_number) {
+ return GetKey(GetSortedKeyIndex(descriptor_number));
+}
+
+
+void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
+ PropertyDetails details = GetDetails(descriptor_index);
+ set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
+}
+
+
+Object** DescriptorArray::GetValueSlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToValueIndex(descriptor_number)));
+}
+
+
Object* DescriptorArray::GetValue(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- return GetContentArray()->get(ToValueIndex(descriptor_number));
+ return get(ToValueIndex(descriptor_number));
}
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
+ Object* details = get(ToDetailsIndex(descriptor_number));
return PropertyDetails(Smi::cast(details));
}
@@ -1987,42 +2183,6 @@ AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
}
-bool DescriptorArray::IsProperty(int descriptor_number) {
- Entry entry(this, descriptor_number);
- return IsPropertyDescriptor(&entry);
-}
-
-
-bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
- switch (GetType(descriptor_number)) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
- return true;
- case CALLBACKS: {
- Object* value = GetValue(descriptor_number);
- if (!value->IsAccessorPair()) return false;
- AccessorPair* accessors = AccessorPair::cast(value);
- return accessors->getter()->IsMap() && accessors->setter()->IsMap();
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // Keep the compiler happy.
- return false;
-}
-
-
-bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
- return GetType(descriptor_number) == NULL_DESCRIPTOR;
-}
-
-
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
desc->Init(GetKey(descriptor_number),
GetValue(descriptor_number),
@@ -2035,40 +2195,89 @@ void DescriptorArray::Set(int descriptor_number,
const WhitenessWitness&) {
// Range check.
ASSERT(descriptor_number < number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() <=
+ number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() > 0);
NoIncrementalWriteBarrierSet(this,
ToKeyIndex(descriptor_number),
desc->GetKey());
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToValueIndex(descriptor_number),
desc->GetValue());
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToDetailsIndex(descriptor_number),
desc->GetDetails().AsSmi());
}
-void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
- int first, int second) {
- NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSwap(content_array,
- ToValueIndex(first),
- ToValueIndex(second));
- NoIncrementalWriteBarrierSwap(content_array,
- ToDetailsIndex(first),
- ToDetailsIndex(second));
+void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
+ // Range check.
+ ASSERT(descriptor_number < number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() <=
+ number_of_descriptors());
+ ASSERT(desc->GetDetails().descriptor_index() > 0);
+
+ set(ToKeyIndex(descriptor_number), desc->GetKey());
+ set(ToValueIndex(descriptor_number), desc->GetValue());
+ set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
-DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
+void DescriptorArray::Append(Descriptor* desc,
+ const WhitenessWitness& witness) {
+ int descriptor_number = number_of_descriptors();
+ int enumeration_index = descriptor_number + 1;
+ SetNumberOfDescriptors(descriptor_number + 1);
+ desc->SetEnumerationIndex(enumeration_index);
+ Set(descriptor_number, desc, witness);
+
+ uint32_t hash = desc->GetKey()->Hash();
+
+ int insertion;
+
+ for (insertion = descriptor_number; insertion > 0; --insertion) {
+ String* key = GetSortedKey(insertion - 1);
+ if (key->Hash() <= hash) break;
+ SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
+ }
+
+ SetSortedKey(insertion, descriptor_number);
+}
+
+
+void DescriptorArray::Append(Descriptor* desc) {
+ int descriptor_number = number_of_descriptors();
+ int enumeration_index = descriptor_number + 1;
+ SetNumberOfDescriptors(descriptor_number + 1);
+ desc->SetEnumerationIndex(enumeration_index);
+ Set(descriptor_number, desc);
+
+ uint32_t hash = desc->GetKey()->Hash();
+
+ int insertion;
+
+ for (insertion = descriptor_number; insertion > 0; --insertion) {
+ String* key = GetSortedKey(insertion - 1);
+ if (key->Hash() <= hash) break;
+ SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
+ }
+
+ SetSortedKey(insertion, descriptor_number);
+}
+
+
+void DescriptorArray::SwapSortedKeys(int first, int second) {
+ int first_key = GetSortedKeyIndex(first);
+ SetSortedKey(first, GetSortedKeyIndex(second));
+ SetSortedKey(second, first_key);
+}
+
+
+DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
: marking_(array->GetHeap()->incremental_marking()) {
marking_->EnterNoMarkingScope();
- if (array->number_of_descriptors() > 0) {
- ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
- ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
- }
+ ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
}
@@ -2103,7 +2312,8 @@ int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
- // Empty entry.
+ // Empty entry. Uses raw unchecked accessors because it is called by the
+ // symbol table during bootstrapping.
if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
Shape::IsMatch(key, element)) return entry;
@@ -2258,18 +2468,18 @@ String* String::TryFlattenGetString(PretenureFlag pretenure) {
uint16_t String::Get(int index) {
ASSERT(index >= 0 && index < length());
switch (StringShape(this).full_representation_tag()) {
- case kSeqStringTag | kAsciiStringTag:
+ case kSeqStringTag | kOneByteStringTag:
return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
case kSeqStringTag | kTwoByteStringTag:
return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
- case kConsStringTag | kAsciiStringTag:
+ case kConsStringTag | kOneByteStringTag:
case kConsStringTag | kTwoByteStringTag:
return ConsString::cast(this)->ConsStringGet(index);
- case kExternalStringTag | kAsciiStringTag:
+ case kExternalStringTag | kOneByteStringTag:
return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
case kExternalStringTag | kTwoByteStringTag:
return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
- case kSlicedStringTag | kAsciiStringTag:
+ case kSlicedStringTag | kOneByteStringTag:
case kSlicedStringTag | kTwoByteStringTag:
return SlicedString::cast(this)->SlicedStringGet(index);
default:
@@ -2368,9 +2578,10 @@ String* SlicedString::parent() {
}
-void SlicedString::set_parent(String* parent) {
+void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
ASSERT(parent->IsSeqString() || parent->IsExternalString());
WRITE_FIELD(this, kParentOffset, parent);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}
@@ -2874,16 +3085,12 @@ bool Map::has_non_instance_prototype() {
void Map::set_function_with_prototype(bool value) {
- if (value) {
- set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
- } else {
- set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
- }
+ set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
}
bool Map::function_with_prototype() {
- return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
+ return FunctionWithPrototype::decode(bit_field3());
}
@@ -2915,28 +3122,21 @@ bool Map::is_extensible() {
void Map::set_attached_to_shared_function_info(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kAttachedToSharedFunctionInfo));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kAttachedToSharedFunctionInfo));
- }
+ set_bit_field3(AttachedToSharedFunctionInfo::update(bit_field3(), value));
}
bool Map::attached_to_shared_function_info() {
- return ((1 << kAttachedToSharedFunctionInfo) & bit_field3()) != 0;
+ return AttachedToSharedFunctionInfo::decode(bit_field3());
}
void Map::set_is_shared(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kIsShared));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kIsShared));
- }
+ set_bit_field3(IsShared::update(bit_field3(), value));
}
+
bool Map::is_shared() {
- return ((1 << kIsShared) & bit_field3()) != 0;
+ return IsShared::decode(bit_field3());
}
void Map::set_has_external_resource(bool value) {
@@ -2968,15 +3168,21 @@ bool Map::use_user_object_comparison() {
void Map::set_named_interceptor_is_fallback(bool value) {
- if (value) {
- set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
- }
+ set_bit_field3(NamedInterceptorIsFallback::update(bit_field3(), value));
}
bool Map::named_interceptor_is_fallback() {
- return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
+ return NamedInterceptorIsFallback::decode(bit_field3());
+}
+
+
+void Map::set_dictionary_map(bool value) {
+ set_bit_field3(DictionaryMap::update(bit_field3(), value));
+}
+
+
+bool Map::is_dictionary_map() {
+ return DictionaryMap::decode(bit_field3());
}
@@ -2990,6 +3196,26 @@ Code::Flags Code::flags() {
}
+void Map::set_owns_descriptors(bool is_shared) {
+ set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
+}
+
+
+bool Map::owns_descriptors() {
+ return OwnsDescriptors::decode(bit_field3());
+}
+
+
+void Map::set_is_observed(bool is_observed) {
+ set_bit_field3(IsObserved::update(bit_field3(), is_observed));
+}
+
+
+bool Map::is_observed() {
+ return IsObserved::decode(bit_field3());
+}
+
+
void Code::set_flags(Code::Flags flags) {
STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
// Make sure that all call stubs have an arguments count.
@@ -3024,7 +3250,7 @@ Code::ExtraICState Code::extra_ic_state() {
}
-PropertyType Code::type() {
+Code::StubType Code::type() {
return ExtractTypeFromFlags(flags());
}
@@ -3041,7 +3267,8 @@ int Code::major_key() {
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
- return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
+ return StubMajorKeyField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
@@ -3052,7 +3279,9 @@ void Code::set_major_key(int major) {
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
ASSERT(0 <= major && major < 256);
- WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = StubMajorKeyField::update(previous, major);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
@@ -3154,39 +3383,50 @@ void Code::set_profiler_ticks(int ticks) {
unsigned Code::stack_slots() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
- return READ_UINT32_FIELD(this, kStackSlotsOffset);
+ return StackSlotsField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_stack_slots(unsigned slots) {
+ CHECK(slots <= (1 << kStackSlotsBitCount));
ASSERT(kind() == OPTIMIZED_FUNCTION);
- WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = StackSlotsField::update(previous, slots);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
unsigned Code::safepoint_table_offset() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
- return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
+ return SafepointTableOffsetField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
void Code::set_safepoint_table_offset(unsigned offset) {
+ CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
ASSERT(kind() == OPTIMIZED_FUNCTION);
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
- WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = SafepointTableOffsetField::update(previous, offset);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
unsigned Code::stack_check_table_offset() {
ASSERT_EQ(FUNCTION, kind());
- return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
+ return StackCheckTableOffsetField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
void Code::set_stack_check_table_offset(unsigned offset) {
ASSERT_EQ(FUNCTION, kind());
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
- WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+ int updated = StackCheckTableOffsetField::update(previous, offset);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
@@ -3205,85 +3445,106 @@ void Code::set_check_type(CheckType value) {
byte Code::unary_op_type() {
ASSERT(is_unary_op_stub());
- return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
+ return UnaryOpTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_unary_op_type(byte value) {
ASSERT(is_unary_op_stub());
- WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = UnaryOpTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::binary_op_type() {
ASSERT(is_binary_op_stub());
- return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
+ return BinaryOpTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_binary_op_type(byte value) {
ASSERT(is_binary_op_stub());
- WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = BinaryOpTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::binary_op_result_type() {
ASSERT(is_binary_op_stub());
- return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
+ return BinaryOpResultTypeField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_binary_op_result_type(byte value) {
ASSERT(is_binary_op_stub());
- WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = BinaryOpResultTypeField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::compare_state() {
ASSERT(is_compare_ic_stub());
- return READ_BYTE_FIELD(this, kCompareStateOffset);
+ return CompareStateField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_compare_state(byte value) {
ASSERT(is_compare_ic_stub());
- WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = CompareStateField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::compare_operation() {
ASSERT(is_compare_ic_stub());
- return READ_BYTE_FIELD(this, kCompareOperationOffset);
+ return CompareOperationField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_compare_operation(byte value) {
ASSERT(is_compare_ic_stub());
- WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = CompareOperationField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
byte Code::to_boolean_state() {
ASSERT(is_to_boolean_ic_stub());
- return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
+ return ToBooleanStateField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_to_boolean_state(byte value) {
ASSERT(is_to_boolean_ic_stub());
- WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = ToBooleanStateField::update(previous, value);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::has_function_cache() {
ASSERT(kind() == STUB);
- return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
+ return HasFunctionCacheField::decode(
+ READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::set_has_function_cache(bool flag) {
ASSERT(kind() == STUB);
- WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
+ int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+ int updated = HasFunctionCacheField::update(previous, flag);
+ WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
@@ -3296,7 +3557,7 @@ bool Code::is_inline_cache_stub() {
Code::Flags Code::ComputeFlags(Kind kind,
InlineCacheState ic_state,
ExtraICState extra_ic_state,
- PropertyType type,
+ StubType type,
int argc,
InlineCacheHolderFlag holder) {
// Extra IC state is only allowed for call IC stubs or for store IC
@@ -3317,7 +3578,7 @@ Code::Flags Code::ComputeFlags(Kind kind,
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
- PropertyType type,
+ StubType type,
ExtraICState extra_ic_state,
InlineCacheHolderFlag holder,
int argc) {
@@ -3340,7 +3601,7 @@ Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
}
-PropertyType Code::ExtractTypeFromFlags(Flags flags) {
+Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
return TypeField::decode(flags);
}
@@ -3390,150 +3651,222 @@ void Map::set_prototype(Object* value, WriteBarrierMode mode) {
}
-DescriptorArray* Map::instance_descriptors() {
- Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- return GetHeap()->empty_descriptor_array();
+// If the descriptor is using the empty transition array, install a new empty
+// transition array that will have place for an element transition.
+static MaybeObject* EnsureHasTransitionArray(Map* map) {
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions;
+ if (!map->HasTransitionArray()) {
+ maybe_transitions = TransitionArray::Allocate(0);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+ transitions->set_back_pointer_storage(map->GetBackPointer());
+ } else if (!map->transitions()->IsFullTransitionArray()) {
+ maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
} else {
- return DescriptorArray::cast(object);
+ return map;
}
+ map->set_transitions(transitions);
+ return transitions;
}
-void Map::init_instance_descriptors() {
- WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
-}
-
-
-void Map::clear_instance_descriptors() {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (!object->IsSmi()) {
+void Map::InitializeDescriptors(DescriptorArray* descriptors) {
+ int len = descriptors->number_of_descriptors();
#ifdef DEBUG
- ZapInstanceDescriptors();
-#endif
- WRITE_FIELD(
- this,
- kInstanceDescriptorsOrBitField3Offset,
- Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
+ ASSERT(len <= DescriptorArray::kMaxNumberOfDescriptors);
+
+ bool used_indices[DescriptorArray::kMaxNumberOfDescriptors];
+ for (int i = 0; i < len; ++i) used_indices[i] = false;
+
+ // Ensure that all enumeration indexes between 1 and length occur uniquely in
+ // the descriptor array.
+ for (int i = 0; i < len; ++i) {
+ int enum_index = descriptors->GetDetails(i).descriptor_index() -
+ PropertyDetails::kInitialIndex;
+ ASSERT(0 <= enum_index && enum_index < len);
+ ASSERT(!used_indices[enum_index]);
+ used_indices[enum_index] = true;
}
+#endif
+
+ set_instance_descriptors(descriptors);
+ SetNumberOfOwnDescriptors(len);
}
-void Map::set_instance_descriptors(DescriptorArray* value,
- WriteBarrierMode mode) {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- Heap* heap = GetHeap();
- if (value == heap->empty_descriptor_array()) {
- clear_instance_descriptors();
- return;
- } else {
- if (object->IsSmi()) {
- value->set_bit_field3_storage(Smi::cast(object)->value());
- } else {
- value->set_bit_field3_storage(
- DescriptorArray::cast(object)->bit_field3_storage());
- }
- }
- ASSERT(!is_shared());
-#ifdef DEBUG
- if (value != instance_descriptors()) {
- ZapInstanceDescriptors();
- }
-#endif
- WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
-}
+ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
+SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
-int Map::bit_field3() {
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- return Smi::cast(object)->value();
- } else {
- return DescriptorArray::cast(object)->bit_field3_storage();
+void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
+ Object* back_pointer = GetBackPointer();
+
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
+ ZapTransitions();
}
+
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}
-void Map::set_bit_field3(int value) {
- ASSERT(Smi::IsValid(value));
- Object* object = READ_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset);
- if (object->IsSmi()) {
- WRITE_FIELD(this,
- kInstanceDescriptorsOrBitField3Offset,
- Smi::FromInt(value));
- } else {
- DescriptorArray::cast(object)->set_bit_field3_storage(value);
- }
+void Map::AppendDescriptor(Descriptor* desc,
+ const DescriptorArray::WhitenessWitness& witness) {
+ DescriptorArray* descriptors = instance_descriptors();
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ descriptors->Append(desc, witness);
+ SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
Object* Map::GetBackPointer() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsDescriptorArray()) {
+ return TransitionArray::cast(object)->back_pointer_storage();
} else {
+ ASSERT(object->IsMap() || object->IsUndefined());
return object;
}
}
-void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
- Heap* heap = GetHeap();
- ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
- ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
- (value->IsMap() && GetBackPointer()->IsUndefined()));
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- FixedArray::cast(object)->set(
- kProtoTransitionBackPointerOffset, value, mode);
- } else {
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
- }
+bool Map::HasElementsTransition() {
+ return HasTransitionArray() && transitions()->HasElementsTransition();
}
-FixedArray* Map::prototype_transitions() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- if (object->IsFixedArray()) {
- return FixedArray::cast(object);
- } else {
+bool Map::HasTransitionArray() {
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return object->IsTransitionArray();
+}
+
+
+Map* Map::elements_transition_map() {
+ return transitions()->elements_transition();
+}
+
+
+bool Map::CanHaveMoreTransitions() {
+ if (!HasTransitionArray()) return true;
+ return FixedArray::SizeFor(transitions()->length() +
+ TransitionArray::kTransitionSize)
+ <= Page::kMaxNonCodeHeapObjectSize;
+}
+
+
+MaybeObject* Map::AddTransition(String* key,
+ Map* target,
+ SimpleTransitionFlag flag) {
+ if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
+ return TransitionArray::NewWith(flag, key, target, GetBackPointer());
+}
+
+
+void Map::SetTransition(int transition_index, Map* target) {
+ transitions()->SetTarget(transition_index, target);
+}
+
+
+Map* Map::GetTransition(int transition_index) {
+ return transitions()->GetTarget(transition_index);
+}
+
+
+MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
+ MaybeObject* allow_elements = EnsureHasTransitionArray(this);
+ if (allow_elements->IsFailure()) return allow_elements;
+ transitions()->set_elements_transition(transitioned_map);
+ return this;
+}
+
+
+FixedArray* Map::GetPrototypeTransitions() {
+ if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
+ if (!transitions()->HasPrototypeTransitions()) {
return GetHeap()->empty_fixed_array();
}
+ return transitions()->GetPrototypeTransitions();
}
-void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
- Heap* heap = GetHeap();
- ASSERT(value != heap->empty_fixed_array());
- value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
+ MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
+ if (allow_prototype->IsFailure()) return allow_prototype;
#ifdef DEBUG
- if (value != prototype_transitions()) {
+ if (HasPrototypeTransitions()) {
+ ASSERT(GetPrototypeTransitions() != proto_transitions);
ZapPrototypeTransitions();
}
#endif
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ transitions()->SetPrototypeTransitions(proto_transitions);
+ return this;
+}
+
+
+bool Map::HasPrototypeTransitions() {
+ return HasTransitionArray() && transitions()->HasPrototypeTransitions();
+}
+
+
+TransitionArray* Map::transitions() {
+ ASSERT(HasTransitionArray());
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ return TransitionArray::cast(object);
+}
+
+
+void Map::set_transitions(TransitionArray* transition_array,
+ WriteBarrierMode mode) {
+ // In release mode, only run this code if verify_heap is on.
+ if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
+ CHECK(transitions() != transition_array);
+ ZapTransitions();
+ }
+
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
CONDITIONAL_WRITE_BARRIER(
- heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+ GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}
-void Map::init_prototype_transitions(Object* undefined) {
+void Map::init_back_pointer(Object* undefined) {
ASSERT(undefined->IsUndefined());
- WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
+}
+
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+ ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
+ ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
+ (value->IsMap() && GetBackPointer()->IsUndefined()));
+ Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
+ if (object->IsTransitionArray()) {
+ TransitionArray::cast(object)->set_back_pointer_storage(value);
+ } else {
+ WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
+ }
+}
+
+
+// Can either be Smi (no transitions), normal transition array, or a transition
+// array with the header overwritten as a Smi (thus iterating).
+TransitionArray* Map::unchecked_transition_array() {
+ Object* object = *HeapObject::RawField(this,
+ Map::kTransitionsOrBackPointerOffset);
+ TransitionArray* transition_array = static_cast<TransitionArray*>(object);
+ return transition_array;
}
-HeapObject* Map::unchecked_prototype_transitions() {
- Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
- return reinterpret_cast<HeapObject*>(object);
+HeapObject* Map::UncheckedPrototypeTransitions() {
+ ASSERT(HasTransitionArray());
+ ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
+ return unchecked_transition_array()->UncheckedPrototypeTransitions();
}
@@ -3542,22 +3875,22 @@ ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
-ACCESSORS(JSFunction,
- next_function_link,
- Object,
- kNextFunctionLinkOffset)
+ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
+ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
-ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
+ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+ACCESSORS(AccessorInfo, expected_receiver_type, Object,
+ kExpectedReceiverTypeOffset)
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
@@ -3606,7 +3939,7 @@ ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
kInternalFieldCountOffset)
ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
kHasExternalResourceOffset)
-ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
+ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
kUseUserObjectComparisonOffset)
ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
@@ -3643,6 +3976,8 @@ ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
+ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
+ kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
@@ -3653,7 +3988,7 @@ ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
@@ -3677,6 +4012,10 @@ BOOL_ACCESSORS(SharedFunctionInfo,
kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
+ allows_lazy_compilation_without_context,
+ kAllowLazyCompilationWithoutContext)
+BOOL_ACCESSORS(SharedFunctionInfo,
+ compiler_hints,
uses_arguments,
kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
@@ -3702,8 +4041,10 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
+SMI_ACCESSORS(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
@@ -3755,8 +4096,10 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
-PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, counters, kCountersOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#endif
@@ -3851,6 +4194,18 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
+
+void SharedFunctionInfo::BeforeVisitingPointers() {
+ if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
+
+ // Flush optimized code map on major GC.
+ // Note: we may experiment with rebuilding it or retaining entries
+ // which should survive as we iterate through optimized functions
+ // anyway.
+ set_optimized_code_map(Smi::FromInt(0));
+}
+
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
@@ -3959,14 +4314,66 @@ void SharedFunctionInfo::set_code_age(int code_age) {
}
+int SharedFunctionInfo::ic_age() {
+ return ICAgeBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_ic_age(int ic_age) {
+ set_counters(ICAgeBits::update(counters(), ic_age));
+}
+
+
+int SharedFunctionInfo::deopt_count() {
+ return DeoptCountBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_deopt_count(int deopt_count) {
+ set_counters(DeoptCountBits::update(counters(), deopt_count));
+}
+
+
+void SharedFunctionInfo::increment_deopt_count() {
+ int value = counters();
+ int deopt_count = DeoptCountBits::decode(value);
+ deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
+ set_counters(DeoptCountBits::update(value, deopt_count));
+}
+
+
+int SharedFunctionInfo::opt_reenable_tries() {
+ return OptReenableTriesBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
+ set_counters(OptReenableTriesBits::update(counters(), tries));
+}
+
+
bool SharedFunctionInfo::has_deoptimization_support() {
Code* code = this->code();
return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
+void SharedFunctionInfo::TryReenableOptimization() {
+ int tries = opt_reenable_tries();
+ set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
+ // We reenable optimization whenever the number of tries is a large
+ // enough power of 2.
+ if (tries >= 16 && (((tries - 1) & tries) == 0)) {
+ set_optimization_disabled(false);
+ set_opt_count(0);
+ set_deopt_count(0);
+ code()->set_optimizable(true);
+ }
+}
+
+
bool JSFunction::IsBuiltin() {
- return context()->global()->IsJSBuiltinsObject();
+ return context()->global_object()->IsJSBuiltinsObject();
}
@@ -3991,6 +4398,18 @@ bool JSFunction::IsMarkedForLazyRecompilation() {
}
+bool JSFunction::IsMarkedForParallelRecompilation() {
+ return code() ==
+ GetIsolate()->builtins()->builtin(Builtins::kParallelRecompile);
+}
+
+
+bool JSFunction::IsInRecompileQueue() {
+ return code() == GetIsolate()->builtins()->builtin(
+ Builtins::kInRecompileQueue);
+}
+
+
Code* JSFunction::code() {
return Code::cast(unchecked_code());
}
@@ -4022,10 +4441,10 @@ void JSFunction::ReplaceCode(Code* code) {
// Add/remove the function from the list of optimized functions for this
// context based on the state change.
if (!was_optimized && is_optimized) {
- context()->global_context()->AddOptimizedFunction(this);
+ context()->native_context()->AddOptimizedFunction(this);
}
if (was_optimized && !is_optimized) {
- context()->global_context()->RemoveOptimizedFunction(this);
+ context()->native_context()->RemoveOptimizedFunction(this);
}
}
@@ -4066,40 +4485,6 @@ void JSFunction::set_initial_map(Map* value) {
}
-MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
- Map* initial_map) {
- Context* global_context = context()->global_context();
- Object* array_function =
- global_context->get(Context::ARRAY_FUNCTION_INDEX);
- if (array_function->IsJSFunction() &&
- this == JSFunction::cast(array_function)) {
- ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
-
- MaybeObject* maybe_map = initial_map->CopyDropTransitions();
- Map* new_double_map = NULL;
- if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
- new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
- maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
- new_double_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- maybe_map = new_double_map->CopyDropTransitions();
- Map* new_object_map = NULL;
- if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
- new_object_map->set_elements_kind(FAST_ELEMENTS);
- maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
- new_object_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- global_context->set_smi_js_array_map(initial_map);
- global_context->set_double_js_array_map(new_double_map);
- global_context->set_object_js_array_map(new_object_map);
- }
- set_initial_map(initial_map);
- return this;
-}
-
-
bool JSFunction::has_initial_map() {
return prototype_or_initial_map()->IsMap();
}
@@ -4132,6 +4517,7 @@ Object* JSFunction::prototype() {
return instance_prototype();
}
+
bool JSFunction::should_have_prototype() {
return map()->function_with_prototype();
}
@@ -4235,6 +4621,7 @@ void Foreign::set_foreign_address(Address value) {
ACCESSORS(JSModule, context, Object, kContextOffset)
+ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
JSModule* JSModule::cast(Object* obj) {
@@ -4429,18 +4816,18 @@ ElementsKind JSObject::GetElementsKind() {
FixedArrayBase* fixed_array =
reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
Map* map = fixed_array->map();
- ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
- (map == GetHeap()->fixed_array_map() ||
- map == GetHeap()->fixed_cow_array_map())) ||
- (kind == FAST_DOUBLE_ELEMENTS &&
- (fixed_array->IsFixedDoubleArray() ||
- fixed_array == GetHeap()->empty_fixed_array())) ||
- (kind == DICTIONARY_ELEMENTS &&
+ ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
+ (map == GetHeap()->fixed_array_map() ||
+ map == GetHeap()->fixed_cow_array_map())) ||
+ (IsFastDoubleElementsKind(kind) &&
+ (fixed_array->IsFixedDoubleArray() ||
+ fixed_array == GetHeap()->empty_fixed_array())) ||
+ (kind == DICTIONARY_ELEMENTS &&
fixed_array->IsFixedArray() &&
- fixed_array->IsDictionary()) ||
- (kind > DICTIONARY_ELEMENTS));
- ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
- (elements()->IsFixedArray() && elements()->length() >= 2));
+ fixed_array->IsDictionary()) ||
+ (kind > DICTIONARY_ELEMENTS));
+ ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
+ (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
return kind;
}
@@ -4451,25 +4838,28 @@ ElementsAccessor* JSObject::GetElementsAccessor() {
}
-bool JSObject::HasFastElements() {
- return GetElementsKind() == FAST_ELEMENTS;
+bool JSObject::HasFastObjectElements() {
+ return IsFastObjectElementsKind(GetElementsKind());
}
-bool JSObject::HasFastSmiOnlyElements() {
- return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
+bool JSObject::HasFastSmiElements() {
+ return IsFastSmiElementsKind(GetElementsKind());
}
-bool JSObject::HasFastTypeElements() {
- ElementsKind elements_kind = GetElementsKind();
- return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_ELEMENTS;
+bool JSObject::HasFastSmiOrObjectElements() {
+ return IsFastSmiOrObjectElementsKind(GetElementsKind());
}
bool JSObject::HasFastDoubleElements() {
- return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
+ return IsFastDoubleElementsKind(GetElementsKind());
+}
+
+
+bool JSObject::HasFastHoleyElements() {
+ return IsFastHoleyElementsKind(GetElementsKind());
}
@@ -4526,7 +4916,7 @@ bool JSObject::HasIndexedInterceptor() {
MaybeObject* JSObject::EnsureWritableFastElements() {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elems = FixedArray::cast(elements());
Isolate* isolate = GetIsolate();
if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
@@ -4579,8 +4969,7 @@ StringHasher::StringHasher(int length, uint32_t seed)
raw_running_hash_(seed),
array_index_(0),
is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
- is_first_char_(true),
- is_valid_(true) {
+ is_first_char_(true) {
ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}
@@ -4590,6 +4979,25 @@ bool StringHasher::has_trivial_hash() {
}
+uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint32_t c) {
+ running_hash += c;
+ running_hash += (running_hash << 10);
+ running_hash ^= (running_hash >> 6);
+ return running_hash;
+}
+
+
+uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
+ running_hash += (running_hash << 3);
+ running_hash ^= (running_hash >> 11);
+ running_hash += (running_hash << 15);
+ if ((running_hash & String::kHashBitMask) == 0) {
+ return kZeroHash;
+ }
+ return running_hash;
+}
+
+
void StringHasher::AddCharacter(uint32_t c) {
if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
AddSurrogatePair(c); // Not inlined.
@@ -4597,9 +5005,7 @@ void StringHasher::AddCharacter(uint32_t c) {
}
// Use the Jenkins one-at-a-time hash function to update the hash
// for the given character.
- raw_running_hash_ += c;
- raw_running_hash_ += (raw_running_hash_ << 10);
- raw_running_hash_ ^= (raw_running_hash_ >> 6);
+ raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
// Incremental array index computation.
if (is_array_index_) {
if (c < '0' || c > '9') {
@@ -4629,23 +5035,14 @@ void StringHasher::AddCharacterNoIndex(uint32_t c) {
AddSurrogatePairNoIndex(c); // Not inlined.
return;
}
- raw_running_hash_ += c;
- raw_running_hash_ += (raw_running_hash_ << 10);
- raw_running_hash_ ^= (raw_running_hash_ >> 6);
+ raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
uint32_t StringHasher::GetHash() {
// Get the calculated raw hash value and do some more bit ops to distribute
// the hash further. Ensure that we never return zero as the hash value.
- uint32_t result = raw_running_hash_;
- result += (result << 3);
- result ^= (result >> 11);
- result += (result << 15);
- if ((result & String::kHashBitMask) == 0) {
- result = 27;
- }
- return result;
+ return GetHashCore(raw_running_hash_);
}
@@ -4675,7 +5072,12 @@ bool String::AsArrayIndex(uint32_t* index) {
Object* JSReceiver::GetPrototype() {
- return HeapObject::cast(this)->map()->prototype();
+ return map()->prototype();
+}
+
+
+Object* JSReceiver::GetConstructor() {
+ return map()->constructor();
}
@@ -4699,6 +5101,16 @@ PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
return GetPropertyAttributeWithReceiver(this, key);
}
+
+PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, true);
+}
+
+
// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
@@ -4723,7 +5135,26 @@ bool JSReceiver::HasElement(uint32_t index) {
if (IsJSProxy()) {
return JSProxy::cast(this)->HasElementWithHandler(index);
}
- return JSObject::cast(this)->HasElementWithReceiver(this, index);
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, true) != ABSENT;
+}
+
+
+bool JSReceiver::HasLocalElement(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->HasElementWithHandler(index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, false) != ABSENT;
+}
+
+
+PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
+ }
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ this, index, false);
}
@@ -4767,6 +5198,13 @@ void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
}
+bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
+ Object* function_template = expected_receiver_type();
+ if (!function_template->IsFunctionTemplateInfo()) return true;
+ return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
+}
+
+
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
Object* key,
@@ -4780,7 +5218,9 @@ void Dictionary<Shape, Key>::SetEntry(int entry,
Object* key,
Object* value,
PropertyDetails details) {
- ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
+ ASSERT(!key->IsString() ||
+ details.IsDeleted() ||
+ details.dictionary_index() > 0);
int index = HashTable<Shape, Key>::EntryToIndex(entry);
AssertNoAllocation no_gc;
WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
@@ -4878,13 +5318,13 @@ void Map::ClearCodeCache(Heap* heap) {
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
// - IncrementalMarking::Step
- ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
- WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
+ ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
+ WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
void JSArray::EnsureSize(int required_size) {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elts = FixedArray::cast(elements());
const int kArraySizeThatFitsComfortablyInNewSpace = 128;
if (elts->length() < required_size) {
@@ -4916,13 +5356,13 @@ bool JSArray::AllowsSetElementsLength() {
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
MaybeObject* maybe_result = EnsureCanContainElements(
- storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
+ storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
if (maybe_result->IsFailure()) return maybe_result;
ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
- GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
+ IsFastDoubleElementsKind(GetElementsKind())) ||
((storage->map() != GetHeap()->fixed_double_array_map()) &&
- ((GetElementsKind() == FAST_ELEMENTS) ||
- (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ (IsFastObjectElementsKind(GetElementsKind()) ||
+ (IsFastSmiElementsKind(GetElementsKind()) &&
FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
set_elements(storage);
set_length(Smi::FromInt(storage->length()));
@@ -4942,13 +5382,13 @@ MaybeObject* FixedDoubleArray::Copy() {
}
-void TypeFeedbackCells::SetAstId(int index, Smi* id) {
- set(1 + index * 2, id);
+void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
+ set(1 + index * 2, Smi::FromInt(id.ToInt()));
}
-Smi* TypeFeedbackCells::AstId(int index) {
- return Smi::cast(get(1 + index * 2));
+TypeFeedbackId TypeFeedbackCells::AstId(int index) {
+ return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}
@@ -4973,13 +5413,88 @@ Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
- return heap->raw_unchecked_the_hole_value();
+ return heap->the_hole_value();
+}
+
+
+int TypeFeedbackInfo::ic_total_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return ICTotalCountField::decode(current);
+}
+
+
+void TypeFeedbackInfo::set_ic_total_count(int count) {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ value = ICTotalCountField::update(value,
+ ICTotalCountField::decode(count));
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+int TypeFeedbackInfo::ic_with_type_info_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ return ICsWithTypeInfoCountField::decode(current);
+}
+
+
+void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
+ // We can get negative count here when the type-feedback info is
+ // shared between two code objects. The can only happen when
+ // the debugger made a shallow copy of code object (see Heap::CopyCode).
+ // Since we do not optimize when the debugger is active, we can skip
+ // this counter update.
+ if (new_count >= 0) {
+ new_count &= ICsWithTypeInfoCountField::kMask;
+ value = ICsWithTypeInfoCountField::update(value, new_count);
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+ }
+}
+
+
+void TypeFeedbackInfo::initialize_storage() {
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
+}
+
+
+void TypeFeedbackInfo::change_own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ int checksum = OwnTypeChangeChecksum::decode(value);
+ checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
+ value = OwnTypeChangeChecksum::update(value, checksum);
+ // Ensure packed bit field is in Smi range.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ value = InlinedTypeChangeChecksum::update(value, checksum & mask);
+ // Ensure packed bit field is in Smi range.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+}
+
+
+int TypeFeedbackInfo::own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return OwnTypeChangeChecksum::decode(value);
+}
+
+
+bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
-SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
-SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
- kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
kTypeFeedbackCellsOffset)
@@ -5049,14 +5564,13 @@ void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
-#define SLOT_ADDR(obj, offset) \
- reinterpret_cast<Object**>((obj)->address() + offset)
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
HeapObject* obj,
ObjectVisitor* v) {
- v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
+ v->VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, end_offset));
}
@@ -5064,10 +5578,10 @@ template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
int object_size,
ObjectVisitor* v) {
- v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
+ v->VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, object_size));
}
-#undef SLOT_ADDR
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
diff --git a/src/3rdparty/v8/src/objects-printer.cc b/src/3rdparty/v8/src/objects-printer.cc
index febdaab..6e87c7a 100644
--- a/src/3rdparty/v8/src/objects-printer.cc
+++ b/src/3rdparty/v8/src/objects-printer.cc
@@ -57,12 +57,12 @@ void MaybeObject::Print(FILE* out) {
void MaybeObject::PrintLn(FILE* out) {
Print(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void HeapObject::PrintHeader(FILE* out, const char* id) {
- PrintF(out, "%p: [%s]\n", reinterpret_cast<void*>(this), id);
+ FPrintF(out, "%p: [%s]\n", reinterpret_cast<void*>(this), id);
}
@@ -124,7 +124,7 @@ void HeapObject::HeapObjectPrint(FILE* out) {
ExternalDoubleArray::cast(this)->ExternalDoubleArrayPrint(out);
break;
case FILLER_TYPE:
- PrintF(out, "filler");
+ FPrintF(out, "filler");
break;
case JS_OBJECT_TYPE: // fall through
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@@ -151,7 +151,7 @@ void HeapObject::HeapObjectPrint(FILE* out) {
JSBuiltinsObject::cast(this)->JSBuiltinsObjectPrint(out);
break;
case JS_VALUE_TYPE:
- PrintF(out, "Value wrapper around:");
+ FPrintF(out, "Value wrapper around:");
JSValue::cast(this)->value()->Print(out);
break;
case JS_DATE_TYPE:
@@ -189,7 +189,7 @@ void HeapObject::HeapObjectPrint(FILE* out) {
#undef MAKE_STRUCT_CASE
default:
- PrintF(out, "UNKNOWN TYPE %d", map()->instance_type());
+ FPrintF(out, "UNKNOWN TYPE %d", map()->instance_type());
UNREACHABLE();
break;
}
@@ -197,113 +197,88 @@ void HeapObject::HeapObjectPrint(FILE* out) {
void ByteArray::ByteArrayPrint(FILE* out) {
- PrintF(out, "byte array, data starts at %p", GetDataStartAddress());
+ FPrintF(out, "byte array, data starts at %p", GetDataStartAddress());
}
void FreeSpace::FreeSpacePrint(FILE* out) {
- PrintF(out, "free space, size %d", Size());
+ FPrintF(out, "free space, size %d", Size());
}
void ExternalPixelArray::ExternalPixelArrayPrint(FILE* out) {
- PrintF(out, "external pixel array");
+ FPrintF(out, "external pixel array");
}
void ExternalByteArray::ExternalByteArrayPrint(FILE* out) {
- PrintF(out, "external byte array");
+ FPrintF(out, "external byte array");
}
void ExternalUnsignedByteArray::ExternalUnsignedByteArrayPrint(FILE* out) {
- PrintF(out, "external unsigned byte array");
+ FPrintF(out, "external unsigned byte array");
}
void ExternalShortArray::ExternalShortArrayPrint(FILE* out) {
- PrintF(out, "external short array");
+ FPrintF(out, "external short array");
}
void ExternalUnsignedShortArray::ExternalUnsignedShortArrayPrint(FILE* out) {
- PrintF(out, "external unsigned short array");
+ FPrintF(out, "external unsigned short array");
}
void ExternalIntArray::ExternalIntArrayPrint(FILE* out) {
- PrintF(out, "external int array");
+ FPrintF(out, "external int array");
}
void ExternalUnsignedIntArray::ExternalUnsignedIntArrayPrint(FILE* out) {
- PrintF(out, "external unsigned int array");
+ FPrintF(out, "external unsigned int array");
}
void ExternalFloatArray::ExternalFloatArrayPrint(FILE* out) {
- PrintF(out, "external float array");
+ FPrintF(out, "external float array");
}
void ExternalDoubleArray::ExternalDoubleArrayPrint(FILE* out) {
- PrintF(out, "external double array");
+ FPrintF(out, "external double array");
}
void JSObject::PrintProperties(FILE* out) {
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PrintF(out, " ");
+ for (int i = 0; i < map()->NumberOfOwnDescriptors(); i++) {
+ FPrintF(out, " ");
descs->GetKey(i)->StringPrint(out);
- PrintF(out, ": ");
+ FPrintF(out, ": ");
switch (descs->GetType(i)) {
case FIELD: {
int index = descs->GetFieldIndex(i);
FastPropertyAt(index)->ShortPrint(out);
- PrintF(out, " (field at offset %d)\n", index);
+ FPrintF(out, " (field at offset %d)\n", index);
break;
}
case CONSTANT_FUNCTION:
descs->GetConstantFunction(i)->ShortPrint(out);
- PrintF(out, " (constant function)\n");
+ FPrintF(out, " (constant function)\n");
break;
case CALLBACKS:
descs->GetCallbacksObject(i)->ShortPrint(out);
- PrintF(out, " (callback)\n");
- break;
- case ELEMENTS_TRANSITION: {
- PrintF(out, "(elements transition to ");
- Object* descriptor_contents = descs->GetValue(i);
- if (descriptor_contents->IsMap()) {
- Map* map = Map::cast(descriptor_contents);
- PrintElementsKind(out, map->elements_kind());
- } else {
- FixedArray* map_array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < map_array->length(); ++i) {
- Map* map = Map::cast(map_array->get(i));
- if (i != 0) {
- PrintF(out, ", ");
- }
- PrintElementsKind(out, map->elements_kind());
- }
- }
- PrintF(out, ")\n");
- break;
- }
- case MAP_TRANSITION:
- PrintF(out, "(map transition)\n");
- break;
- case CONSTANT_TRANSITION:
- PrintF(out, "(constant transition)\n");
- break;
- case NULL_DESCRIPTOR:
- PrintF(out, "(null descriptor)\n");
+ FPrintF(out, " (callback)\n");
break;
case NORMAL: // only in slow mode
case HANDLER: // only in lookup results, not in descriptors
case INTERCEPTOR: // only in lookup results, not in descriptors
+ // There are no transitions in the descriptor array.
+ case TRANSITION:
+ case NONEXISTENT:
UNREACHABLE();
break;
}
@@ -318,28 +293,31 @@ void JSObject::PrintElements(FILE* out) {
// Don't call GetElementsKind, its validation code can cause the printer to
// fail when debugging.
switch (map()->elements_kind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
// Print in array notation for non-sparse arrays.
FixedArray* p = FixedArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: ", i);
+ FPrintF(out, " %d: ", i);
p->get(i)->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// Print in array notation for non-sparse arrays.
if (elements()->length() > 0) {
FixedDoubleArray* p = FixedDoubleArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
if (p->is_the_hole(i)) {
- PrintF(out, " %d: <the hole>", i);
+ FPrintF(out, " %d: <the hole>", i);
} else {
- PrintF(out, " %d: %g", i, p->get_scalar(i));
+ FPrintF(out, " %d: %g", i, p->get_scalar(i));
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
}
break;
@@ -347,14 +325,14 @@ void JSObject::PrintElements(FILE* out) {
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* p = ExternalPixelArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, p->get_scalar(i));
+ FPrintF(out, " %d: %d\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_BYTE_ELEMENTS: {
ExternalByteArray* p = ExternalByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -362,14 +340,14 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedByteArray* p =
ExternalUnsignedByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_SHORT_ELEMENTS: {
ExternalShortArray* p = ExternalShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -377,14 +355,14 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedShortArray* p =
ExternalUnsignedShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_INT_ELEMENTS: {
ExternalIntArray* p = ExternalIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -392,21 +370,21 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedIntArray* p =
ExternalUnsignedIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
+ FPrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_FLOAT_ELEMENTS: {
ExternalFloatArray* p = ExternalFloatArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %f\n", i, p->get_scalar(i));
+ FPrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_DOUBLE_ELEMENTS: {
ExternalDoubleArray* p = ExternalDoubleArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %f\n", i, p->get_scalar(i));
+ FPrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
@@ -416,9 +394,9 @@ void JSObject::PrintElements(FILE* out) {
case NON_STRICT_ARGUMENTS_ELEMENTS: {
FixedArray* p = FixedArray::cast(elements());
for (int i = 2; i < p->length(); i++) {
- PrintF(out, " %d: ", i);
+ FPrintF(out, " %d: ", i);
p->get(i)->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
break;
}
@@ -426,32 +404,66 @@ void JSObject::PrintElements(FILE* out) {
}
+void JSObject::PrintTransitions(FILE* out) {
+ if (!map()->HasTransitionArray()) return;
+ TransitionArray* transitions = map()->transitions();
+ for (int i = 0; i < transitions->number_of_transitions(); i++) {
+ FPrintF(out, " ");
+ transitions->GetKey(i)->StringPrint(out);
+ FPrintF(out, ": ");
+ switch (transitions->GetTargetDetails(i).type()) {
+ case FIELD: {
+ FPrintF(out, " (transition to field)\n");
+ break;
+ }
+ case CONSTANT_FUNCTION:
+ FPrintF(out, " (transition to constant function)\n");
+ break;
+ case CALLBACKS:
+ FPrintF(out, " (transition to callback)\n");
+ break;
+ // Values below are never in the target descriptor array.
+ case NORMAL:
+ case HANDLER:
+ case INTERCEPTOR:
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
+ break;
+ }
+ }
+}
+
+
void JSObject::JSObjectPrint(FILE* out) {
- PrintF(out, "%p: [JSObject]\n", reinterpret_cast<void*>(this));
- PrintF(out, " - map = %p [", reinterpret_cast<void*>(map()));
+ FPrintF(out, "%p: [JSObject]\n", reinterpret_cast<void*>(this));
+ FPrintF(out, " - map = %p [", reinterpret_cast<void*>(map()));
// Don't call GetElementsKind, its validation code can cause the printer to
// fail when debugging.
PrintElementsKind(out, this->map()->elements_kind());
- PrintF(out,
+ FPrintF(out,
"]\n - prototype = %p\n",
reinterpret_cast<void*>(GetPrototype()));
- PrintF(out, " {\n");
+ FPrintF(out, " {\n");
PrintProperties(out);
+ PrintTransitions(out);
PrintElements(out);
- PrintF(out, " }\n");
+ FPrintF(out, " }\n");
}
void JSModule::JSModulePrint(FILE* out) {
HeapObject::PrintHeader(out, "JSModule");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - context = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - context = ");
context()->Print(out);
+ FPrintF(out, " - scope_info = ");
+ scope_info()->ShortPrint(out);
PrintElementsKind(out, this->map()->elements_kind());
- PrintF(out, " {\n");
+ FPrintF(out, " {\n");
PrintProperties(out);
PrintElements(out);
- PrintF(out, " }\n");
+ FPrintF(out, " }\n");
}
@@ -524,95 +536,105 @@ static const char* TypeToString(InstanceType type) {
void Map::MapPrint(FILE* out) {
HeapObject::PrintHeader(out, "Map");
- PrintF(out, " - type: %s\n", TypeToString(instance_type()));
- PrintF(out, " - instance size: %d\n", instance_size());
- PrintF(out, " - inobject properties: %d\n", inobject_properties());
- PrintF(out, " - elements kind: ");
+ FPrintF(out, " - type: %s\n", TypeToString(instance_type()));
+ FPrintF(out, " - instance size: %d\n", instance_size());
+ FPrintF(out, " - inobject properties: %d\n", inobject_properties());
+ FPrintF(out, " - elements kind: ");
PrintElementsKind(out, elements_kind());
- PrintF(out, "\n - pre-allocated property fields: %d\n",
+ FPrintF(out, "\n - pre-allocated property fields: %d\n",
pre_allocated_property_fields());
- PrintF(out, " - unused property fields: %d\n", unused_property_fields());
+ FPrintF(out, " - unused property fields: %d\n", unused_property_fields());
if (is_hidden_prototype()) {
- PrintF(out, " - hidden_prototype\n");
+ FPrintF(out, " - hidden_prototype\n");
}
if (has_named_interceptor()) {
- PrintF(out, " - named_interceptor\n");
+ FPrintF(out, " - named_interceptor\n");
}
if (has_indexed_interceptor()) {
- PrintF(out, " - indexed_interceptor\n");
+ FPrintF(out, " - indexed_interceptor\n");
}
if (is_undetectable()) {
- PrintF(out, " - undetectable\n");
+ FPrintF(out, " - undetectable\n");
}
if (has_instance_call_handler()) {
- PrintF(out, " - instance_call_handler\n");
+ FPrintF(out, " - instance_call_handler\n");
}
if (is_access_check_needed()) {
- PrintF(out, " - access_check_needed\n");
+ FPrintF(out, " - access_check_needed\n");
}
- PrintF(out, " - instance descriptors: ");
+ FPrintF(out, " - back pointer: ");
+ GetBackPointer()->ShortPrint(out);
+ FPrintF(out, "\n - instance descriptors %i #%i: ",
+ owns_descriptors(),
+ NumberOfOwnDescriptors());
instance_descriptors()->ShortPrint(out);
- PrintF(out, "\n - prototype: ");
+ if (HasTransitionArray()) {
+ FPrintF(out, "\n - transitions: ");
+ transitions()->ShortPrint(out);
+ }
+ FPrintF(out, "\n - prototype: ");
prototype()->ShortPrint(out);
- PrintF(out, "\n - constructor: ");
+ FPrintF(out, "\n - constructor: ");
constructor()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n - code cache: ");
+ code_cache()->ShortPrint(out);
+ FPrintF(out, "\n");
}
void CodeCache::CodeCachePrint(FILE* out) {
HeapObject::PrintHeader(out, "CodeCache");
- PrintF(out, "\n - default_cache: ");
+ FPrintF(out, "\n - default_cache: ");
default_cache()->ShortPrint(out);
- PrintF(out, "\n - normal_type_cache: ");
+ FPrintF(out, "\n - normal_type_cache: ");
normal_type_cache()->ShortPrint(out);
}
void PolymorphicCodeCache::PolymorphicCodeCachePrint(FILE* out) {
HeapObject::PrintHeader(out, "PolymorphicCodeCache");
- PrintF(out, "\n - cache: ");
+ FPrintF(out, "\n - cache: ");
cache()->ShortPrint(out);
}
void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "TypeFeedbackInfo");
- PrintF(out, "\n - ic_total_count: %d, ic_with_type_info_count: %d",
+ FPrintF(out, " - ic_total_count: %d, ic_with_type_info_count: %d\n",
ic_total_count(), ic_with_type_info_count());
- PrintF(out, "\n - type_feedback_cells: ");
+ FPrintF(out, " - type_feedback_cells: ");
type_feedback_cells()->FixedArrayPrint(out);
}
void AliasedArgumentsEntry::AliasedArgumentsEntryPrint(FILE* out) {
HeapObject::PrintHeader(out, "AliasedArgumentsEntry");
- PrintF(out, "\n - aliased_context_slot: %d", aliased_context_slot());
+ FPrintF(out, "\n - aliased_context_slot: %d", aliased_context_slot());
}
void FixedArray::FixedArrayPrint(FILE* out) {
HeapObject::PrintHeader(out, "FixedArray");
- PrintF(out, " - length: %d", length());
+ FPrintF(out, " - length: %d", length());
for (int i = 0; i < length(); i++) {
- PrintF(out, "\n [%d]: ", i);
+ FPrintF(out, "\n [%d]: ", i);
get(i)->ShortPrint(out);
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void FixedDoubleArray::FixedDoubleArrayPrint(FILE* out) {
HeapObject::PrintHeader(out, "FixedDoubleArray");
- PrintF(out, " - length: %d", length());
+ FPrintF(out, " - length: %d", length());
for (int i = 0; i < length(); i++) {
if (is_the_hole(i)) {
- PrintF(out, "\n [%d]: <the hole>", i);
+ FPrintF(out, "\n [%d]: <the hole>", i);
} else {
- PrintF(out, "\n [%d]: %g", i, get_scalar(i));
+ FPrintF(out, "\n [%d]: %g", i, get_scalar(i));
}
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
@@ -624,29 +646,29 @@ void JSValue::JSValuePrint(FILE* out) {
void JSMessageObject::JSMessageObjectPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSMessageObject");
- PrintF(out, " - type: ");
+ FPrintF(out, " - type: ");
type()->ShortPrint(out);
- PrintF(out, "\n - arguments: ");
+ FPrintF(out, "\n - arguments: ");
arguments()->ShortPrint(out);
- PrintF(out, "\n - start_position: %d", start_position());
- PrintF(out, "\n - end_position: %d", end_position());
- PrintF(out, "\n - script: ");
+ FPrintF(out, "\n - start_position: %d", start_position());
+ FPrintF(out, "\n - end_position: %d", end_position());
+ FPrintF(out, "\n - script: ");
script()->ShortPrint(out);
- PrintF(out, "\n - stack_trace: ");
+ FPrintF(out, "\n - stack_trace: ");
stack_trace()->ShortPrint(out);
- PrintF(out, "\n - stack_frames: ");
+ FPrintF(out, "\n - stack_frames: ");
stack_frames()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void String::StringPrint(FILE* out) {
if (StringShape(this).IsSymbol()) {
- PrintF(out, "#");
+ FPrintF(out, "#");
} else if (StringShape(this).IsCons()) {
- PrintF(out, "c\"");
+ FPrintF(out, "c\"");
} else {
- PrintF(out, "\"");
+ FPrintF(out, "\"");
}
const char truncated_epilogue[] = "...<truncated>";
@@ -657,13 +679,13 @@ void String::StringPrint(FILE* out) {
}
}
for (int i = 0; i < len; i++) {
- PrintF(out, "%c", Get(i));
+ FPrintF(out, "%c", Get(i));
}
if (len != length()) {
- PrintF(out, "%s", truncated_epilogue);
+ FPrintF(out, "%s", truncated_epilogue);
}
- if (!StringShape(this).IsSymbol()) PrintF(out, "\"");
+ if (!StringShape(this).IsSymbol()) FPrintF(out, "\"");
}
@@ -688,13 +710,13 @@ static const char* const weekdays[] = {
void JSDate::JSDatePrint(FILE* out) {
HeapObject::PrintHeader(out, "JSDate");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - value = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - value = ");
value()->Print(out);
if (!year()->IsSmi()) {
- PrintF(out, " - time = NaN\n");
+ FPrintF(out, " - time = NaN\n");
} else {
- PrintF(out, " - time = %s %04d/%02d/%02d %02d:%02d:%02d\n",
+ FPrintF(out, " - time = %s %04d/%02d/%02d %02d:%02d:%02d\n",
weekdays[weekday()->IsSmi() ? Smi::cast(weekday())->value() + 1 : 0],
year()->IsSmi() ? Smi::cast(year())->value() : -1,
month()->IsSmi() ? Smi::cast(month())->value() : -1,
@@ -708,110 +730,121 @@ void JSDate::JSDatePrint(FILE* out) {
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - handler = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - handler = ");
handler()->Print(out);
- PrintF(out, " - hash = ");
+ FPrintF(out, " - hash = ");
hash()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void JSFunctionProxy::JSFunctionProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSFunctionProxy");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - handler = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - handler = ");
handler()->Print(out);
- PrintF(out, " - call_trap = ");
+ FPrintF(out, " - call_trap = ");
call_trap()->Print(out);
- PrintF(out, " - construct_trap = ");
+ FPrintF(out, " - construct_trap = ");
construct_trap()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void JSWeakMap::JSWeakMapPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSWeakMap");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - table = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - table = ");
table()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void JSFunction::JSFunctionPrint(FILE* out) {
HeapObject::PrintHeader(out, "Function");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - initial_map = ");
+ FPrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ FPrintF(out, " - initial_map = ");
if (has_initial_map()) {
initial_map()->ShortPrint(out);
}
- PrintF(out, "\n - shared_info = ");
+ FPrintF(out, "\n - shared_info = ");
shared()->ShortPrint(out);
- PrintF(out, "\n - name = ");
+ FPrintF(out, "\n - name = ");
shared()->name()->Print(out);
- PrintF(out, "\n - context = ");
+ FPrintF(out, "\n - context = ");
unchecked_context()->ShortPrint(out);
- PrintF(out, "\n - code = ");
+ FPrintF(out, "\n - literals = ");
+ literals()->ShortPrint(out);
+ FPrintF(out, "\n - code = ");
code()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
PrintProperties(out);
PrintElements(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "SharedFunctionInfo");
- PrintF(out, " - name: ");
+ FPrintF(out, " - name: ");
name()->ShortPrint(out);
- PrintF(out, "\n - expected_nof_properties: %d", expected_nof_properties());
- PrintF(out, "\n - instance class name = ");
+ FPrintF(out, "\n - expected_nof_properties: %d", expected_nof_properties());
+ FPrintF(out, "\n - instance class name = ");
instance_class_name()->Print(out);
- PrintF(out, "\n - code = ");
+ FPrintF(out, "\n - code = ");
code()->ShortPrint(out);
- PrintF(out, "\n - source code = ");
- GetSourceCode()->ShortPrint(out);
+ if (HasSourceCode()) {
+ FPrintF(out, "\n - source code = ");
+ String* source = String::cast(Script::cast(script())->source());
+ int start = start_position();
+ int length = end_position() - start;
+ SmartArrayPointer<char> source_string =
+ source->ToCString(DISALLOW_NULLS,
+ FAST_STRING_TRAVERSAL,
+ start, length, NULL);
+ FPrintF(out, "%s", *source_string);
+ }
// Script files are often large, hard to read.
- // PrintF(out, "\n - script =");
+ // FPrintF(out, "\n - script =");
// script()->Print(out);
- PrintF(out, "\n - function token position = %d", function_token_position());
- PrintF(out, "\n - start position = %d", start_position());
- PrintF(out, "\n - end position = %d", end_position());
- PrintF(out, "\n - is expression = %d", is_expression());
- PrintF(out, "\n - debug info = ");
+ FPrintF(out, "\n - function token position = %d", function_token_position());
+ FPrintF(out, "\n - start position = %d", start_position());
+ FPrintF(out, "\n - end position = %d", end_position());
+ FPrintF(out, "\n - is expression = %d", is_expression());
+ FPrintF(out, "\n - debug info = ");
debug_info()->ShortPrint(out);
- PrintF(out, "\n - length = %d", length());
- PrintF(out, "\n - has_only_simple_this_property_assignments = %d",
+ FPrintF(out, "\n - length = %d", length());
+ FPrintF(out, "\n - has_only_simple_this_property_assignments = %d",
has_only_simple_this_property_assignments());
- PrintF(out, "\n - this_property_assignments = ");
+ FPrintF(out, "\n - this_property_assignments = ");
this_property_assignments()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void JSGlobalProxy::JSGlobalProxyPrint(FILE* out) {
- PrintF(out, "global_proxy");
+ FPrintF(out, "global_proxy ");
JSObjectPrint(out);
- PrintF(out, "context : ");
- context()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "native context : ");
+ native_context()->ShortPrint(out);
+ FPrintF(out, "\n");
}
void JSGlobalObject::JSGlobalObjectPrint(FILE* out) {
- PrintF(out, "global ");
+ FPrintF(out, "global ");
JSObjectPrint(out);
- PrintF(out, "global context : ");
- global_context()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "native context : ");
+ native_context()->ShortPrint(out);
+ FPrintF(out, "\n");
}
void JSBuiltinsObject::JSBuiltinsObjectPrint(FILE* out) {
- PrintF(out, "builtins ");
+ FPrintF(out, "builtins ");
JSObjectPrint(out);
}
@@ -832,204 +865,235 @@ void Code::CodePrint(FILE* out) {
void Foreign::ForeignPrint(FILE* out) {
- PrintF(out, "foreign address : %p", foreign_address());
+ FPrintF(out, "foreign address : %p", foreign_address());
}
void AccessorInfo::AccessorInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "AccessorInfo");
- PrintF(out, "\n - getter: ");
+ FPrintF(out, "\n - getter: ");
getter()->ShortPrint(out);
- PrintF(out, "\n - setter: ");
+ FPrintF(out, "\n - setter: ");
setter()->ShortPrint(out);
- PrintF(out, "\n - name: ");
+ FPrintF(out, "\n - name: ");
name()->ShortPrint(out);
- PrintF(out, "\n - data: ");
+ FPrintF(out, "\n - data: ");
data()->ShortPrint(out);
- PrintF(out, "\n - flag: ");
+ FPrintF(out, "\n - flag: ");
flag()->ShortPrint(out);
}
void AccessorPair::AccessorPairPrint(FILE* out) {
HeapObject::PrintHeader(out, "AccessorPair");
- PrintF(out, "\n - getter: ");
+ FPrintF(out, "\n - getter: ");
getter()->ShortPrint(out);
- PrintF(out, "\n - setter: ");
+ FPrintF(out, "\n - setter: ");
setter()->ShortPrint(out);
}
void AccessCheckInfo::AccessCheckInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "AccessCheckInfo");
- PrintF(out, "\n - named_callback: ");
+ FPrintF(out, "\n - named_callback: ");
named_callback()->ShortPrint(out);
- PrintF(out, "\n - indexed_callback: ");
+ FPrintF(out, "\n - indexed_callback: ");
indexed_callback()->ShortPrint(out);
- PrintF(out, "\n - data: ");
+ FPrintF(out, "\n - data: ");
data()->ShortPrint(out);
}
void InterceptorInfo::InterceptorInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "InterceptorInfo");
- PrintF(out, "\n - getter: ");
+ FPrintF(out, "\n - getter: ");
getter()->ShortPrint(out);
- PrintF(out, "\n - setter: ");
+ FPrintF(out, "\n - setter: ");
setter()->ShortPrint(out);
- PrintF(out, "\n - query: ");
+ FPrintF(out, "\n - query: ");
query()->ShortPrint(out);
- PrintF(out, "\n - deleter: ");
+ FPrintF(out, "\n - deleter: ");
deleter()->ShortPrint(out);
- PrintF(out, "\n - enumerator: ");
+ FPrintF(out, "\n - enumerator: ");
enumerator()->ShortPrint(out);
- PrintF(out, "\n - data: ");
+ FPrintF(out, "\n - data: ");
data()->ShortPrint(out);
}
void CallHandlerInfo::CallHandlerInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "CallHandlerInfo");
- PrintF(out, "\n - callback: ");
+ FPrintF(out, "\n - callback: ");
callback()->ShortPrint(out);
- PrintF(out, "\n - data: ");
+ FPrintF(out, "\n - data: ");
data()->ShortPrint(out);
- PrintF(out, "\n - call_stub_cache: ");
+ FPrintF(out, "\n - call_stub_cache: ");
}
void FunctionTemplateInfo::FunctionTemplateInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "FunctionTemplateInfo");
- PrintF(out, "\n - class name: ");
+ FPrintF(out, "\n - class name: ");
class_name()->ShortPrint(out);
- PrintF(out, "\n - tag: ");
+ FPrintF(out, "\n - tag: ");
tag()->ShortPrint(out);
- PrintF(out, "\n - property_list: ");
+ FPrintF(out, "\n - property_list: ");
property_list()->ShortPrint(out);
- PrintF(out, "\n - serial_number: ");
+ FPrintF(out, "\n - serial_number: ");
serial_number()->ShortPrint(out);
- PrintF(out, "\n - call_code: ");
+ FPrintF(out, "\n - call_code: ");
call_code()->ShortPrint(out);
- PrintF(out, "\n - property_accessors: ");
+ FPrintF(out, "\n - property_accessors: ");
property_accessors()->ShortPrint(out);
- PrintF(out, "\n - prototype_template: ");
+ FPrintF(out, "\n - prototype_template: ");
prototype_template()->ShortPrint(out);
- PrintF(out, "\n - parent_template: ");
+ FPrintF(out, "\n - parent_template: ");
parent_template()->ShortPrint(out);
- PrintF(out, "\n - named_property_handler: ");
+ FPrintF(out, "\n - named_property_handler: ");
named_property_handler()->ShortPrint(out);
- PrintF(out, "\n - indexed_property_handler: ");
+ FPrintF(out, "\n - indexed_property_handler: ");
indexed_property_handler()->ShortPrint(out);
- PrintF(out, "\n - instance_template: ");
+ FPrintF(out, "\n - instance_template: ");
instance_template()->ShortPrint(out);
- PrintF(out, "\n - signature: ");
+ FPrintF(out, "\n - signature: ");
signature()->ShortPrint(out);
- PrintF(out, "\n - access_check_info: ");
+ FPrintF(out, "\n - access_check_info: ");
access_check_info()->ShortPrint(out);
- PrintF(out, "\n - hidden_prototype: %s",
+ FPrintF(out, "\n - hidden_prototype: %s",
hidden_prototype() ? "true" : "false");
- PrintF(out, "\n - undetectable: %s", undetectable() ? "true" : "false");
- PrintF(out, "\n - need_access_check: %s",
+ FPrintF(out, "\n - undetectable: %s", undetectable() ? "true" : "false");
+ FPrintF(out, "\n - need_access_check: %s",
needs_access_check() ? "true" : "false");
}
void ObjectTemplateInfo::ObjectTemplateInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "ObjectTemplateInfo");
- PrintF(out, " - tag: ");
+ FPrintF(out, " - tag: ");
tag()->ShortPrint(out);
- PrintF(out, "\n - property_list: ");
+ FPrintF(out, "\n - property_list: ");
property_list()->ShortPrint(out);
- PrintF(out, "\n - constructor: ");
+ FPrintF(out, "\n - constructor: ");
constructor()->ShortPrint(out);
- PrintF(out, "\n - internal_field_count: ");
+ FPrintF(out, "\n - internal_field_count: ");
internal_field_count()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
void SignatureInfo::SignatureInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "SignatureInfo");
- PrintF(out, "\n - receiver: ");
+ FPrintF(out, "\n - receiver: ");
receiver()->ShortPrint(out);
- PrintF(out, "\n - args: ");
+ FPrintF(out, "\n - args: ");
args()->ShortPrint(out);
}
void TypeSwitchInfo::TypeSwitchInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "TypeSwitchInfo");
- PrintF(out, "\n - types: ");
+ FPrintF(out, "\n - types: ");
types()->ShortPrint(out);
}
void Script::ScriptPrint(FILE* out) {
HeapObject::PrintHeader(out, "Script");
- PrintF(out, "\n - source: ");
+ FPrintF(out, "\n - source: ");
source()->ShortPrint(out);
- PrintF(out, "\n - name: ");
+ FPrintF(out, "\n - name: ");
name()->ShortPrint(out);
- PrintF(out, "\n - line_offset: ");
+ FPrintF(out, "\n - line_offset: ");
line_offset()->ShortPrint(out);
- PrintF(out, "\n - column_offset: ");
+ FPrintF(out, "\n - column_offset: ");
column_offset()->ShortPrint(out);
- PrintF(out, "\n - type: ");
+ FPrintF(out, "\n - type: ");
type()->ShortPrint(out);
- PrintF(out, "\n - id: ");
+ FPrintF(out, "\n - id: ");
id()->ShortPrint(out);
- PrintF(out, "\n - data: ");
+ FPrintF(out, "\n - data: ");
data()->ShortPrint(out);
- PrintF(out, "\n - context data: ");
+ FPrintF(out, "\n - context data: ");
context_data()->ShortPrint(out);
- PrintF(out, "\n - wrapper: ");
+ FPrintF(out, "\n - wrapper: ");
wrapper()->ShortPrint(out);
- PrintF(out, "\n - compilation type: ");
+ FPrintF(out, "\n - compilation type: ");
compilation_type()->ShortPrint(out);
- PrintF(out, "\n - line ends: ");
+ FPrintF(out, "\n - line ends: ");
line_ends()->ShortPrint(out);
- PrintF(out, "\n - eval from shared: ");
+ FPrintF(out, "\n - eval from shared: ");
eval_from_shared()->ShortPrint(out);
- PrintF(out, "\n - eval from instructions offset: ");
+ FPrintF(out, "\n - eval from instructions offset: ");
eval_from_instructions_offset()->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
#ifdef ENABLE_DEBUGGER_SUPPORT
void DebugInfo::DebugInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "DebugInfo");
- PrintF(out, "\n - shared: ");
+ FPrintF(out, "\n - shared: ");
shared()->ShortPrint(out);
- PrintF(out, "\n - original_code: ");
+ FPrintF(out, "\n - original_code: ");
original_code()->ShortPrint(out);
- PrintF(out, "\n - code: ");
+ FPrintF(out, "\n - code: ");
code()->ShortPrint(out);
- PrintF(out, "\n - break_points: ");
+ FPrintF(out, "\n - break_points: ");
break_points()->Print(out);
}
void BreakPointInfo::BreakPointInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "BreakPointInfo");
- PrintF(out, "\n - code_position: %d", code_position()->value());
- PrintF(out, "\n - source_position: %d", source_position()->value());
- PrintF(out, "\n - statement_position: %d", statement_position()->value());
- PrintF(out, "\n - break_point_objects: ");
+ FPrintF(out, "\n - code_position: %d", code_position()->value());
+ FPrintF(out, "\n - source_position: %d", source_position()->value());
+ FPrintF(out, "\n - statement_position: %d", statement_position()->value());
+ FPrintF(out, "\n - break_point_objects: ");
break_point_objects()->ShortPrint(out);
}
#endif // ENABLE_DEBUGGER_SUPPORT
void DescriptorArray::PrintDescriptors(FILE* out) {
- PrintF(out, "Descriptor array %d\n", number_of_descriptors());
+ FPrintF(out, "Descriptor array %d\n", number_of_descriptors());
for (int i = 0; i < number_of_descriptors(); i++) {
- PrintF(out, " %d: ", i);
+ FPrintF(out, " %d: ", i);
Descriptor desc;
Get(i, &desc);
desc.Print(out);
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
+}
+
+
+void TransitionArray::PrintTransitions(FILE* out) {
+ FPrintF(out, "Transition array %d\n", number_of_transitions());
+ for (int i = 0; i < number_of_transitions(); i++) {
+ FPrintF(out, " %d: ", i);
+ GetKey(i)->StringPrint(out);
+ FPrintF(out, ": ");
+ switch (GetTargetDetails(i).type()) {
+ case FIELD: {
+ FPrintF(out, " (transition to field)\n");
+ break;
+ }
+ case CONSTANT_FUNCTION:
+ FPrintF(out, " (transition to constant function)\n");
+ break;
+ case CALLBACKS:
+ FPrintF(out, " (transition to callback)\n");
+ break;
+ // Values below are never in the target descriptor array.
+ case NORMAL:
+ case HANDLER:
+ case INTERCEPTOR:
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
+ break;
+ }
+ }
+ FPrintF(out, "\n");
}
diff --git a/src/3rdparty/v8/src/objects-visiting-inl.h b/src/3rdparty/v8/src/objects-visiting-inl.h
index 8ba92f7..71635ca 100644
--- a/src/3rdparty/v8/src/objects-visiting-inl.h
+++ b/src/3rdparty/v8/src/objects-visiting-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -56,7 +56,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
- table_.Register(kVisitGlobalContext,
+ table_.Register(kVisitNativeContext,
&FixedBodyVisitor<StaticVisitor,
Context::ScavengeBodyDescriptor,
int>::Visit);
@@ -93,6 +93,537 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
}
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::Initialize() {
+ table_.Register(kVisitShortcutCandidate,
+ &FixedBodyVisitor<StaticVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitConsString,
+ &FixedBodyVisitor<StaticVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitSlicedString,
+ &FixedBodyVisitor<StaticVisitor,
+ SlicedString::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitFixedArray,
+ &FlexibleBodyVisitor<StaticVisitor,
+ FixedArray::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitNativeContext, &VisitNativeContext);
+
+ table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);
+
+ table_.Register(kVisitOddball,
+ &FixedBodyVisitor<StaticVisitor,
+ Oddball::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitMap, &VisitMap);
+
+ table_.Register(kVisitCode, &VisitCode);
+
+ table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
+
+ table_.Register(kVisitJSFunction, &VisitJSFunction);
+
+ // Registration for kVisitJSRegExp is done by StaticVisitor.
+
+ table_.Register(kVisitPropertyCell,
+ &FixedBodyVisitor<StaticVisitor,
+ JSGlobalPropertyCell::BodyDescriptor,
+ void>::Visit);
+
+ table_.template RegisterSpecializations<DataObjectVisitor,
+ kVisitDataObject,
+ kVisitDataObjectGeneric>();
+
+ table_.template RegisterSpecializations<JSObjectVisitor,
+ kVisitJSObject,
+ kVisitJSObjectGeneric>();
+
+ table_.template RegisterSpecializations<StructObjectVisitor,
+ kVisitStruct,
+ kVisitStructGeneric>();
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
+ Heap* heap, Address entry_address) {
+ Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
+ heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
+ StaticVisitor::MarkObject(heap, code);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
+ Heap* heap, RelocInfo* rinfo) {
+ ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
+ ASSERT(!rinfo->target_object()->IsConsString());
+ HeapObject* object = HeapObject::cast(rinfo->target_object());
+ heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
+ StaticVisitor::MarkObject(heap, object);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitGlobalPropertyCell(
+ Heap* heap, RelocInfo* rinfo) {
+ ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
+ JSGlobalPropertyCell* cell = rinfo->target_cell();
+ StaticVisitor::MarkObject(heap, cell);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
+ Heap* heap, RelocInfo* rinfo) {
+ ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
+ rinfo->IsPatchedReturnSequence()) ||
+ (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
+ rinfo->IsPatchedDebugBreakSlotSequence()));
+ Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
+ heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+ StaticVisitor::MarkObject(heap, target);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
+ Heap* heap, RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
+ Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ // Monomorphic ICs are preserved when possible, but need to be flushed
+ // when they might be keeping a Context alive, or when the heap is about
+ // to be serialized.
+ if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
+ && (target->ic_state() == MEGAMORPHIC || heap->flush_monomorphic_ics() ||
+ Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
+ IC::Clear(rinfo->pc());
+ target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ }
+ heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+ StaticVisitor::MarkObject(heap, target);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
+ Heap* heap, RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+ Code* target = rinfo->code_age_stub();
+ ASSERT(target != NULL);
+ heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+ StaticVisitor::MarkObject(heap, target);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
+ Map* map, HeapObject* object) {
+ FixedBodyVisitor<StaticVisitor,
+ Context::MarkCompactBodyDescriptor,
+ void>::Visit(map, object);
+
+ MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
+ for (int idx = Context::FIRST_WEAK_SLOT;
+ idx < Context::NATIVE_CONTEXT_SLOTS;
+ ++idx) {
+ Object** slot =
+ HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
+ collector->RecordSlot(slot, slot, *slot);
+ }
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitMap(
+ Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ Map* map_object = Map::cast(object);
+
+ // Clears the cache of ICs related to this map.
+ if (FLAG_cleanup_code_caches_at_gc) {
+ map_object->ClearCodeCache(heap);
+ }
+
+ // When map collection is enabled we have to mark through map's
+ // transitions and back pointers in a special way to make these links
+ // weak. Only maps for subclasses of JSReceiver can have transitions.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ if (FLAG_collect_maps &&
+ map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ MarkMapContents(heap, map_object);
+ } else {
+ StaticVisitor::VisitPointers(heap,
+ HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
+ }
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCode(
+ Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ Code* code = Code::cast(object);
+ if (FLAG_cleanup_code_caches_at_gc) {
+ code->ClearTypeFeedbackCells(heap);
+ }
+ if (FLAG_age_code && !Serializer::enabled()) {
+ code->MakeOlder(heap->mark_compact_collector()->marking_parity());
+ }
+ code->CodeIterateBody<StaticVisitor>(heap);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
+ Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+ if (shared->ic_age() != heap->global_ic_age()) {
+ shared->ResetForNewContext(heap->global_ic_age());
+ }
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ if (collector->is_code_flushing_enabled()) {
+ if (IsFlushable(heap, shared)) {
+ // This function's code looks flushable. But we have to postpone
+ // the decision until we see all functions that point to the same
+ // SharedFunctionInfo because some of them might be optimized.
+ // That would also make the non-optimized version of the code
+ // non-flushable, because it is required for bailing out from
+ // optimized code.
+ collector->code_flusher()->AddCandidate(shared);
+ // Treat the reference to the code object weakly.
+ VisitSharedFunctionInfoWeakCode(heap, object);
+ return;
+ }
+ }
+ VisitSharedFunctionInfoStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
+ Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ JSFunction* function = JSFunction::cast(object);
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ if (collector->is_code_flushing_enabled()) {
+ if (IsFlushable(heap, function)) {
+ // This function's code looks flushable. But we have to postpone
+ // the decision until we see all functions that point to the same
+ // SharedFunctionInfo because some of them might be optimized.
+ // That would also make the non-optimized version of the code
+ // non-flushable, because it is required for bailing out from
+ // optimized code.
+ collector->code_flusher()->AddCandidate(function);
+ // Visit shared function info immediately to avoid double checking
+ // of its flushability later. This is just an optimization because
+ // the shared function info would eventually be visited.
+ SharedFunctionInfo* shared = function->unchecked_shared();
+ if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
+ StaticVisitor::MarkObject(heap, shared->map());
+ VisitSharedFunctionInfoWeakCode(heap, shared);
+ }
+ // Treat the reference to the code object weakly.
+ VisitJSFunctionWeakCode(heap, object);
+ return;
+ } else {
+ // Visit all unoptimized code objects to prevent flushing them.
+ StaticVisitor::MarkObject(heap, function->shared()->code());
+ if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
+ MarkInlinedFunctionsCode(heap, function->code());
+ }
+ }
+ }
+ VisitJSFunctionStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
+ Map* map, HeapObject* object) {
+ int last_property_offset =
+ JSRegExp::kSize + kPointerSize * map->inobject_properties();
+ StaticVisitor::VisitPointers(map->GetHeap(),
+ HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
+ HeapObject::RawField(object, last_property_offset));
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
+ Heap* heap, Map* map) {
+ // Make sure that the back pointer stored either in the map itself or
+ // inside its transitions array is marked. Skip recording the back
+ // pointer slot since map space is not compacted.
+ StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));
+
+ // Treat pointers in the transitions array as weak and also mark that
+ // array to prevent visiting it later. Skip recording the transition
+ // array slot, since it will be implicitly recorded when the pointer
+ // fields of this map are visited.
+ TransitionArray* transitions = map->unchecked_transition_array();
+ if (transitions->IsTransitionArray()) {
+ MarkTransitionArray(heap, transitions);
+ } else {
+ // Already marked by marking map->GetBackPointer() above.
+ ASSERT(transitions->IsMap() || transitions->IsUndefined());
+ }
+
+ // Mark the pointer fields of the Map. Since the transitions array has
+ // been marked already, it is fine that one of these fields contains a
+ // pointer to it.
+ StaticVisitor::VisitPointers(heap,
+ HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
+ Heap* heap, TransitionArray* transitions) {
+ if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
+
+ // Simple transitions do not have keys nor prototype transitions.
+ if (transitions->IsSimpleTransition()) return;
+
+ if (transitions->HasPrototypeTransitions()) {
+ // Mark prototype transitions array but do not push it onto marking
+ // stack, this will make references from it weak. We will clean dead
+ // prototype transitions in ClearNonLiveTransitions.
+ Object** slot = transitions->GetPrototypeTransitionsSlot();
+ HeapObject* obj = HeapObject::cast(*slot);
+ heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
+ StaticVisitor::MarkObjectWithoutPush(heap, obj);
+ }
+
+ for (int i = 0; i < transitions->number_of_transitions(); ++i) {
+ StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
+ }
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
+ Heap* heap, Code* code) {
+ // For optimized functions we should retain both non-optimized version
+ // of its code and non-optimized version of all inlined functions.
+ // This is required to support bailing out from inlined code.
+ DeoptimizationInputData* data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+ FixedArray* literals = data->LiteralArray();
+ for (int i = 0, count = data->InlinedFunctionCount()->value();
+ i < count;
+ i++) {
+ JSFunction* inlined = JSFunction::cast(literals->get(i));
+ StaticVisitor::MarkObject(heap, inlined->shared()->code());
+ }
+}
+
+
+inline static bool IsValidNonBuiltinContext(Object* context) {
+ return context->IsContext() &&
+ !Context::cast(context)->global_object()->IsJSBuiltinsObject();
+}
+
+
+inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+ Object* undefined = heap->undefined_value();
+ return (info->script() != undefined) &&
+ (reinterpret_cast<Script*>(info->script())->source() != undefined);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+ Heap* heap, JSFunction* function) {
+ SharedFunctionInfo* shared_info = function->unchecked_shared();
+
+ // Code is either on stack, in compilation cache or referenced
+ // by optimized version of function.
+ MarkBit code_mark = Marking::MarkBitFrom(function->code());
+ if (code_mark.Get()) {
+ if (!FLAG_age_code) {
+ if (!Marking::MarkBitFrom(shared_info).Get()) {
+ shared_info->set_code_age(0);
+ }
+ }
+ return false;
+ }
+
+ // The function must have a valid context and not be a builtin.
+ if (!IsValidNonBuiltinContext(function->unchecked_context())) {
+ return false;
+ }
+
+ // We do not (yet) flush code for optimized functions.
+ if (function->code() != shared_info->code()) {
+ return false;
+ }
+
+ // Check age of optimized code.
+ if (FLAG_age_code && !function->code()->IsOld()) {
+ return false;
+ }
+
+ return IsFlushable(heap, shared_info);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+ Heap* heap, SharedFunctionInfo* shared_info) {
+ // Code is either on stack, in compilation cache or referenced
+ // by optimized version of function.
+ MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
+ if (code_mark.Get()) {
+ return false;
+ }
+
+ // The function must be compiled and have the source code available,
+ // to be able to recompile it in case we need the function again.
+ if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
+ return false;
+ }
+
+ // We never flush code for API functions.
+ Object* function_data = shared_info->function_data();
+ if (function_data->IsFunctionTemplateInfo()) {
+ return false;
+ }
+
+ // Only flush code for functions.
+ if (shared_info->code()->kind() != Code::FUNCTION) {
+ return false;
+ }
+
+ // Function must be lazy compilable.
+ if (!shared_info->allows_lazy_compilation()) {
+ return false;
+ }
+
+ // If this is a full script wrapped in a function we do no flush the code.
+ if (shared_info->is_toplevel()) {
+ return false;
+ }
+
+ if (FLAG_age_code) {
+ return shared_info->code()->IsOld();
+ } else {
+ // How many collections newly compiled code object will survive before being
+ // flushed.
+ static const int kCodeAgeThreshold = 5;
+
+ // Age this shared function info.
+ if (shared_info->code_age() < kCodeAgeThreshold) {
+ shared_info->set_code_age(shared_info->code_age() + 1);
+ return false;
+ }
+ return true;
+ }
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
+ Heap* heap, HeapObject* object) {
+ StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
+ Object** start_slot =
+ HeapObject::RawField(object,
+ SharedFunctionInfo::BodyDescriptor::kStartOffset);
+ Object** end_slot =
+ HeapObject::RawField(object,
+ SharedFunctionInfo::BodyDescriptor::kEndOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
+ Heap* heap, HeapObject* object) {
+ StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
+ Object** name_slot =
+ HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
+ StaticVisitor::VisitPointer(heap, name_slot);
+
+ // Skip visiting kCodeOffset as it is treated weakly here.
+ STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
+ SharedFunctionInfo::kCodeOffset);
+ STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
+ SharedFunctionInfo::kOptimizedCodeMapOffset);
+
+ Object** start_slot =
+ HeapObject::RawField(object,
+ SharedFunctionInfo::kOptimizedCodeMapOffset);
+ Object** end_slot =
+ HeapObject::RawField(object,
+ SharedFunctionInfo::BodyDescriptor::kEndOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
+ Heap* heap, HeapObject* object) {
+ Object** start_slot =
+ HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+ Object** end_slot =
+ HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+
+ VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+ STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+ JSFunction::kPrototypeOrInitialMapOffset);
+
+ start_slot =
+ HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+ end_slot =
+ HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
+ Heap* heap, HeapObject* object) {
+ Object** start_slot =
+ HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+ Object** end_slot =
+ HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+
+ // Skip visiting kCodeEntryOffset as it is treated weakly here.
+ STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+ JSFunction::kPrototypeOrInitialMapOffset);
+
+ start_slot =
+ HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+ end_slot =
+ HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+}
+
+
void Code::CodeIterateBody(ObjectVisitor* v) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
diff --git a/src/3rdparty/v8/src/objects-visiting.cc b/src/3rdparty/v8/src/objects-visiting.cc
index a2dc43e..6ae4d7c 100644
--- a/src/3rdparty/v8/src/objects-visiting.cc
+++ b/src/3rdparty/v8/src/objects-visiting.cc
@@ -45,7 +45,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
- if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+ if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
return kVisitSeqAsciiString;
} else {
return kVisitSeqTwoByteString;
diff --git a/src/3rdparty/v8/src/objects-visiting.h b/src/3rdparty/v8/src/objects-visiting.h
index b476dfe..3937e25 100644
--- a/src/3rdparty/v8/src/objects-visiting.h
+++ b/src/3rdparty/v8/src/objects-visiting.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -46,71 +46,70 @@ namespace internal {
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
public:
+#define VISITOR_ID_LIST(V) \
+ V(SeqAsciiString) \
+ V(SeqTwoByteString) \
+ V(ShortcutCandidate) \
+ V(ByteArray) \
+ V(FreeSpace) \
+ V(FixedArray) \
+ V(FixedDoubleArray) \
+ V(NativeContext) \
+ V(DataObject2) \
+ V(DataObject3) \
+ V(DataObject4) \
+ V(DataObject5) \
+ V(DataObject6) \
+ V(DataObject7) \
+ V(DataObject8) \
+ V(DataObject9) \
+ V(DataObjectGeneric) \
+ V(JSObject2) \
+ V(JSObject3) \
+ V(JSObject4) \
+ V(JSObject5) \
+ V(JSObject6) \
+ V(JSObject7) \
+ V(JSObject8) \
+ V(JSObject9) \
+ V(JSObjectGeneric) \
+ V(Struct2) \
+ V(Struct3) \
+ V(Struct4) \
+ V(Struct5) \
+ V(Struct6) \
+ V(Struct7) \
+ V(Struct8) \
+ V(Struct9) \
+ V(StructGeneric) \
+ V(ConsString) \
+ V(SlicedString) \
+ V(Oddball) \
+ V(Code) \
+ V(Map) \
+ V(PropertyCell) \
+ V(SharedFunctionInfo) \
+ V(JSFunction) \
+ V(JSWeakMap) \
+ V(JSRegExp)
+
+ // For data objects, JS objects and structs along with generic visitor which
+ // can visit object of any size we provide visitors specialized by
+ // object size in words.
+ // Ids of specialized visitors are declared in a linear order (without
+ // holes) starting from the id of visitor specialized for 2 words objects
+ // (base visitor id) and ending with the id of generic visitor.
+ // Method GetVisitorIdForSize depends on this ordering to calculate visitor
+ // id of specialized visitor from given instance size, base visitor id and
+ // generic visitor's id.
enum VisitorId {
- kVisitSeqAsciiString = 0,
- kVisitSeqTwoByteString,
- kVisitShortcutCandidate,
- kVisitByteArray,
- kVisitFreeSpace,
- kVisitFixedArray,
- kVisitFixedDoubleArray,
- kVisitGlobalContext,
-
- // For data objects, JS objects and structs along with generic visitor which
- // can visit object of any size we provide visitors specialized by
- // object size in words.
- // Ids of specialized visitors are declared in a linear order (without
- // holes) starting from the id of visitor specialized for 2 words objects
- // (base visitor id) and ending with the id of generic visitor.
- // Method GetVisitorIdForSize depends on this ordering to calculate visitor
- // id of specialized visitor from given instance size, base visitor id and
- // generic visitor's id.
-
- kVisitDataObject,
- kVisitDataObject2 = kVisitDataObject,
- kVisitDataObject3,
- kVisitDataObject4,
- kVisitDataObject5,
- kVisitDataObject6,
- kVisitDataObject7,
- kVisitDataObject8,
- kVisitDataObject9,
- kVisitDataObjectGeneric,
-
- kVisitJSObject,
- kVisitJSObject2 = kVisitJSObject,
- kVisitJSObject3,
- kVisitJSObject4,
- kVisitJSObject5,
- kVisitJSObject6,
- kVisitJSObject7,
- kVisitJSObject8,
- kVisitJSObject9,
- kVisitJSObjectGeneric,
-
- kVisitStruct,
- kVisitStruct2 = kVisitStruct,
- kVisitStruct3,
- kVisitStruct4,
- kVisitStruct5,
- kVisitStruct6,
- kVisitStruct7,
- kVisitStruct8,
- kVisitStruct9,
- kVisitStructGeneric,
-
- kVisitConsString,
- kVisitSlicedString,
- kVisitOddball,
- kVisitCode,
- kVisitMap,
- kVisitPropertyCell,
- kVisitSharedFunctionInfo,
- kVisitJSFunction,
- kVisitJSWeakMap,
- kVisitJSRegExp,
-
+#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
+ VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
+#undef VISITOR_ID_ENUM_DECL
kVisitorIdCount,
+ kVisitDataObject = kVisitDataObject2,
+ kVisitJSObject = kVisitJSObject2,
+ kVisitStruct = kVisitStruct2,
kMinObjectSizeInWords = 2
};
@@ -361,7 +360,97 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
- StaticNewSpaceVisitor<StaticVisitor>::table_;
+ StaticNewSpaceVisitor<StaticVisitor>::table_;
+
+
+// Base class for visitors used to transitively mark the entire heap.
+// IterateBody returns nothing.
+// Certain types of objects might not be handled by this base class and
+// no visitor function is registered by the generic initialization. A
+// specialized visitor function needs to be provided by the inheriting
+// class itself for those cases.
+//
+// This class is intended to be used in the following way:
+//
+// class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
+// ...
+// }
+//
+// This is an example of Curiously recurring template pattern.
+template<typename StaticVisitor>
+class StaticMarkingVisitor : public StaticVisitorBase {
+ public:
+ static void Initialize();
+
+ static inline void IterateBody(Map* map, HeapObject* obj) {
+ table_.GetVisitor(map)(map, obj);
+ }
+
+ static inline void VisitCodeEntry(Heap* heap, Address entry_address);
+ static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo);
+ static inline void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo);
+ static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo);
+ static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo);
+ static inline void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo);
+ static inline void VisitExternalReference(RelocInfo* rinfo) { }
+ static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
+
+ // TODO(mstarzinger): This should be made protected once refactoring is done.
+ // Mark non-optimize code for functions inlined into the given optimized
+ // code. This will prevent it from being flushed.
+ static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
+
+ protected:
+ static inline void VisitMap(Map* map, HeapObject* object);
+ static inline void VisitCode(Map* map, HeapObject* object);
+ static inline void VisitSharedFunctionInfo(Map* map, HeapObject* object);
+ static inline void VisitJSFunction(Map* map, HeapObject* object);
+ static inline void VisitJSRegExp(Map* map, HeapObject* object);
+ static inline void VisitNativeContext(Map* map, HeapObject* object);
+
+ // Mark pointers in a Map and its TransitionArray together, possibly
+ // treating transitions or back pointers weak.
+ static void MarkMapContents(Heap* heap, Map* map);
+ static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
+
+ // Code flushing support.
+ static inline bool IsFlushable(Heap* heap, JSFunction* function);
+ static inline bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info);
+
+ // Helpers used by code flushing support that visit pointer fields and treat
+ // references to code objects either strongly or weakly.
+ static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
+ static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
+ static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
+ static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
+
+ class DataObjectVisitor {
+ public:
+ template<int size>
+ static inline void VisitSpecialized(Map* map, HeapObject* object) {
+ }
+
+ static inline void Visit(Map* map, HeapObject* object) {
+ }
+ };
+
+ typedef FlexibleBodyVisitor<StaticVisitor,
+ JSObject::BodyDescriptor,
+ void> JSObjectVisitor;
+
+ typedef FlexibleBodyVisitor<StaticVisitor,
+ StructBodyDescriptor,
+ void> StructObjectVisitor;
+
+ typedef void (*Callback)(Map* map, HeapObject* object);
+
+ static VisitorDispatchTable<Callback> table_;
+};
+
+
+template<typename StaticVisitor>
+VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
+ StaticMarkingVisitor<StaticVisitor>::table_;
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/objects.cc b/src/3rdparty/v8/src/objects.cc
index 7373384..0d3836b 100644
--- a/src/3rdparty/v8/src/objects.cc
+++ b/src/3rdparty/v8/src/objects.cc
@@ -56,11 +56,6 @@
namespace v8 {
namespace internal {
-void PrintElementsKind(FILE* out, ElementsKind kind) {
- ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
- PrintF(out, "%s", accessor->name());
-}
-
MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
Object* value) {
@@ -74,13 +69,13 @@ MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
}
-MaybeObject* Object::ToObject(Context* global_context) {
+MaybeObject* Object::ToObject(Context* native_context) {
if (IsNumber()) {
- return CreateJSValue(global_context->number_function(), this);
+ return CreateJSValue(native_context->number_function(), this);
} else if (IsBoolean()) {
- return CreateJSValue(global_context->boolean_function(), this);
+ return CreateJSValue(native_context->boolean_function(), this);
} else if (IsString()) {
- return CreateJSValue(global_context->string_function(), this);
+ return CreateJSValue(native_context->string_function(), this);
}
ASSERT(IsJSObject());
return this;
@@ -92,16 +87,16 @@ MaybeObject* Object::ToObject() {
return this;
} else if (IsNumber()) {
Isolate* isolate = Isolate::Current();
- Context* global_context = isolate->context()->global_context();
- return CreateJSValue(global_context->number_function(), this);
+ Context* native_context = isolate->context()->native_context();
+ return CreateJSValue(native_context->number_function(), this);
} else if (IsBoolean()) {
Isolate* isolate = HeapObject::cast(this)->GetIsolate();
- Context* global_context = isolate->context()->global_context();
- return CreateJSValue(global_context->boolean_function(), this);
+ Context* native_context = isolate->context()->native_context();
+ return CreateJSValue(native_context->boolean_function(), this);
} else if (IsString()) {
Isolate* isolate = HeapObject::cast(this)->GetIsolate();
- Context* global_context = isolate->context()->global_context();
- return CreateJSValue(global_context->string_function(), this);
+ Context* native_context = isolate->context()->native_context();
+ return CreateJSValue(native_context->string_function(), this);
}
// Throw a type error.
@@ -139,13 +134,16 @@ void Object::Lookup(String* name, LookupResult* result) {
if (IsJSReceiver()) {
holder = this;
} else {
- Context* global_context = Isolate::Current()->context()->global_context();
+ Context* native_context = Isolate::Current()->context()->native_context();
if (IsNumber()) {
- holder = global_context->number_function()->instance_prototype();
+ holder = native_context->number_function()->instance_prototype();
} else if (IsString()) {
- holder = global_context->string_function()->instance_prototype();
+ holder = native_context->string_function()->instance_prototype();
} else if (IsBoolean()) {
- holder = global_context->boolean_function()->instance_prototype();
+ holder = native_context->boolean_function()->instance_prototype();
+ } else {
+ Isolate::Current()->PushStackTraceAndDie(
+ 0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
}
}
ASSERT(holder != NULL); // Cannot handle null or undefined.
@@ -183,8 +181,19 @@ MaybeObject* JSObject::GetPropertyWithCallback(Object* receiver,
// api style callbacks.
if (structure->IsAccessorInfo()) {
AccessorInfo* data = AccessorInfo::cast(structure);
+ if (!data->IsCompatibleReceiver(receiver)) {
+ Handle<Object> name_handle(name);
+ Handle<Object> receiver_handle(receiver);
+ Handle<Object> args[2] = { name_handle, receiver_handle };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("incompatible_method_receiver",
+ HandleVector(args,
+ ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
Object* fun_obj = data->getter();
v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
+ if (call_fun == NULL) return isolate->heap()->undefined_value();
HandleScope scope(isolate);
JSObject* self = JSObject::cast(receiver);
Handle<String> key(name);
@@ -201,7 +210,9 @@ MaybeObject* JSObject::GetPropertyWithCallback(Object* receiver,
if (result.IsEmpty()) {
return isolate->heap()->undefined_value();
}
- return *v8::Utils::OpenHandle(*result);
+ Object* return_value = *v8::Utils::OpenHandle(*result);
+ return_value->VerifyApiCallResultType();
+ return return_value;
}
// __defineGetter__ callback
@@ -253,13 +264,14 @@ MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
}
-MaybeObject* JSProxy::SetElementWithHandler(uint32_t index,
+MaybeObject* JSProxy::SetElementWithHandler(JSReceiver* receiver,
+ uint32_t index,
Object* value,
StrictModeFlag strict_mode) {
String* name;
MaybeObject* maybe = GetHeap()->Uint32ToString(index);
if (!maybe->To<String>(&name)) return maybe;
- return SetPropertyWithHandler(name, value, NONE, strict_mode);
+ return SetPropertyWithHandler(receiver, name, value, NONE, strict_mode);
}
@@ -400,16 +412,16 @@ PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
} else {
result->holder()->LocalLookupRealNamedProperty(name, &r);
}
- if (r.IsProperty()) {
- return GetPropertyAttributeWithFailedAccessCheck(receiver,
- &r,
- name,
- continue_search);
- }
- break;
+ if (!r.IsFound()) break;
+ return GetPropertyAttributeWithFailedAccessCheck(receiver,
+ &r,
+ name,
+ continue_search);
}
- default:
+ case HANDLER:
+ case TRANSITION:
+ case NONEXISTENT:
UNREACHABLE();
}
}
@@ -475,10 +487,21 @@ MaybeObject* JSObject::SetNormalizedProperty(String* name,
set_properties(StringDictionary::cast(dict));
return value;
}
- // Preserve enumeration index.
- details = PropertyDetails(details.attributes(),
- details.type(),
- property_dictionary()->DetailsAt(entry).index());
+
+ PropertyDetails original_details = property_dictionary()->DetailsAt(entry);
+ int enumeration_index;
+ // Preserve the enumeration index unless the property was deleted.
+ if (original_details.IsDeleted()) {
+ enumeration_index = property_dictionary()->NextEnumerationIndex();
+ property_dictionary()->SetNextEnumerationIndex(enumeration_index + 1);
+ } else {
+ enumeration_index = original_details.dictionary_index();
+ ASSERT(enumeration_index > 0);
+ }
+
+ details = PropertyDetails(
+ details.attributes(), details.type(), enumeration_index);
+
if (IsGlobalObject()) {
JSGlobalPropertyCell* cell =
JSGlobalPropertyCell::cast(property_dictionary()->ValueAt(entry));
@@ -506,11 +529,12 @@ MaybeObject* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
// map change to invalidate any ICs that think they can load
// from the DontDelete cell without checking if it contains
// the hole value.
- Object* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
- set_map(Map::cast(new_map));
+ Map* new_map;
+ MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
+ ASSERT(new_map->is_dictionary_map());
+ set_map(new_map);
}
JSGlobalPropertyCell* cell =
JSGlobalPropertyCell::cast(dictionary->ValueAt(entry));
@@ -543,7 +567,7 @@ bool JSObject::IsDirty() {
// If the object is fully fast case and has the same map it was
// created with then no changes can have been made to it.
return map() != fun->initial_map()
- || !HasFastElements()
+ || !HasFastObjectElements()
|| !HasFastProperties();
}
@@ -627,15 +651,12 @@ MaybeObject* Object::GetProperty(Object* receiver,
receiver, result->GetCallbackObject(), name);
case HANDLER:
return result->proxy()->GetPropertyWithHandler(receiver, name);
- case INTERCEPTOR: {
- JSObject* recvr = JSObject::cast(receiver);
+ case INTERCEPTOR:
return result->holder()->GetPropertyWithInterceptor(
- recvr, name, attributes);
- }
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
+ receiver, name, attributes);
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
break;
}
UNREACHABLE();
@@ -656,13 +677,13 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
holder = holder->GetPrototype()) {
if (!holder->IsJSObject()) {
Isolate* isolate = heap->isolate();
- Context* global_context = isolate->context()->global_context();
+ Context* native_context = isolate->context()->native_context();
if (holder->IsNumber()) {
- holder = global_context->number_function()->instance_prototype();
+ holder = native_context->number_function()->instance_prototype();
} else if (holder->IsString()) {
- holder = global_context->string_function()->instance_prototype();
+ holder = native_context->string_function()->instance_prototype();
} else if (holder->IsBoolean()) {
- holder = global_context->boolean_function()->instance_prototype();
+ holder = native_context->boolean_function()->instance_prototype();
} else if (holder->IsJSProxy()) {
return JSProxy::cast(holder)->GetElementWithHandler(receiver, index);
} else {
@@ -704,7 +725,7 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
Object* Object::GetPrototype() {
if (IsSmi()) {
Heap* heap = Isolate::Current()->heap();
- Context* context = heap->isolate()->context()->global_context();
+ Context* context = heap->isolate()->context()->native_context();
return context->number_function()->instance_prototype();
}
@@ -716,7 +737,7 @@ Object* Object::GetPrototype() {
return heap_object->map()->prototype();
}
Heap* heap = heap_object->GetHeap();
- Context* context = heap->isolate()->context()->global_context();
+ Context* context = heap->isolate()->context()->native_context();
if (heap_object->IsHeapNumber()) {
return context->number_function()->instance_prototype();
@@ -758,7 +779,6 @@ MaybeObject* Object::GetHash(CreationFlag flag) {
bool Object::SameValue(Object* other) {
if (other == this) return true;
- if (!IsHeapObject() || !other->IsHeapObject()) return false;
// The object is either a number, a string, an odd-ball,
// a real JS object, or a Harmony proxy.
@@ -795,7 +815,7 @@ void Object::ShortPrint(StringStream* accumulator) {
void Smi::SmiPrint(FILE* out) {
- PrintF(out, "%d", value());
+ FPrintF(out, "%d", value());
}
@@ -810,7 +830,7 @@ void Failure::FailurePrint(StringStream* accumulator) {
void Failure::FailurePrint(FILE* out) {
- PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
+ FPrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
}
@@ -1067,7 +1087,9 @@ void String::StringShortPrint(StringStream* accumulator) {
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
switch (map()->instance_type()) {
case JS_ARRAY_TYPE: {
- double length = JSArray::cast(this)->length()->Number();
+ double length = JSArray::cast(this)->length()->IsUndefined()
+ ? 0
+ : JSArray::cast(this)->length()->Number();
accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
break;
}
@@ -1146,19 +1168,19 @@ void JSObject::PrintElementsTransition(
FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements,
ElementsKind to_kind, FixedArrayBase* to_elements) {
if (from_kind != to_kind) {
- PrintF(file, "elements transition [");
+ FPrintF(file, "elements transition [");
PrintElementsKind(file, from_kind);
- PrintF(file, " -> ");
+ FPrintF(file, " -> ");
PrintElementsKind(file, to_kind);
- PrintF(file, "] in ");
+ FPrintF(file, "] in ");
JavaScriptFrame::PrintTop(file, false, true);
- PrintF(file, " for ");
+ FPrintF(file, " for ");
ShortPrint(file);
- PrintF(file, " from ");
+ FPrintF(file, " from ");
from_elements->ShortPrint(file);
- PrintF(file, " to ");
+ FPrintF(file, " to ");
to_elements->ShortPrint(file);
- PrintF(file, "\n");
+ FPrintF(file, "\n");
}
}
@@ -1318,7 +1340,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
SlicedString::BodyDescriptor::IterateBody(this, v);
break;
case kExternalStringTag:
- if ((type & kStringEncodingMask) == kAsciiStringTag) {
+ if ((type & kStringEncodingMask) == kOneByteStringTag) {
reinterpret_cast<ExternalAsciiString*>(this)->
ExternalAsciiStringIterateBody(v);
} else {
@@ -1392,8 +1414,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case EXTERNAL_DOUBLE_ARRAY_TYPE:
break;
case SHARED_FUNCTION_INFO_TYPE: {
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
- shared->SharedFunctionInfoIterateBody(v);
+ SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
break;
}
@@ -1433,7 +1454,7 @@ Object* HeapNumber::HeapNumberToBoolean() {
void HeapNumber::HeapNumberPrint(FILE* out) {
- PrintF(out, "%.16g", Number());
+ FPrintF(out, "%.16g", Number());
}
@@ -1481,20 +1502,19 @@ String* JSReceiver::constructor_name() {
MaybeObject* JSObject::AddFastPropertyUsingMap(Map* new_map,
String* name,
- Object* value) {
- int index = new_map->PropertyIndexFor(name);
+ Object* value,
+ int field_index) {
if (map()->unused_property_fields() == 0) {
- ASSERT(map()->unused_property_fields() == 0);
int new_unused = new_map->unused_property_fields();
- Object* values;
+ FixedArray* values;
{ MaybeObject* maybe_values =
properties()->CopySize(properties()->length() + new_unused + 1);
- if (!maybe_values->ToObject(&values)) return maybe_values;
+ if (!maybe_values->To(&values)) return maybe_values;
}
- set_properties(FixedArray::cast(values));
+ set_properties(values);
}
set_map(new_map);
- return FastPropertyAtPut(index, value);
+ return FastPropertyAtPut(field_index, value);
}
@@ -1516,96 +1536,66 @@ static bool IsIdentifier(UnicodeCache* cache,
MaybeObject* JSObject::AddFastProperty(String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StoreFromKeyed store_mode) {
ASSERT(!IsJSGlobalProxy());
+ ASSERT(DescriptorArray::kNotFound ==
+ map()->instance_descriptors()->Search(
+ name, map()->NumberOfOwnDescriptors()));
// Normalize the object if the name is an actual string (not the
// hidden symbols) and is not a real identifier.
+ // Normalize the object if it will have too many fast properties.
Isolate* isolate = GetHeap()->isolate();
StringInputBuffer buffer(name);
- if (!IsIdentifier(isolate->unicode_cache(), &buffer)
- && name != isolate->heap()->hidden_symbol()) {
+ if ((!IsIdentifier(isolate->unicode_cache(), &buffer)
+ && name != isolate->heap()->hidden_symbol()) ||
+ (map()->unused_property_fields() == 0 &&
+ TooManyFastProperties(properties()->length(), store_mode))) {
Object* obj;
- { MaybeObject* maybe_obj =
- NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj =
+ NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+
return AddSlowProperty(name, value, attributes);
}
- DescriptorArray* old_descriptors = map()->instance_descriptors();
// Compute the new index for new field.
int index = map()->NextFreePropertyIndex();
// Allocate new instance descriptors with (name, index) added
- FieldDescriptor new_field(name, index, attributes);
- Object* new_descriptors;
- { MaybeObject* maybe_new_descriptors =
- old_descriptors->CopyInsert(&new_field, REMOVE_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- return maybe_new_descriptors;
- }
- }
-
- // Only allow map transition if the object isn't the global object and there
- // is not a transition for the name, or there's a transition for the name but
- // it's unrelated to properties.
- int descriptor_index = old_descriptors->Search(name);
-
- // Element transitions are stored in the descriptor for property "", which is
- // not a identifier and should have forced a switch to slow properties above.
- ASSERT(descriptor_index == DescriptorArray::kNotFound ||
- old_descriptors->GetType(descriptor_index) != ELEMENTS_TRANSITION);
- bool can_insert_transition = descriptor_index == DescriptorArray::kNotFound ||
- old_descriptors->GetType(descriptor_index) == ELEMENTS_TRANSITION;
- bool allow_map_transition =
- can_insert_transition &&
- (isolate->context()->global_context()->object_function()->map() != map());
+ FieldDescriptor new_field(name, index, attributes, 0);
ASSERT(index < map()->inobject_properties() ||
(index - map()->inobject_properties()) < properties()->length() ||
map()->unused_property_fields() == 0);
- // Allocate a new map for the object.
- Object* r;
- { MaybeObject* maybe_r = map()->CopyDropDescriptors();
- if (!maybe_r->ToObject(&r)) return maybe_r;
- }
- Map* new_map = Map::cast(r);
- if (allow_map_transition) {
- // Allocate new instance descriptors for the old map with map transition.
- MapTransitionDescriptor d(name, Map::cast(new_map), attributes);
- Object* r;
- { MaybeObject* maybe_r = old_descriptors->CopyInsert(&d, KEEP_TRANSITIONS);
- if (!maybe_r->ToObject(&r)) return maybe_r;
- }
- old_descriptors = DescriptorArray::cast(r);
- }
+
+ FixedArray* values = NULL;
if (map()->unused_property_fields() == 0) {
- if (properties()->length() > MaxFastProperties()) {
- Object* obj;
- { MaybeObject* maybe_obj =
- NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- return AddSlowProperty(name, value, attributes);
- }
// Make room for the new value
- Object* values;
- { MaybeObject* maybe_values =
- properties()->CopySize(properties()->length() + kFieldsAdded);
- if (!maybe_values->ToObject(&values)) return maybe_values;
- }
- set_properties(FixedArray::cast(values));
+ MaybeObject* maybe_values =
+ properties()->CopySize(properties()->length() + kFieldsAdded);
+ if (!maybe_values->To(&values)) return maybe_values;
+ }
+
+ // Only allow map transition if the object isn't the global object.
+ TransitionFlag flag = isolate->empty_object_map() != map()
+ ? INSERT_TRANSITION
+ : OMIT_TRANSITION;
+
+ Map* new_map;
+ MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&new_field, flag);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
+ if (map()->unused_property_fields() == 0) {
+ ASSERT(values != NULL);
+ set_properties(values);
new_map->set_unused_property_fields(kFieldsAdded - 1);
} else {
new_map->set_unused_property_fields(map()->unused_property_fields() - 1);
}
- // We have now allocated all the necessary objects.
- // All the changes can be applied at once, so they are atomic.
- map()->set_instance_descriptors(old_descriptors);
- new_map->SetBackPointer(map());
- new_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+
set_map(new_map);
return FastPropertyAtPut(index, value);
}
@@ -1616,57 +1606,26 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
JSFunction* function,
PropertyAttributes attributes) {
// Allocate new instance descriptors with (name, function) added
- ConstantFunctionDescriptor d(name, function, attributes);
- Object* new_descriptors;
- { MaybeObject* maybe_new_descriptors =
- map()->instance_descriptors()->CopyInsert(&d, REMOVE_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- return maybe_new_descriptors;
- }
- }
-
- // Allocate a new map for the object.
- Object* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
-
- DescriptorArray* descriptors = DescriptorArray::cast(new_descriptors);
- Map::cast(new_map)->set_instance_descriptors(descriptors);
- Map* old_map = map();
- set_map(Map::cast(new_map));
+ ConstantFunctionDescriptor d(name, function, attributes, 0);
- // If the old map is the global object map (from new Object()),
- // then transitions are not added to it, so we are done.
Heap* heap = GetHeap();
- if (old_map == heap->isolate()->context()->global_context()->
- object_function()->map()) {
- return function;
- }
-
- // Do not add CONSTANT_TRANSITIONS to global objects
- if (IsGlobalObject()) {
- return function;
- }
+ TransitionFlag flag =
+ // Do not add transitions to the empty object map (map of "new Object()"),
+ // nor to global objects.
+ (map() == heap->isolate()->empty_object_map() || IsGlobalObject() ||
+ // Don't add transitions to special properties with non-trivial
+ // attributes.
+ // TODO(verwaest): Once we support attribute changes, these transitions
+ // should be kept as well.
+ attributes != NONE)
+ ? OMIT_TRANSITION
+ : INSERT_TRANSITION;
- // Add a CONSTANT_TRANSITION descriptor to the old map,
- // so future assignments to this property on other objects
- // of the same type will create a normal field, not a constant function.
- // Don't do this for special properties, with non-trival attributes.
- if (attributes != NONE) {
- return function;
- }
- ConstTransitionDescriptor mark(name, Map::cast(new_map));
- { MaybeObject* maybe_new_descriptors =
- old_map->instance_descriptors()->CopyInsert(&mark, KEEP_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- // We have accomplished the main goal, so return success.
- return function;
- }
- }
- old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
- Map::cast(new_map)->SetBackPointer(old_map);
+ Map* new_map;
+ MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&d, flag);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ set_map(new_map);
return function;
}
@@ -1712,11 +1671,15 @@ MaybeObject* JSObject::AddSlowProperty(String* name,
MaybeObject* JSObject::AddProperty(String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode,
+ ExtensibilityCheck extensibility_check) {
ASSERT(!IsJSGlobalProxy());
Map* map_of_this = map();
Heap* heap = GetHeap();
- if (!map_of_this->is_extensible()) {
+ MaybeObject* result;
+ if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
+ !map_of_this->is_extensible()) {
if (strict_mode == kNonStrictMode) {
return value;
} else {
@@ -1726,28 +1689,70 @@ MaybeObject* JSObject::AddProperty(String* name,
HandleVector(args, 1)));
}
}
+
if (HasFastProperties()) {
// Ensure the descriptor array does not get too big.
- if (map_of_this->instance_descriptors()->number_of_descriptors() <
+ if (map_of_this->NumberOfOwnDescriptors() <
DescriptorArray::kMaxNumberOfDescriptors) {
if (value->IsJSFunction()) {
- return AddConstantFunctionProperty(name,
- JSFunction::cast(value),
- attributes);
+ result = AddConstantFunctionProperty(name,
+ JSFunction::cast(value),
+ attributes);
} else {
- return AddFastProperty(name, value, attributes);
+ result = AddFastProperty(name, value, attributes, store_mode);
}
} else {
// Normalize the object to prevent very large instance descriptors.
// This eliminates unwanted N^2 allocation and lookup behavior.
Object* obj;
- { MaybeObject* maybe_obj =
- NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+ if (!maybe->To(&obj)) return maybe;
+ result = AddSlowProperty(name, value, attributes);
}
+ } else {
+ result = AddSlowProperty(name, value, attributes);
+ }
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ EnqueueChangeRecord(handle(this), "new", handle(name),
+ handle(heap->the_hole_value()));
}
- return AddSlowProperty(name, value, attributes);
+
+ return *hresult;
+}
+
+
+void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
+ const char* type_str,
+ Handle<String> name,
+ Handle<Object> old_value) {
+ Isolate* isolate = object->GetIsolate();
+ HandleScope scope;
+ Handle<String> type = isolate->factory()->LookupAsciiSymbol(type_str);
+ Handle<Object> args[] = { type, object, name, old_value };
+ bool threw;
+ Execution::Call(Handle<JSFunction>(isolate->observers_notify_change()),
+ Handle<Object>(isolate->heap()->undefined_value()),
+ old_value->IsTheHole() ? 3 : 4, args,
+ &threw);
+ ASSERT(!threw);
+}
+
+
+void JSObject::DeliverChangeRecords(Isolate* isolate) {
+ ASSERT(isolate->observer_delivery_pending());
+ bool threw = false;
+ Execution::Call(
+ isolate->observers_deliver_changes(),
+ isolate->factory()->undefined_value(),
+ 0,
+ NULL,
+ &threw);
+ ASSERT(!threw);
+ isolate->set_observer_delivery_pending(false);
}
@@ -1755,25 +1760,25 @@ MaybeObject* JSObject::SetPropertyPostInterceptor(
String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ ExtensibilityCheck extensibility_check) {
// Check local property, ignore interceptor.
LookupResult result(GetIsolate());
LocalLookupRealNamedProperty(name, &result);
+ if (!result.IsFound()) map()->LookupTransition(this, name, &result);
if (result.IsFound()) {
- // An existing property, a map transition or a null descriptor was
- // found. Use set property to handle all these cases.
+ // An existing property or a map transition was found. Use set property to
+ // handle all these cases.
return SetProperty(&result, name, value, attributes, strict_mode);
}
- bool found = false;
+ bool done = false;
MaybeObject* result_object;
- result_object = SetPropertyWithCallbackSetterInPrototypes(name,
- value,
- attributes,
- &found,
- strict_mode);
- if (found) return result_object;
+ result_object =
+ SetPropertyViaPrototypes(name, value, attributes, strict_mode, &done);
+ if (done) return result_object;
// Add a new real property.
- return AddProperty(name, value, attributes, strict_mode);
+ return AddProperty(name, value, attributes, strict_mode,
+ MAY_BE_STORE_FROM_KEYED, extensibility_check);
}
@@ -1785,8 +1790,7 @@ MaybeObject* JSObject::ReplaceSlowProperty(String* name,
int new_enumeration_index = 0; // 0 means "Use the next available index."
if (old_index != -1) {
// All calls to ReplaceSlowProperty have had all transitions removed.
- ASSERT(!dictionary->ContainsTransition(old_index));
- new_enumeration_index = dictionary->DetailsAt(old_index).index();
+ new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
}
PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
@@ -1794,39 +1798,52 @@ MaybeObject* JSObject::ReplaceSlowProperty(String* name,
}
-MaybeObject* JSObject::ConvertDescriptorToFieldAndMapTransition(
+MaybeObject* JSObject::ConvertTransitionToMapTransition(
+ int transition_index,
String* name,
Object* new_value,
PropertyAttributes attributes) {
Map* old_map = map();
+ Map* old_target = old_map->GetTransition(transition_index);
Object* result;
- { MaybeObject* maybe_result =
- ConvertDescriptorToField(name, new_value, attributes);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- // If we get to this point we have succeeded - do not return failure
- // after this point. Later stuff is optional.
- if (!HasFastProperties()) {
- return result;
- }
- // Do not add transitions to the map of "new Object()".
- if (map() == GetIsolate()->context()->global_context()->
- object_function()->map()) {
- return result;
- }
- MapTransitionDescriptor transition(name,
- map(),
- attributes);
- Object* new_descriptors;
- { MaybeObject* maybe_new_descriptors = old_map->instance_descriptors()->
- CopyInsert(&transition, KEEP_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- return result; // Yes, return _result_.
- }
- }
- old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
- map()->SetBackPointer(old_map);
+ MaybeObject* maybe_result =
+ ConvertDescriptorToField(name, new_value, attributes);
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ if (!HasFastProperties()) return result;
+
+ // This method should only be used to convert existing transitions. Objects
+ // with the map of "new Object()" cannot have transitions in the first place.
+ Map* new_map = map();
+ ASSERT(new_map != GetIsolate()->empty_object_map());
+
+ // TODO(verwaest): From here on we lose existing map transitions, causing
+ // invalid back pointers. This will change once we can store multiple
+ // transitions with the same key.
+
+ bool owned_descriptors = old_map->owns_descriptors();
+ if (owned_descriptors ||
+ old_target->instance_descriptors() == old_map->instance_descriptors()) {
+ // Since the conversion above generated a new fast map with an additional
+ // property which can be shared as well, install this descriptor pointer
+ // along the entire chain of smaller maps.
+ Map* map;
+ DescriptorArray* new_descriptors = new_map->instance_descriptors();
+ DescriptorArray* old_descriptors = old_map->instance_descriptors();
+ for (Object* current = old_map;
+ !current->IsUndefined();
+ current = map->GetBackPointer()) {
+ map = Map::cast(current);
+ if (map->instance_descriptors() != old_descriptors) break;
+ map->SetEnumLength(Map::kInvalidEnumCache);
+ map->set_instance_descriptors(new_descriptors);
+ }
+ old_map->set_owns_descriptors(false);
+ }
+
+ old_map->SetTransition(transition_index, new_map);
+ new_map->SetBackPointer(old_map);
return result;
}
@@ -1835,59 +1852,38 @@ MaybeObject* JSObject::ConvertDescriptorToField(String* name,
Object* new_value,
PropertyAttributes attributes) {
if (map()->unused_property_fields() == 0 &&
- properties()->length() > MaxFastProperties()) {
+ TooManyFastProperties(properties()->length(), MAY_BE_STORE_FROM_KEYED)) {
Object* obj;
- { MaybeObject* maybe_obj =
- NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
return ReplaceSlowProperty(name, new_value, attributes);
}
int index = map()->NextFreePropertyIndex();
- FieldDescriptor new_field(name, index, attributes);
- // Make a new DescriptorArray replacing an entry with FieldDescriptor.
- Object* descriptors_unchecked;
- { MaybeObject* maybe_descriptors_unchecked = map()->instance_descriptors()->
- CopyInsert(&new_field, REMOVE_TRANSITIONS);
- if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) {
- return maybe_descriptors_unchecked;
- }
- }
- DescriptorArray* new_descriptors =
- DescriptorArray::cast(descriptors_unchecked);
+ FieldDescriptor new_field(name, index, attributes, 0);
// Make a new map for the object.
- Object* new_map_unchecked;
- { MaybeObject* maybe_new_map_unchecked = map()->CopyDropDescriptors();
- if (!maybe_new_map_unchecked->ToObject(&new_map_unchecked)) {
- return maybe_new_map_unchecked;
- }
- }
- Map* new_map = Map::cast(new_map_unchecked);
- new_map->set_instance_descriptors(new_descriptors);
+ Map* new_map;
+ MaybeObject* maybe_new_map = map()->CopyInsertDescriptor(&new_field,
+ OMIT_TRANSITION);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
// Make new properties array if necessary.
- FixedArray* new_properties = 0; // Will always be NULL or a valid pointer.
+ FixedArray* new_properties = NULL;
int new_unused_property_fields = map()->unused_property_fields() - 1;
if (map()->unused_property_fields() == 0) {
new_unused_property_fields = kFieldsAdded - 1;
- Object* new_properties_object;
- { MaybeObject* maybe_new_properties_object =
- properties()->CopySize(properties()->length() + kFieldsAdded);
- if (!maybe_new_properties_object->ToObject(&new_properties_object)) {
- return maybe_new_properties_object;
- }
- }
- new_properties = FixedArray::cast(new_properties_object);
+ MaybeObject* maybe_new_properties =
+ properties()->CopySize(properties()->length() + kFieldsAdded);
+ if (!maybe_new_properties->To(&new_properties)) return maybe_new_properties;
}
// Update pointers to commit changes.
// Object points to the new map.
new_map->set_unused_property_fields(new_unused_property_fields);
set_map(new_map);
- if (new_properties) {
- set_properties(FixedArray::cast(new_properties));
+ if (new_properties != NULL) {
+ set_properties(new_properties);
}
return FastPropertyAtPut(index, new_value);
}
@@ -1930,7 +1926,8 @@ MaybeObject* JSObject::SetPropertyWithInterceptor(
this_handle->SetPropertyPostInterceptor(*name_handle,
*value_handle,
attributes,
- strict_mode);
+ strict_mode,
+ PERFORM_EXTENSIBILITY_CHECK);
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return raw_result;
}
@@ -1944,7 +1941,7 @@ Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
bool skip_fallback_interceptor) {
CALL_HEAP_FUNCTION(object->GetIsolate(),
object->SetProperty(*key, *value, attributes, strict_mode,
- skip_fallback_interceptor),
+ MAY_BE_STORE_FROM_KEYED, skip_fallback_interceptor),
Object);
}
@@ -1953,10 +1950,14 @@ MaybeObject* JSReceiver::SetProperty(String* name,
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode,
bool skip_fallback_interceptor) {
LookupResult result(GetIsolate());
LocalLookup(name, &result, skip_fallback_interceptor);
- return SetProperty(&result, name, value, attributes, strict_mode);
+ if (!result.IsFound()) {
+ map()->LookupTransition(JSObject::cast(this), name, &result);
+ }
+ return SetProperty(&result, name, value, attributes, strict_mode, store_mode);
}
@@ -1989,6 +1990,16 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
if (structure->IsAccessorInfo()) {
// api style callbacks
AccessorInfo* data = AccessorInfo::cast(structure);
+ if (!data->IsCompatibleReceiver(this)) {
+ Handle<Object> name_handle(name);
+ Handle<Object> receiver_handle(this);
+ Handle<Object> args[2] = { name_handle, receiver_handle };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("incompatible_method_receiver",
+ HandleVector(args,
+ ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
Object* call_obj = data->setter();
v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
if (call_fun == NULL) return value;
@@ -2054,26 +2065,6 @@ MaybeObject* JSReceiver::SetPropertyWithDefinedSetter(JSReceiver* setter,
}
-void JSObject::LookupCallbackSetterInPrototypes(String* name,
- LookupResult* result) {
- Heap* heap = GetHeap();
- for (Object* pt = GetPrototype();
- pt != heap->null_value();
- pt = pt->GetPrototype()) {
- if (pt->IsJSProxy()) {
- return result->HandlerResult(JSProxy::cast(pt));
- }
- JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty()) {
- if (result->type() == CALLBACKS && !result->IsReadOnly()) return;
- // Found non-callback or read-only callback, stop looking.
- break;
- }
- }
- result->NotFound();
-}
-
-
MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
uint32_t index,
Object* value,
@@ -2090,8 +2081,8 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
*found = true; // Force abort
return maybe;
}
- return JSProxy::cast(pt)->SetPropertyWithHandlerIfDefiningSetter(
- name, value, NONE, strict_mode, found);
+ return JSProxy::cast(pt)->SetPropertyViaPrototypesWithHandler(
+ this, name, value, NONE, strict_mode, found);
}
if (!JSObject::cast(pt)->HasDictionaryElements()) {
continue;
@@ -2115,76 +2106,167 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
return heap->the_hole_value();
}
-MaybeObject* JSObject::SetPropertyWithCallbackSetterInPrototypes(
+MaybeObject* JSObject::SetPropertyViaPrototypes(
String* name,
Object* value,
PropertyAttributes attributes,
- bool* found,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ bool* done) {
Heap* heap = GetHeap();
+ Isolate* isolate = heap->isolate();
+
+ *done = false;
// We could not find a local property so let's check whether there is an
- // accessor that wants to handle the property.
- LookupResult accessor_result(heap->isolate());
- LookupCallbackSetterInPrototypes(name, &accessor_result);
- if (accessor_result.IsFound()) {
- *found = true;
- if (accessor_result.type() == CALLBACKS) {
- return SetPropertyWithCallback(accessor_result.GetCallbackObject(),
- name,
- value,
- accessor_result.holder(),
- strict_mode);
- } else if (accessor_result.type() == HANDLER) {
- // There is a proxy in the prototype chain. Invoke its
- // getPropertyDescriptor trap.
- bool found = false;
- // SetPropertyWithHandlerIfDefiningSetter can cause GC,
- // make sure to use the handlified references after calling
- // the function.
- Handle<JSObject> self(this);
- Handle<String> hname(name);
- Handle<Object> hvalue(value);
- MaybeObject* result =
- accessor_result.proxy()->SetPropertyWithHandlerIfDefiningSetter(
- name, value, attributes, strict_mode, &found);
- if (found) return result;
- // The proxy does not define the property as an accessor.
- // Consequently, it has no effect on setting the receiver.
- return self->AddProperty(*hname, *hvalue, attributes, strict_mode);
+ // accessor that wants to handle the property, or whether the property is
+ // read-only on the prototype chain.
+ LookupResult result(isolate);
+ LookupRealNamedPropertyInPrototypes(name, &result);
+ if (result.IsFound()) {
+ switch (result.type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ *done = result.IsReadOnly();
+ break;
+ case INTERCEPTOR: {
+ PropertyAttributes attr =
+ result.holder()->GetPropertyAttributeWithInterceptor(
+ this, name, true);
+ *done = !!(attr & READ_ONLY);
+ break;
+ }
+ case CALLBACKS: {
+ if (!FLAG_es5_readonly && result.IsReadOnly()) break;
+ *done = true;
+ return SetPropertyWithCallback(result.GetCallbackObject(),
+ name, value, result.holder(), strict_mode);
+ }
+ case HANDLER: {
+ return result.proxy()->SetPropertyViaPrototypesWithHandler(
+ this, name, value, attributes, strict_mode, done);
+ }
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
+ break;
}
}
- *found = false;
+
+ // If we get here with *done true, we have encountered a read-only property.
+ if (!FLAG_es5_readonly) *done = false;
+ if (*done) {
+ if (strict_mode == kNonStrictMode) return value;
+ Handle<Object> args[] = { Handle<Object>(name), Handle<Object>(this)};
+ return isolate->Throw(*isolate->factory()->NewTypeError(
+ "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))));
+ }
return heap->the_hole_value();
}
-void JSObject::LookupInDescriptor(String* name, LookupResult* result) {
- DescriptorArray* descriptors = map()->instance_descriptors();
- int number = descriptors->SearchWithCache(name);
- if (number != DescriptorArray::kNotFound) {
- result->DescriptorResult(this, descriptors->GetDetails(number), number);
- } else {
- result->NotFound();
+enum RightTrimMode { FROM_GC, FROM_MUTATOR };
+
+
+static void ZapEndOfFixedArray(Address new_end, int to_trim) {
+ // If we are doing a big trim in old space then we zap the space.
+ Object** zap = reinterpret_cast<Object**>(new_end);
+ zap++; // Header of filler must be at least one word so skip that.
+ for (int i = 1; i < to_trim; i++) {
+ *zap++ = Smi::FromInt(0);
}
}
-void Map::LookupInDescriptors(JSObject* holder,
- String* name,
- LookupResult* result) {
- DescriptorArray* descriptors = instance_descriptors();
- DescriptorLookupCache* cache =
- GetHeap()->isolate()->descriptor_lookup_cache();
- int number = cache->Lookup(descriptors, name);
- if (number == DescriptorLookupCache::kAbsent) {
- number = descriptors->Search(name);
- cache->Update(descriptors, name, number);
- }
- if (number != DescriptorArray::kNotFound) {
- result->DescriptorResult(holder, descriptors->GetDetails(number), number);
- } else {
- result->NotFound();
+template<RightTrimMode trim_mode>
+static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
+ ASSERT(elms->map() != HEAP->fixed_cow_array_map());
+ // For now this trick is only applied to fixed arrays in new and paged space.
+ ASSERT(!HEAP->lo_space()->Contains(elms));
+
+ const int len = elms->length();
+
+ ASSERT(to_trim < len);
+
+ Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
+
+ if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
+ ZapEndOfFixedArray(new_end, to_trim);
}
+
+ int size_delta = to_trim * kPointerSize;
+
+ // Technically in new space this write might be omitted (except for
+ // debug mode which iterates through the heap), but to play safer
+ // we still do it.
+ heap->CreateFillerObjectAt(new_end, size_delta);
+
+ elms->set_length(len - to_trim);
+
+ // Maintain marking consistency for IncrementalMarking.
+ if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
+ if (trim_mode == FROM_GC) {
+ MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
+ } else {
+ MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
+ }
+ }
+}
+
+
+void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
+ Handle<DescriptorArray> descriptors(map->instance_descriptors());
+ if (slack <= descriptors->NumberOfSlackDescriptors()) return;
+ int number_of_descriptors = descriptors->number_of_descriptors();
+ Isolate* isolate = map->GetIsolate();
+ Handle<DescriptorArray> new_descriptors =
+ isolate->factory()->NewDescriptorArray(number_of_descriptors, slack);
+ DescriptorArray::WhitenessWitness witness(*new_descriptors);
+
+ for (int i = 0; i < number_of_descriptors; ++i) {
+ new_descriptors->CopyFrom(i, *descriptors, i, witness);
+ }
+
+ map->set_instance_descriptors(*new_descriptors);
+}
+
+
+void Map::AppendCallbackDescriptors(Handle<Map> map,
+ Handle<Object> descriptors) {
+ Isolate* isolate = map->GetIsolate();
+ Handle<DescriptorArray> array(map->instance_descriptors());
+ NeanderArray callbacks(descriptors);
+ int nof_callbacks = callbacks.length();
+
+ ASSERT(array->NumberOfSlackDescriptors() >= nof_callbacks);
+
+ // Ensure the keys are symbols before writing them into the instance
+ // descriptor. Since it may cause a GC, it has to be done before we
+ // temporarily put the heap in an invalid state while appending descriptors.
+ for (int i = 0; i < nof_callbacks; ++i) {
+ Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks.get(i)));
+ Handle<String> key =
+ isolate->factory()->SymbolFromString(
+ Handle<String>(String::cast(entry->name())));
+ entry->set_name(*key);
+ }
+
+ int nof = map->NumberOfOwnDescriptors();
+
+ // Fill in new callback descriptors. Process the callbacks from
+ // back to front so that the last callback with a given name takes
+ // precedence over previously added callbacks with that name.
+ for (int i = nof_callbacks - 1; i >= 0; i--) {
+ AccessorInfo* entry = AccessorInfo::cast(callbacks.get(i));
+ String* key = String::cast(entry->name());
+ // Check if a descriptor with this name already exists before writing.
+ if (array->Search(key, nof) == DescriptorArray::kNotFound) {
+ CallbacksDescriptor desc(key, entry, entry->property_attributes());
+ array->Append(&desc);
+ nof += 1;
+ }
+ }
+
+ map->SetNumberOfOwnDescriptors(nof);
}
@@ -2205,217 +2287,88 @@ static Handle<T> MaybeNull(T* p) {
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
- ElementsKind elms_kind = elements_kind();
- if (elms_kind == FAST_DOUBLE_ELEMENTS) {
- bool dummy = true;
- Handle<Map> fast_map =
- MaybeNull(LookupElementsTransitionMap(FAST_ELEMENTS, &dummy));
- if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
- return fast_map;
- }
- return Handle<Map>::null();
- }
- if (elms_kind == FAST_SMI_ONLY_ELEMENTS) {
- bool dummy = true;
- Handle<Map> double_map =
- MaybeNull(LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, &dummy));
- // In the current implementation, if the DOUBLE map doesn't exist, the
- // FAST map can't exist either.
- if (double_map.is_null()) return Handle<Map>::null();
- Handle<Map> fast_map =
- MaybeNull(double_map->LookupElementsTransitionMap(FAST_ELEMENTS,
- &dummy));
- if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
- return fast_map;
- }
- if (ContainsMap(candidates, double_map)) return double_map;
- }
- return Handle<Map>::null();
-}
-
-static Map* GetElementsTransitionMapFromDescriptor(Object* descriptor_contents,
- ElementsKind elements_kind) {
- if (descriptor_contents->IsMap()) {
- Map* map = Map::cast(descriptor_contents);
- if (map->elements_kind() == elements_kind) {
- return map;
- }
- return NULL;
- }
-
- FixedArray* map_array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < map_array->length(); ++i) {
- Object* current = map_array->get(i);
- // Skip undefined slots, they are sentinels for reclaimed maps.
- if (!current->IsUndefined()) {
- Map* current_map = Map::cast(map_array->get(i));
- if (current_map->elements_kind() == elements_kind) {
- return current_map;
+ ElementsKind kind = elements_kind();
+ Handle<Map> transitioned_map = Handle<Map>::null();
+ Handle<Map> current_map(this);
+ bool packed = IsFastPackedElementsKind(kind);
+ if (IsTransitionableFastElementsKind(kind)) {
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, false);
+ Handle<Map> maybe_transitioned_map =
+ MaybeNull(current_map->LookupElementsTransitionMap(kind));
+ if (maybe_transitioned_map.is_null()) break;
+ if (ContainsMap(candidates, maybe_transitioned_map) &&
+ (packed || !IsFastPackedElementsKind(kind))) {
+ transitioned_map = maybe_transitioned_map;
+ if (!IsFastPackedElementsKind(kind)) packed = false;
}
+ current_map = maybe_transitioned_map;
}
}
-
- return NULL;
+ return transitioned_map;
}
-static MaybeObject* AddElementsTransitionMapToDescriptor(
- Object* descriptor_contents,
- Map* new_map) {
- // Nothing was in the descriptor for an ELEMENTS_TRANSITION,
- // simply add the map.
- if (descriptor_contents == NULL) {
- return new_map;
- }
-
- // There was already a map in the descriptor, create a 2-element FixedArray
- // to contain the existing map plus the new one.
- FixedArray* new_array;
- Heap* heap = new_map->GetHeap();
- if (descriptor_contents->IsMap()) {
- // Must tenure, DescriptorArray expects no new-space objects.
- MaybeObject* maybe_new_array = heap->AllocateFixedArray(2, TENURED);
- if (!maybe_new_array->To<FixedArray>(&new_array)) {
- return maybe_new_array;
- }
- new_array->set(0, descriptor_contents);
- new_array->set(1, new_map);
- return new_array;
- }
+static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
+ Map* current_map = map;
+ int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
+ int to_index = IsFastElementsKind(to_kind)
+ ? GetSequenceIndexFromFastElementsKind(to_kind)
+ : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
- // The descriptor already contained a list of maps for different ElementKinds
- // of ELEMENTS_TRANSITION, first check the existing array for an undefined
- // slot, and if that's not available, create a FixedArray to hold the existing
- // maps plus the new one and fill it in.
- FixedArray* array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < array->length(); ++i) {
- if (array->get(i)->IsUndefined()) {
- array->set(i, new_map);
- return array;
- }
- }
+ ASSERT(index <= to_index);
- // Must tenure, DescriptorArray expects no new-space objects.
- MaybeObject* maybe_new_array =
- heap->AllocateFixedArray(array->length() + 1, TENURED);
- if (!maybe_new_array->To<FixedArray>(&new_array)) {
- return maybe_new_array;
+ for (; index < to_index; ++index) {
+ if (!current_map->HasElementsTransition()) return current_map;
+ current_map = current_map->elements_transition_map();
}
- int i = 0;
- while (i < array->length()) {
- new_array->set(i, array->get(i));
- ++i;
+ if (!IsFastElementsKind(to_kind) && current_map->HasElementsTransition()) {
+ Map* next_map = current_map->elements_transition_map();
+ if (next_map->elements_kind() == to_kind) return next_map;
}
- new_array->set(i, new_map);
- return new_array;
+ ASSERT(IsFastElementsKind(to_kind)
+ ? current_map->elements_kind() == to_kind
+ : current_map->elements_kind() == TERMINAL_FAST_ELEMENTS_KIND);
+ return current_map;
}
-String* Map::elements_transition_sentinel_name() {
- return GetHeap()->empty_symbol();
+Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
+ Map* to_map = FindClosestElementsTransition(this, to_kind);
+ if (to_map->elements_kind() == to_kind) return to_map;
+ return NULL;
}
-Object* Map::GetDescriptorContents(String* sentinel_name,
- bool* safe_to_add_transition) {
- // Get the cached index for the descriptors lookup, or find and cache it.
- DescriptorArray* descriptors = instance_descriptors();
- DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
- int index = cache->Lookup(descriptors, sentinel_name);
- if (index == DescriptorLookupCache::kAbsent) {
- index = descriptors->Search(sentinel_name);
- cache->Update(descriptors, sentinel_name, index);
- }
- // If the transition already exists, return its descriptor.
- if (index != DescriptorArray::kNotFound) {
- PropertyDetails details = descriptors->GetDetails(index);
- if (details.type() == ELEMENTS_TRANSITION) {
- return descriptors->GetValue(index);
- } else {
- if (safe_to_add_transition != NULL) {
- *safe_to_add_transition = false;
- }
- }
- }
- return NULL;
-}
+static MaybeObject* AddMissingElementsTransitions(Map* map,
+ ElementsKind to_kind) {
+ ASSERT(IsFastElementsKind(map->elements_kind()));
+ int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
+ int to_index = IsFastElementsKind(to_kind)
+ ? GetSequenceIndexFromFastElementsKind(to_kind)
+ : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
+ ASSERT(index <= to_index);
-Map* Map::LookupElementsTransitionMap(ElementsKind elements_kind,
- bool* safe_to_add_transition) {
- // Special case: indirect SMI->FAST transition (cf. comment in
- // AddElementsTransition()).
- if (this->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- elements_kind == FAST_ELEMENTS) {
- Map* double_map = this->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS,
- safe_to_add_transition);
- if (double_map == NULL) return double_map;
- return double_map->LookupElementsTransitionMap(FAST_ELEMENTS,
- safe_to_add_transition);
- }
- Object* descriptor_contents = GetDescriptorContents(
- elements_transition_sentinel_name(), safe_to_add_transition);
- if (descriptor_contents != NULL) {
- Map* maybe_transition_map =
- GetElementsTransitionMapFromDescriptor(descriptor_contents,
- elements_kind);
- ASSERT(maybe_transition_map == NULL || maybe_transition_map->IsMap());
- return maybe_transition_map;
+ Map* current_map = map;
+
+ for (; index < to_index; ++index) {
+ ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(index + 1);
+ MaybeObject* maybe_next_map =
+ current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
+ if (!maybe_next_map->To(&current_map)) return maybe_next_map;
}
- return NULL;
-}
+ // In case we are exiting the fast elements kind system, just add the map in
+ // the end.
+ if (!IsFastElementsKind(to_kind)) {
+ MaybeObject* maybe_next_map =
+ current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION);
+ if (!maybe_next_map->To(&current_map)) return maybe_next_map;
+ }
-MaybeObject* Map::AddElementsTransition(ElementsKind elements_kind,
- Map* transitioned_map) {
- // The map transition graph should be a tree, therefore the transition
- // from SMI to FAST elements is not done directly, but by going through
- // DOUBLE elements first.
- if (this->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- elements_kind == FAST_ELEMENTS) {
- bool safe_to_add = true;
- Map* double_map = this->LookupElementsTransitionMap(
- FAST_DOUBLE_ELEMENTS, &safe_to_add);
- // This method is only called when safe_to_add_transition has been found
- // to be true earlier.
- ASSERT(safe_to_add);
-
- if (double_map == NULL) {
- MaybeObject* maybe_map = this->CopyDropTransitions();
- if (!maybe_map->To(&double_map)) return maybe_map;
- double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
- MaybeObject* maybe_double_transition = this->AddElementsTransition(
- FAST_DOUBLE_ELEMENTS, double_map);
- if (maybe_double_transition->IsFailure()) return maybe_double_transition;
- }
- return double_map->AddElementsTransition(FAST_ELEMENTS, transitioned_map);
- }
-
- bool safe_to_add_transition = true;
- Object* descriptor_contents = GetDescriptorContents(
- elements_transition_sentinel_name(), &safe_to_add_transition);
- // This method is only called when safe_to_add_transition has been found
- // to be true earlier.
- ASSERT(safe_to_add_transition);
- MaybeObject* maybe_new_contents =
- AddElementsTransitionMapToDescriptor(descriptor_contents,
- transitioned_map);
- Object* new_contents;
- if (!maybe_new_contents->ToObject(&new_contents)) {
- return maybe_new_contents;
- }
-
- ElementsTransitionDescriptor desc(elements_transition_sentinel_name(),
- new_contents);
- Object* new_descriptors;
- MaybeObject* maybe_new_descriptors =
- instance_descriptors()->CopyInsert(&desc, KEEP_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- return maybe_new_descriptors;
- }
- set_instance_descriptors(DescriptorArray::cast(new_descriptors));
- transitioned_map->SetBackPointer(this);
- return this;
+ ASSERT(current_map->elements_kind() == to_kind);
+ return current_map;
}
@@ -2429,56 +2382,39 @@ Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
- Map* current_map = map();
- ElementsKind from_kind = current_map->elements_kind();
+ Map* start_map = map();
+ ElementsKind from_kind = start_map->elements_kind();
- if (from_kind == to_kind) return current_map;
+ if (from_kind == to_kind) {
+ return start_map;
+ }
- // Only objects with FastProperties can have DescriptorArrays and can track
- // element-related maps. Also don't add descriptors to maps that are shared.
- bool safe_to_add_transition = HasFastProperties() &&
- !current_map->IsUndefined() &&
- !current_map->is_shared();
+ bool allow_store_transition =
+ // Only remember the map transition if the object's map is NOT equal to
+ // the global object_function's map and there is not an already existing
+ // non-matching element transition.
+ (GetIsolate()->empty_object_map() != map()) &&
+ !start_map->IsUndefined() && !start_map->is_shared() &&
+ IsFastElementsKind(from_kind);
- // Prevent long chains of DICTIONARY -> FAST_ELEMENTS maps caused by objects
- // with elements that switch back and forth between dictionary and fast
- // element mode.
- if (from_kind == DICTIONARY_ELEMENTS && to_kind == FAST_ELEMENTS) {
- safe_to_add_transition = false;
+ // Only store fast element maps in ascending generality.
+ if (IsFastElementsKind(to_kind)) {
+ allow_store_transition &=
+ IsTransitionableFastElementsKind(from_kind) &&
+ IsMoreGeneralElementsKindTransition(from_kind, to_kind);
}
- if (safe_to_add_transition) {
- // It's only safe to manipulate the descriptor array if it would be
- // safe to add a transition.
- Map* maybe_transition_map = current_map->LookupElementsTransitionMap(
- to_kind, &safe_to_add_transition);
- if (maybe_transition_map != NULL) {
- return maybe_transition_map;
- }
+ if (!allow_store_transition) {
+ return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION);
}
- Map* new_map = NULL;
+ Map* closest_map = FindClosestElementsTransition(start_map, to_kind);
- // No transition to an existing map for the given ElementsKind. Make a new
- // one.
- { MaybeObject* maybe_map = current_map->CopyDropTransitions();
- if (!maybe_map->To(&new_map)) return maybe_map;
+ if (closest_map->elements_kind() == to_kind) {
+ return closest_map;
}
- new_map->set_elements_kind(to_kind);
-
- // Only remember the map transition if the object's map is NOT equal to the
- // global object_function's map and there is not an already existing
- // non-matching element transition.
- Context* global_context = GetIsolate()->context()->global_context();
- bool allow_map_transition = safe_to_add_transition &&
- (global_context->object_function()->map() != map());
- if (allow_map_transition) {
- MaybeObject* maybe_transition =
- current_map->AddElementsTransition(to_kind, new_map);
- if (maybe_transition->IsFailure()) return maybe_transition;
- }
- return new_map;
+ return AddMissingElementsTransitions(closest_map, to_kind);
}
@@ -2493,47 +2429,47 @@ void JSObject::LocalLookupRealNamedProperty(String* name,
}
if (HasFastProperties()) {
- LookupInDescriptor(name, result);
- if (result->IsFound()) {
- // A property, a map transition or a null descriptor was found.
- // We return all of these result types because
- // LocalLookupRealNamedProperty is used when setting properties
- // where map transitions and null descriptors are handled.
- ASSERT(result->holder() == this && result->type() != NORMAL);
- // Disallow caching for uninitialized constants. These can only
- // occur as fields.
- if (result->IsReadOnly() && result->type() == FIELD &&
- FastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
- result->DisallowCaching();
- }
- return;
+ map()->LookupDescriptor(this, name, result);
+ // A property or a map transition was found. We return all of these result
+ // types because LocalLookupRealNamedProperty is used when setting
+ // properties where map transitions are handled.
+ ASSERT(!result->IsFound() ||
+ (result->holder() == this && result->IsFastPropertyType()));
+ // Disallow caching for uninitialized constants. These can only
+ // occur as fields.
+ if (result->IsField() &&
+ result->IsReadOnly() &&
+ FastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
+ result->DisallowCaching();
}
- } else {
- int entry = property_dictionary()->FindEntry(name);
- if (entry != StringDictionary::kNotFound) {
- Object* value = property_dictionary()->ValueAt(entry);
- if (IsGlobalObject()) {
- PropertyDetails d = property_dictionary()->DetailsAt(entry);
- if (d.IsDeleted()) {
- result->NotFound();
- return;
- }
- value = JSGlobalPropertyCell::cast(value)->value();
+ return;
+ }
+
+ int entry = property_dictionary()->FindEntry(name);
+ if (entry != StringDictionary::kNotFound) {
+ Object* value = property_dictionary()->ValueAt(entry);
+ if (IsGlobalObject()) {
+ PropertyDetails d = property_dictionary()->DetailsAt(entry);
+ if (d.IsDeleted()) {
+ result->NotFound();
+ return;
}
- // Make sure to disallow caching for uninitialized constants
- // found in the dictionary-mode objects.
- if (value->IsTheHole()) result->DisallowCaching();
- result->DictionaryResult(this, entry);
- return;
+ value = JSGlobalPropertyCell::cast(value)->value();
}
+ // Make sure to disallow caching for uninitialized constants
+ // found in the dictionary-mode objects.
+ if (value->IsTheHole()) result->DisallowCaching();
+ result->DictionaryResult(this, entry);
+ return;
}
+
result->NotFound();
}
void JSObject::LookupRealNamedProperty(String* name, LookupResult* result) {
LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty()) return;
+ if (result->IsFound()) return;
LookupRealNamedPropertyInPrototypes(name, result);
}
@@ -2544,9 +2480,13 @@ void JSObject::LookupRealNamedPropertyInPrototypes(String* name,
Heap* heap = GetHeap();
for (Object* pt = GetPrototype();
pt != heap->null_value();
- pt = JSObject::cast(pt)->GetPrototype()) {
+ pt = pt->GetPrototype()) {
+ if (pt->IsJSProxy()) {
+ return result->HandlerResult(JSProxy::cast(pt));
+ }
JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty() && (result->type() != INTERCEPTOR)) return;
+ ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
+ if (result->IsFound()) return;
}
result->NotFound();
}
@@ -2560,7 +2500,7 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(
bool check_prototype,
StrictModeFlag strict_mode) {
if (check_prototype && !result->IsProperty()) {
- LookupCallbackSetterInPrototypes(name, result);
+ LookupRealNamedPropertyInPrototypes(name, result);
}
if (result->IsProperty()) {
@@ -2613,13 +2553,14 @@ MaybeObject* JSReceiver::SetProperty(LookupResult* result,
String* key,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
- if (result->IsFound() && result->type() == HANDLER) {
+ StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode) {
+ if (result->IsHandler()) {
return result->proxy()->SetPropertyWithHandler(
- key, value, attributes, strict_mode);
+ this, key, value, attributes, strict_mode);
} else {
return JSObject::cast(this)->SetPropertyForResult(
- result, key, value, attributes, strict_mode);
+ result, key, value, attributes, strict_mode, store_mode);
}
}
@@ -2633,20 +2574,21 @@ bool JSProxy::HasPropertyWithHandler(String* name_raw) {
Handle<Object> args[] = { name };
Handle<Object> result = CallTrap(
"has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
- if (isolate->has_pending_exception()) return Failure::Exception();
+ if (isolate->has_pending_exception()) return false;
return result->ToBoolean()->IsTrue();
}
MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandler(
+ JSReceiver* receiver_raw,
String* name_raw,
Object* value_raw,
PropertyAttributes attributes,
StrictModeFlag strict_mode) {
Isolate* isolate = GetIsolate();
HandleScope scope(isolate);
- Handle<Object> receiver(this);
+ Handle<JSReceiver> receiver(receiver_raw);
Handle<Object> name(name_raw);
Handle<Object> value(value_raw);
@@ -2658,77 +2600,92 @@ MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandler(
}
-MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandlerIfDefiningSetter(
+MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyViaPrototypesWithHandler(
+ JSReceiver* receiver_raw,
String* name_raw,
Object* value_raw,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool* found) {
- *found = true; // except where defined otherwise...
- Isolate* isolate = GetHeap()->isolate();
+ bool* done) {
+ Isolate* isolate = GetIsolate();
Handle<JSProxy> proxy(this);
- Handle<Object> handler(this->handler()); // Trap might morph proxy.
+ Handle<JSReceiver> receiver(receiver_raw);
Handle<String> name(name_raw);
Handle<Object> value(value_raw);
+ Handle<Object> handler(this->handler()); // Trap might morph proxy.
+
+ *done = true; // except where redefined...
Handle<Object> args[] = { name };
Handle<Object> result = proxy->CallTrap(
"getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
if (isolate->has_pending_exception()) return Failure::Exception();
- if (!result->IsUndefined()) {
- // The proxy handler cares about this property.
- // Check whether it is virtualized as an accessor.
- // Emulate [[GetProperty]] semantics for proxies.
- bool has_pending_exception;
- Handle<Object> argv[] = { result };
- Handle<Object> desc =
- Execution::Call(isolate->to_complete_property_descriptor(), result,
- ARRAY_SIZE(argv), argv, &has_pending_exception);
- if (has_pending_exception) return Failure::Exception();
-
- Handle<String> conf_name =
- isolate->factory()->LookupAsciiSymbol("configurable_");
- Handle<Object> configurable(v8::internal::GetProperty(desc, conf_name));
- ASSERT(!isolate->has_pending_exception());
- if (configurable->IsFalse()) {
- Handle<String> trap =
- isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
- Handle<Object> args[] = { handler, trap, name };
- Handle<Object> error = isolate->factory()->NewTypeError(
- "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
- return isolate->Throw(*error);
- }
- ASSERT(configurable->IsTrue());
+ if (result->IsUndefined()) {
+ *done = false;
+ return GetHeap()->the_hole_value();
+ }
+
+ // Emulate [[GetProperty]] semantics for proxies.
+ bool has_pending_exception;
+ Handle<Object> argv[] = { result };
+ Handle<Object> desc =
+ Execution::Call(isolate->to_complete_property_descriptor(), result,
+ ARRAY_SIZE(argv), argv, &has_pending_exception);
+ if (has_pending_exception) return Failure::Exception();
- // Check for AccessorDescriptor.
- Handle<String> set_name = isolate->factory()->LookupAsciiSymbol("set_");
- Handle<Object> setter(v8::internal::GetProperty(desc, set_name));
+ // [[GetProperty]] requires to check that all properties are configurable.
+ Handle<String> configurable_name =
+ isolate->factory()->LookupAsciiSymbol("configurable_");
+ Handle<Object> configurable(
+ v8::internal::GetProperty(desc, configurable_name));
+ ASSERT(!isolate->has_pending_exception());
+ ASSERT(configurable->IsTrue() || configurable->IsFalse());
+ if (configurable->IsFalse()) {
+ Handle<String> trap =
+ isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
+ Handle<Object> args[] = { handler, trap, name };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
+ ASSERT(configurable->IsTrue());
+
+ // Check for DataDescriptor.
+ Handle<String> hasWritable_name =
+ isolate->factory()->LookupAsciiSymbol("hasWritable_");
+ Handle<Object> hasWritable(v8::internal::GetProperty(desc, hasWritable_name));
+ ASSERT(!isolate->has_pending_exception());
+ ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
+ if (hasWritable->IsTrue()) {
+ Handle<String> writable_name =
+ isolate->factory()->LookupAsciiSymbol("writable_");
+ Handle<Object> writable(v8::internal::GetProperty(desc, writable_name));
ASSERT(!isolate->has_pending_exception());
- if (!setter->IsUndefined()) {
- // We have a setter -- invoke it.
- // TODO(rossberg): nicer would be to cast to some JSCallable here...
- return proxy->SetPropertyWithDefinedSetter(
- JSReceiver::cast(*setter), *value);
- } else {
- Handle<String> get_name = isolate->factory()->LookupAsciiSymbol("get_");
- Handle<Object> getter(v8::internal::GetProperty(desc, get_name));
- ASSERT(!isolate->has_pending_exception());
- if (!getter->IsUndefined()) {
- // We have a getter but no setter -- the property may not be
- // written. In strict mode, throw an error.
- if (strict_mode == kNonStrictMode) return *value;
- Handle<Object> args[] = { name, proxy };
- Handle<Object> error = isolate->factory()->NewTypeError(
- "no_setter_in_callback", HandleVector(args, ARRAY_SIZE(args)));
- return isolate->Throw(*error);
- }
- }
- // Fall-through.
+ ASSERT(writable->IsTrue() || writable->IsFalse());
+ *done = writable->IsFalse();
+ if (!*done) return GetHeap()->the_hole_value();
+ if (strict_mode == kNonStrictMode) return *value;
+ Handle<Object> args[] = { name, receiver };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
}
- // The proxy does not define the property as an accessor.
- *found = false;
- return *value;
+ // We have an AccessorDescriptor.
+ Handle<String> set_name = isolate->factory()->LookupAsciiSymbol("set_");
+ Handle<Object> setter(v8::internal::GetProperty(desc, set_name));
+ ASSERT(!isolate->has_pending_exception());
+ if (!setter->IsUndefined()) {
+ // TODO(rossberg): nicer would be to cast to some JSCallable here...
+ return receiver->SetPropertyWithDefinedSetter(
+ JSReceiver::cast(*setter), *value);
+ }
+
+ if (strict_mode == kNonStrictMode) return *value;
+ Handle<Object> args2[] = { name, proxy };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
+ return isolate->Throw(*error);
}
@@ -2736,7 +2693,7 @@ MUST_USE_RESULT MaybeObject* JSProxy::DeletePropertyWithHandler(
String* name_raw, DeleteMode mode) {
Isolate* isolate = GetIsolate();
HandleScope scope(isolate);
- Handle<Object> receiver(this);
+ Handle<JSProxy> receiver(this);
Handle<Object> name(name_raw);
Handle<Object> args[] = { name };
@@ -2746,8 +2703,9 @@ MUST_USE_RESULT MaybeObject* JSProxy::DeletePropertyWithHandler(
Object* bool_result = result->ToBoolean();
if (mode == STRICT_DELETION && bool_result == GetHeap()->false_value()) {
+ Handle<Object> handler(receiver->handler());
Handle<String> trap_name = isolate->factory()->LookupAsciiSymbol("delete");
- Handle<Object> args[] = { Handle<Object>(handler()), trap_name };
+ Handle<Object> args[] = { handler, trap_name };
Handle<Object> error = isolate->factory()->NewTypeError(
"handler_failed", HandleVector(args, ARRAY_SIZE(args)));
isolate->Throw(*error);
@@ -2821,12 +2779,14 @@ MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
MUST_USE_RESULT PropertyAttributes JSProxy::GetElementAttributeWithHandler(
- JSReceiver* receiver,
+ JSReceiver* receiver_raw,
uint32_t index) {
Isolate* isolate = GetIsolate();
HandleScope scope(isolate);
+ Handle<JSProxy> proxy(this);
+ Handle<JSReceiver> receiver(receiver_raw);
Handle<String> name = isolate->factory()->Uint32ToString(index);
- return GetPropertyAttributeWithHandler(receiver, *name);
+ return proxy->GetPropertyAttributeWithHandler(*receiver, *name);
}
@@ -2850,7 +2810,7 @@ void JSProxy::Fix() {
Object* hash;
if (maybe_hash->To<Object>(&hash) && hash->IsSmi()) {
Handle<JSObject> new_self(JSObject::cast(*self));
- isolate->factory()->SetIdentityHash(new_self, hash);
+ isolate->factory()->SetIdentityHash(new_self, Smi::cast(hash));
}
}
@@ -2882,11 +2842,20 @@ MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
}
-MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
- String* name,
- Object* value,
+void JSObject::AddFastPropertyUsingMap(Handle<JSObject> object,
+ Handle<Map> map) {
+ CALL_HEAP_FUNCTION_VOID(
+ object->GetIsolate(),
+ object->AddFastPropertyUsingMap(*map));
+}
+
+
+MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
+ String* name_raw,
+ Object* value_raw,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode) {
Heap* heap = GetHeap();
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
@@ -2895,116 +2864,156 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
// Optimization for 2-byte strings often used as keys in a decompression
// dictionary. We make these short keys into symbols to avoid constantly
// reallocating them.
- if (!name->IsSymbol() && name->length() <= 2) {
+ if (!name_raw->IsSymbol() && name_raw->length() <= 2) {
Object* symbol_version;
- { MaybeObject* maybe_symbol_version = heap->LookupSymbol(name);
+ { MaybeObject* maybe_symbol_version = heap->LookupSymbol(name_raw);
if (maybe_symbol_version->ToObject(&symbol_version)) {
- name = String::cast(symbol_version);
+ name_raw = String::cast(symbol_version);
}
}
}
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
- if (!heap->isolate()->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+ if (!heap->isolate()->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
return SetPropertyWithFailedAccessCheck(
- result, name, value, true, strict_mode);
+ lookup, name_raw, value_raw, true, strict_mode);
}
}
if (IsJSGlobalProxy()) {
Object* proto = GetPrototype();
- if (proto->IsNull()) return value;
+ if (proto->IsNull()) return value_raw;
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->SetPropertyForResult(
- result, name, value, attributes, strict_mode);
+ lookup, name_raw, value_raw, attributes, strict_mode, store_mode);
}
- if (!result->IsProperty() && !IsJSContextExtensionObject()) {
- bool found = false;
- MaybeObject* result_object;
- result_object = SetPropertyWithCallbackSetterInPrototypes(name,
- value,
- attributes,
- &found,
- strict_mode);
- if (found) return result_object;
- }
+ // From this point on everything needs to be handlified, because
+ // SetPropertyViaPrototypes might call back into JavaScript.
+ HandleScope scope(GetIsolate());
+ Handle<JSObject> self(this);
+ Handle<String> name(name_raw);
+ Handle<Object> value(value_raw);
- // At this point, no GC should have happened, as this would invalidate
- // 'result', which we cannot handlify!
+ if (!lookup->IsProperty() && !self->IsJSContextExtensionObject()) {
+ bool done = false;
+ MaybeObject* result_object = self->SetPropertyViaPrototypes(
+ *name, *value, attributes, strict_mode, &done);
+ if (done) return result_object;
+ }
- if (!result->IsFound()) {
+ if (!lookup->IsFound()) {
// Neither properties nor transitions found.
- return AddProperty(name, value, attributes, strict_mode);
+ return self->AddProperty(
+ *name, *value, attributes, strict_mode, store_mode);
}
- if (result->IsReadOnly() && result->IsProperty()) {
+
+ if (lookup->IsProperty() && lookup->IsReadOnly()) {
if (strict_mode == kStrictMode) {
- Handle<JSObject> self(this);
- Handle<String> hname(name);
- Handle<Object> args[] = { hname, self };
+ Handle<Object> args[] = { name, self };
return heap->isolate()->Throw(*heap->isolate()->factory()->NewTypeError(
"strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))));
} else {
- return value;
+ return *value;
}
}
+
+ Handle<Object> old_value(heap->the_hole_value());
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ old_value = handle(lookup->GetLazyValue());
+ }
+
// This is a real property that is not read-only, or it is a
// transition or null descriptor and there are no setters in the prototypes.
- switch (result->type()) {
+ MaybeObject* result = *value;
+ switch (lookup->type()) {
case NORMAL:
- return SetNormalizedProperty(result, value);
+ result = self->SetNormalizedProperty(lookup, *value);
+ break;
case FIELD:
- return FastPropertyAtPut(result->GetFieldIndex(), value);
- case MAP_TRANSITION:
- if (attributes == result->GetAttributes()) {
- // Only use map transition if the attributes match.
- return AddFastPropertyUsingMap(result->GetTransitionMap(),
- name,
- value);
- }
- return ConvertDescriptorToField(name, value, attributes);
+ result = self->FastPropertyAtPut(lookup->GetFieldIndex(), *value);
+ break;
case CONSTANT_FUNCTION:
// Only replace the function if necessary.
- if (value == result->GetConstantFunction()) return value;
+ if (*value == lookup->GetConstantFunction()) return *value;
// Preserve the attributes of this existing property.
- attributes = result->GetAttributes();
- return ConvertDescriptorToField(name, value, attributes);
- case CALLBACKS:
- return SetPropertyWithCallback(result->GetCallbackObject(),
- name,
- value,
- result->holder(),
- strict_mode);
+ attributes = lookup->GetAttributes();
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ break;
+ case CALLBACKS: {
+ Object* callback_object = lookup->GetCallbackObject();
+ return self->SetPropertyWithCallback(callback_object,
+ *name,
+ *value,
+ lookup->holder(),
+ strict_mode);
+ }
case INTERCEPTOR:
- return SetPropertyWithInterceptor(name, value, attributes, strict_mode);
- case CONSTANT_TRANSITION: {
- // If the same constant function is being added we can simply
- // transition to the target map.
- Map* target_map = result->GetTransitionMap();
- DescriptorArray* target_descriptors = target_map->instance_descriptors();
- int number = target_descriptors->SearchWithCache(name);
- ASSERT(number != DescriptorArray::kNotFound);
- ASSERT(target_descriptors->GetType(number) == CONSTANT_FUNCTION);
- JSFunction* function =
- JSFunction::cast(target_descriptors->GetValue(number));
- if (value == function) {
- set_map(target_map);
- return value;
+ result = self->SetPropertyWithInterceptor(*name,
+ *value,
+ attributes,
+ strict_mode);
+ break;
+ case TRANSITION: {
+ Map* transition_map = lookup->GetTransitionTarget();
+ int descriptor = transition_map->LastAdded();
+
+ DescriptorArray* descriptors = transition_map->instance_descriptors();
+ PropertyDetails details = descriptors->GetDetails(descriptor);
+
+ if (details.type() == FIELD) {
+ if (attributes == details.attributes()) {
+ int field_index = descriptors->GetFieldIndex(descriptor);
+ result = self->AddFastPropertyUsingMap(transition_map,
+ *name,
+ *value,
+ field_index);
+ } else {
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ }
+ } else if (details.type() == CALLBACKS) {
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ } else {
+ ASSERT(details.type() == CONSTANT_FUNCTION);
+
+ Object* constant_function = descriptors->GetValue(descriptor);
+ if (constant_function == *value) {
+ // If the same constant function is being added we can simply
+ // transition to the target map.
+ self->set_map(transition_map);
+ result = constant_function;
+ } else {
+ // Otherwise, replace with a map transition to a new map with a FIELD,
+ // even if the value is a constant function.
+ result = self->ConvertTransitionToMapTransition(
+ lookup->GetTransitionIndex(), *name, *value, attributes);
+ }
}
- // Otherwise, replace with a MAP_TRANSITION to a new map with a
- // FIELD, even if the value is a constant function.
- return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
+ break;
}
- case NULL_DESCRIPTOR:
- case ELEMENTS_TRANSITION:
- return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
case HANDLER:
+ case NONEXISTENT:
UNREACHABLE();
- return value;
}
- UNREACHABLE(); // keep the compiler happy
- return value;
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ if (lookup->IsTransition()) {
+ EnqueueChangeRecord(self, "new", name, old_value);
+ } else {
+ LookupResult new_lookup(self->GetIsolate());
+ self->LocalLookup(*name, &new_lookup);
+ ASSERT(!new_lookup.GetLazyValue()->IsTheHole());
+ if (!new_lookup.GetLazyValue()->SameValue(*old_value)) {
+ EnqueueChangeRecord(self, "updated", name, old_value);
+ }
+ }
+ }
+
+ return *hresult;
}
@@ -3030,21 +3039,22 @@ Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
- String* name,
- Object* value,
+ String* name_raw,
+ Object* value_raw,
PropertyAttributes attributes) {
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
AssertNoContextChange ncc;
Isolate* isolate = GetIsolate();
- LookupResult result(isolate);
- LocalLookup(name, &result);
+ LookupResult lookup(isolate);
+ LocalLookup(name_raw, &lookup);
+ if (!lookup.IsFound()) map()->LookupTransition(this, name_raw, &lookup);
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
- if (!isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
- return SetPropertyWithFailedAccessCheck(&result,
- name,
- value,
+ if (!isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
+ return SetPropertyWithFailedAccessCheck(&lookup,
+ name_raw,
+ value_raw,
false,
kNonStrictMode);
}
@@ -3052,58 +3062,109 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
if (IsJSGlobalProxy()) {
Object* proto = GetPrototype();
- if (proto->IsNull()) return value;
+ if (proto->IsNull()) return value_raw;
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->SetLocalPropertyIgnoreAttributes(
- name,
- value,
+ name_raw,
+ value_raw,
attributes);
}
// Check for accessor in prototype chain removed here in clone.
- if (!result.IsFound()) {
+ if (!lookup.IsFound()) {
// Neither properties nor transitions found.
- return AddProperty(name, value, attributes, kNonStrictMode);
+ return AddProperty(name_raw, value_raw, attributes, kNonStrictMode);
}
- PropertyDetails details = PropertyDetails(attributes, NORMAL);
+ // From this point on everything needs to be handlified.
+ HandleScope scope(GetIsolate());
+ Handle<JSObject> self(this);
+ Handle<String> name(name_raw);
+ Handle<Object> value(value_raw);
+
+ Handle<Object> old_value(isolate->heap()->the_hole_value());
+ PropertyAttributes old_attributes = ABSENT;
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ old_value = handle(lookup.GetLazyValue());
+ old_attributes = lookup.GetAttributes();
+ }
// Check of IsReadOnly removed from here in clone.
- switch (result.type()) {
- case NORMAL:
- return SetNormalizedProperty(name, value, details);
+ MaybeObject* result = *value;
+ switch (lookup.type()) {
+ case NORMAL: {
+ PropertyDetails details = PropertyDetails(attributes, NORMAL);
+ result = self->SetNormalizedProperty(*name, *value, details);
+ break;
+ }
case FIELD:
- return FastPropertyAtPut(result.GetFieldIndex(), value);
- case MAP_TRANSITION:
- if (attributes == result.GetAttributes()) {
- // Only use map transition if the attributes match.
- return AddFastPropertyUsingMap(result.GetTransitionMap(),
- name,
- value);
- }
- return ConvertDescriptorToField(name, value, attributes);
+ result = self->FastPropertyAtPut(lookup.GetFieldIndex(), *value);
+ break;
case CONSTANT_FUNCTION:
// Only replace the function if necessary.
- if (value == result.GetConstantFunction()) return value;
- // Preserve the attributes of this existing property.
- attributes = result.GetAttributes();
- return ConvertDescriptorToField(name, value, attributes);
+ if (*value != lookup.GetConstantFunction()) {
+ // Preserve the attributes of this existing property.
+ attributes = lookup.GetAttributes();
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ }
+ break;
case CALLBACKS:
case INTERCEPTOR:
// Override callback in clone
- return ConvertDescriptorToField(name, value, attributes);
- case CONSTANT_TRANSITION:
- // Replace with a MAP_TRANSITION to a new map with a FIELD, even
- // if the value is a function.
- return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
- case NULL_DESCRIPTOR:
- case ELEMENTS_TRANSITION:
- return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ break;
+ case TRANSITION: {
+ Map* transition_map = lookup.GetTransitionTarget();
+ int descriptor = transition_map->LastAdded();
+
+ DescriptorArray* descriptors = transition_map->instance_descriptors();
+ PropertyDetails details = descriptors->GetDetails(descriptor);
+
+ if (details.type() == FIELD) {
+ if (attributes == details.attributes()) {
+ int field_index = descriptors->GetFieldIndex(descriptor);
+ result = self->AddFastPropertyUsingMap(
+ transition_map, *name, *value, field_index);
+ } else {
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ }
+ } else if (details.type() == CALLBACKS) {
+ result = self->ConvertDescriptorToField(*name, *value, attributes);
+ } else {
+ ASSERT(details.type() == CONSTANT_FUNCTION);
+
+ // Replace transition to CONSTANT FUNCTION with a map transition to a
+ // new map with a FIELD, even if the value is a function.
+ result = self->ConvertTransitionToMapTransition(
+ lookup.GetTransitionIndex(), *name, *value, attributes);
+ }
+ break;
+ }
case HANDLER:
+ case NONEXISTENT:
UNREACHABLE();
}
- UNREACHABLE(); // keep the compiler happy
- return value;
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ if (lookup.IsTransition()) {
+ EnqueueChangeRecord(self, "new", name, old_value);
+ } else {
+ LookupResult new_lookup(isolate);
+ self->LocalLookup(*name, &new_lookup);
+ ASSERT(!new_lookup.GetLazyValue()->IsTheHole());
+ if (old_value->IsTheHole() ||
+ new_lookup.GetAttributes() != old_attributes) {
+ EnqueueChangeRecord(self, "reconfigured", name, old_value);
+ } else if (!new_lookup.GetLazyValue()->SameValue(*old_value)) {
+ EnqueueChangeRecord(self, "updated", name, old_value);
+ }
+ }
+ }
+
+ return *hresult;
}
@@ -3114,7 +3175,7 @@ PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
// Check local property, ignore interceptor.
LookupResult result(GetIsolate());
LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty()) return result.GetAttributes();
+ if (result.IsFound()) return result.GetAttributes();
if (continue_search) {
// Continue searching via the prototype chain.
@@ -3184,44 +3245,46 @@ PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
String* key) {
uint32_t index = 0;
if (IsJSObject() && key->AsArrayIndex(&index)) {
- return JSObject::cast(this)->HasElementWithReceiver(receiver, index)
- ? NONE : ABSENT;
+ return JSObject::cast(this)->GetElementAttributeWithReceiver(
+ receiver, index, true);
}
// Named property.
- LookupResult result(GetIsolate());
- Lookup(key, &result);
- return GetPropertyAttribute(receiver, &result, key, true);
+ LookupResult lookup(GetIsolate());
+ Lookup(key, &lookup);
+ return GetPropertyAttributeForResult(receiver, &lookup, key, true);
}
-PropertyAttributes JSReceiver::GetPropertyAttribute(JSReceiver* receiver,
- LookupResult* result,
- String* name,
- bool continue_search) {
+PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
+ JSReceiver* receiver,
+ LookupResult* lookup,
+ String* name,
+ bool continue_search) {
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
JSObject* this_obj = JSObject::cast(this);
Heap* heap = GetHeap();
if (!heap->isolate()->MayNamedAccess(this_obj, name, v8::ACCESS_HAS)) {
return this_obj->GetPropertyAttributeWithFailedAccessCheck(
- receiver, result, name, continue_search);
+ receiver, lookup, name, continue_search);
}
}
- if (result->IsProperty()) {
- switch (result->type()) {
+ if (lookup->IsFound()) {
+ switch (lookup->type()) {
case NORMAL: // fall through
case FIELD:
case CONSTANT_FUNCTION:
case CALLBACKS:
- return result->GetAttributes();
+ return lookup->GetAttributes();
case HANDLER: {
- return JSProxy::cast(result->proxy())->GetPropertyAttributeWithHandler(
+ return JSProxy::cast(lookup->proxy())->GetPropertyAttributeWithHandler(
receiver, name);
}
case INTERCEPTOR:
- return result->holder()->GetPropertyAttributeWithInterceptor(
+ return lookup->holder()->GetPropertyAttributeWithInterceptor(
JSObject::cast(receiver), name, continue_search);
- default:
+ case TRANSITION:
+ case NONEXISTENT:
UNREACHABLE();
}
}
@@ -3233,13 +3296,113 @@ PropertyAttributes JSReceiver::GetLocalPropertyAttribute(String* name) {
// Check whether the name is an array index.
uint32_t index = 0;
if (IsJSObject() && name->AsArrayIndex(&index)) {
- if (JSObject::cast(this)->HasLocalElement(index)) return NONE;
- return ABSENT;
+ return GetLocalElementAttribute(index);
}
// Named property.
- LookupResult result(GetIsolate());
- LocalLookup(name, &result);
- return GetPropertyAttribute(this, &result, name, false);
+ LookupResult lookup(GetIsolate());
+ LocalLookup(name, &lookup);
+ return GetPropertyAttributeForResult(this, &lookup, name, false);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithReceiver(
+ JSReceiver* receiver, uint32_t index, bool continue_search) {
+ Isolate* isolate = GetIsolate();
+
+ // Check access rights if needed.
+ if (IsAccessCheckNeeded()) {
+ if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
+ isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
+ return ABSENT;
+ }
+ }
+
+ if (IsJSGlobalProxy()) {
+ Object* proto = GetPrototype();
+ if (proto->IsNull()) return ABSENT;
+ ASSERT(proto->IsJSGlobalObject());
+ return JSObject::cast(proto)->GetElementAttributeWithReceiver(
+ receiver, index, continue_search);
+ }
+
+ // Check for lookup interceptor except when bootstrapping.
+ if (HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
+ return GetElementAttributeWithInterceptor(receiver, index, continue_search);
+ }
+
+ return GetElementAttributeWithoutInterceptor(
+ receiver, index, continue_search);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
+ JSReceiver* receiver, uint32_t index, bool continue_search) {
+ Isolate* isolate = GetIsolate();
+ // Make sure that the top context does not change when doing
+ // callbacks or interceptor calls.
+ AssertNoContextChange ncc;
+ HandleScope scope(isolate);
+ Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
+ Handle<JSReceiver> hreceiver(receiver);
+ Handle<JSObject> holder(this);
+ CustomArguments args(isolate, interceptor->data(), receiver, this);
+ v8::AccessorInfo info(args.end());
+ if (!interceptor->query()->IsUndefined()) {
+ v8::IndexedPropertyQuery query =
+ v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
+ LOG(isolate,
+ ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
+ v8::Handle<v8::Integer> result;
+ {
+ // Leaving JavaScript.
+ VMState state(isolate, EXTERNAL);
+ result = query(index, info);
+ }
+ if (!result.IsEmpty())
+ return static_cast<PropertyAttributes>(result->Int32Value());
+ } else if (!interceptor->getter()->IsUndefined()) {
+ v8::IndexedPropertyGetter getter =
+ v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
+ LOG(isolate,
+ ApiIndexedPropertyAccess("interceptor-indexed-get-has", this, index));
+ v8::Handle<v8::Value> result;
+ {
+ // Leaving JavaScript.
+ VMState state(isolate, EXTERNAL);
+ result = getter(index, info);
+ }
+ if (!result.IsEmpty()) return DONT_ENUM;
+ }
+
+ return holder->GetElementAttributeWithoutInterceptor(
+ *hreceiver, index, continue_search);
+}
+
+
+PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
+ JSReceiver* receiver, uint32_t index, bool continue_search) {
+ Isolate* isolate = GetIsolate();
+ HandleScope scope(isolate);
+ Handle<JSReceiver> hreceiver(receiver);
+ Handle<JSObject> holder(this);
+ PropertyAttributes attr = holder->GetElementsAccessor()->GetAttributes(
+ *hreceiver, *holder, index);
+ if (attr != ABSENT) return attr;
+
+ if (holder->IsStringObjectWithCharacterAt(index)) {
+ return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
+ }
+
+ if (!continue_search) return ABSENT;
+
+ Object* pt = holder->GetPrototype();
+ if (pt->IsJSProxy()) {
+ // We need to follow the spec and simulate a call to [[GetOwnProperty]].
+ return JSProxy::cast(pt)->GetElementAttributeWithHandler(*hreceiver, index);
+ }
+ if (pt->IsNull()) return ABSENT;
+ return JSObject::cast(pt)->GetElementAttributeWithReceiver(
+ *hreceiver, index, true);
}
@@ -3251,20 +3414,27 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
Object* result = get(index);
if (result->IsMap() &&
Map::cast(result)->EquivalentToForNormalization(fast, mode)) {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Map::cast(result)->SharedMapVerify();
}
+#endif
+#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
- // The cached map should match newly created normalized map bit-by-bit.
+ // The cached map should match newly created normalized map bit-by-bit,
+ // except for the code cache, which can contain some ics which can be
+ // applied to the shared map.
Object* fresh;
- { MaybeObject* maybe_fresh =
- fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
- if (maybe_fresh->ToObject(&fresh)) {
- ASSERT(memcmp(Map::cast(fresh)->address(),
- Map::cast(result)->address(),
- Map::kSize) == 0);
- }
+ MaybeObject* maybe_fresh =
+ fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
+ if (maybe_fresh->ToObject(&fresh)) {
+ ASSERT(memcmp(Map::cast(fresh)->address(),
+ Map::cast(result)->address(),
+ Map::kCodeCacheOffset) == 0);
+ int offset = Map::kCodeCacheOffset + kPointerSize;
+ ASSERT(memcmp(Map::cast(fresh)->address() + offset,
+ Map::cast(result)->address() + offset,
+ Map::kSize - offset) == 0);
}
}
#endif
@@ -3275,6 +3445,7 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
+ ASSERT(Map::cast(result)->is_dictionary_map());
set(index, result);
isolate->counters()->normalized_maps()->Increment();
@@ -3338,24 +3509,25 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
Map* map_of_this = map();
// Allocate new content.
- int property_count = map_of_this->NumberOfDescribedProperties();
+ int real_size = map_of_this->NumberOfOwnDescriptors();
+ int property_count = real_size;
if (expected_additional_properties > 0) {
property_count += expected_additional_properties;
} else {
property_count += 2; // Make space for two more properties.
}
StringDictionary* dictionary;
- { MaybeObject* maybe_dictionary = StringDictionary::Allocate(property_count);
- if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
- }
+ MaybeObject* maybe_dictionary = StringDictionary::Allocate(property_count);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
DescriptorArray* descs = map_of_this->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ for (int i = 0; i < real_size; i++) {
PropertyDetails details = descs->GetDetails(i);
switch (details.type()) {
case CONSTANT_FUNCTION: {
- PropertyDetails d =
- PropertyDetails(details.attributes(), NORMAL, details.index());
+ PropertyDetails d = PropertyDetails(details.attributes(),
+ NORMAL,
+ details.descriptor_index());
Object* value = descs->GetConstantFunction(i);
MaybeObject* maybe_dictionary =
dictionary->Add(descs->GetKey(i), value, d);
@@ -3363,8 +3535,9 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
break;
}
case FIELD: {
- PropertyDetails d =
- PropertyDetails(details.attributes(), NORMAL, details.index());
+ PropertyDetails d = PropertyDetails(details.attributes(),
+ NORMAL,
+ details.descriptor_index());
Object* value = FastPropertyAt(descs->GetFieldIndex(i));
MaybeObject* maybe_dictionary =
dictionary->Add(descs->GetKey(i), value, d);
@@ -3372,26 +3545,19 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
break;
}
case CALLBACKS: {
- if (!descs->IsProperty(i)) break;
Object* value = descs->GetCallbacksObject(i);
- if (value->IsAccessorPair()) {
- MaybeObject* maybe_copy =
- AccessorPair::cast(value)->CopyWithoutTransitions();
- if (!maybe_copy->To(&value)) return maybe_copy;
- }
+ details = details.set_pointer(0);
MaybeObject* maybe_dictionary =
dictionary->Add(descs->GetKey(i), value, details);
if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
break;
}
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
case INTERCEPTOR:
- case ELEMENTS_TRANSITION:
break;
case HANDLER:
case NORMAL:
+ case TRANSITION:
+ case NONEXISTENT:
UNREACHABLE();
break;
}
@@ -3400,15 +3566,14 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
Heap* current_heap = GetHeap();
// Copy the next enumeration index from instance descriptor.
- int index = map_of_this->instance_descriptors()->NextEnumerationIndex();
- dictionary->SetNextEnumerationIndex(index);
+ dictionary->SetNextEnumerationIndex(real_size + 1);
Map* new_map;
- { MaybeObject* maybe_map =
- current_heap->isolate()->context()->global_context()->
- normalized_map_cache()->Get(this, mode);
- if (!maybe_map->To(&new_map)) return maybe_map;
- }
+ MaybeObject* maybe_map =
+ current_heap->isolate()->context()->native_context()->
+ normalized_map_cache()->Get(this, mode);
+ if (!maybe_map->To(&new_map)) return maybe_map;
+ ASSERT(new_map->is_dictionary_map());
// We have now successfully allocated all the necessary objects.
// Changes can now be made with the guarantee that all of them take effect.
@@ -3424,9 +3589,7 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
-instance_size_delta);
}
-
set_map(new_map);
- new_map->clear_instance_descriptors();
set_properties(dictionary);
@@ -3479,8 +3642,7 @@ MaybeObject* JSObject::NormalizeElements() {
}
if (array->IsDictionary()) return array;
- ASSERT(HasFastElements() ||
- HasFastSmiOnlyElements() ||
+ ASSERT(HasFastSmiOrObjectElements() ||
HasFastDoubleElements() ||
HasFastArgumentsElements());
// Compute the effective length and allocate a new backing store.
@@ -3515,8 +3677,7 @@ MaybeObject* JSObject::NormalizeElements() {
if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
}
} else {
- ASSERT(old_map->has_fast_elements() ||
- old_map->has_fast_smi_only_elements());
+ ASSERT(old_map->has_fast_smi_or_object_elements());
value = FixedArray::cast(array)->get(i);
}
PropertyDetails details = PropertyDetails(NONE, NORMAL);
@@ -3575,7 +3736,7 @@ Smi* JSReceiver::GenerateIdentityHash() {
}
-MaybeObject* JSObject::SetIdentityHash(Object* hash, CreationFlag flag) {
+MaybeObject* JSObject::SetIdentityHash(Smi* hash, CreationFlag flag) {
MaybeObject* maybe = SetHiddenProperty(GetHeap()->identity_hash_symbol(),
hash);
if (maybe->IsFailure()) return maybe;
@@ -3621,6 +3782,7 @@ MaybeObject* JSProxy::GetIdentityHash(CreationFlag flag) {
Object* JSObject::GetHiddenProperty(String* key) {
+ ASSERT(key->IsSymbol());
if (IsJSGlobalProxy()) {
// For a proxy, use the prototype as target object.
Object* proxy_parent = GetPrototype();
@@ -3630,22 +3792,31 @@ Object* JSObject::GetHiddenProperty(String* key) {
return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
}
ASSERT(!IsJSGlobalProxy());
- MaybeObject* hidden_lookup = GetHiddenPropertiesDictionary(false);
- ASSERT(!hidden_lookup->IsFailure()); // No failure when passing false as arg.
- if (hidden_lookup->ToObjectUnchecked()->IsUndefined()) {
- return GetHeap()->undefined_value();
+ MaybeObject* hidden_lookup =
+ GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
+ Object* inline_value = hidden_lookup->ToObjectUnchecked();
+
+ if (inline_value->IsSmi()) {
+ // Handle inline-stored identity hash.
+ if (key == GetHeap()->identity_hash_symbol()) {
+ return inline_value;
+ } else {
+ return GetHeap()->undefined_value();
+ }
}
- StringDictionary* dictionary =
- StringDictionary::cast(hidden_lookup->ToObjectUnchecked());
- int entry = dictionary->FindEntry(key);
- if (entry == StringDictionary::kNotFound) return GetHeap()->undefined_value();
- return dictionary->ValueAt(entry);
+
+ if (inline_value->IsUndefined()) return GetHeap()->undefined_value();
+
+ ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
+ Object* entry = hashtable->Lookup(key);
+ if (entry->IsTheHole()) return GetHeap()->undefined_value();
+ return entry;
}
Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> obj,
- Handle<String> key,
- Handle<Object> value) {
+ Handle<String> key,
+ Handle<Object> value) {
CALL_HEAP_FUNCTION(obj->GetIsolate(),
obj->SetHiddenProperty(*key, *value),
Object);
@@ -3653,6 +3824,7 @@ Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> obj,
MaybeObject* JSObject::SetHiddenProperty(String* key, Object* value) {
+ ASSERT(key->IsSymbol());
if (IsJSGlobalProxy()) {
// For a proxy, use the prototype as target object.
Object* proxy_parent = GetPrototype();
@@ -3662,27 +3834,29 @@ MaybeObject* JSObject::SetHiddenProperty(String* key, Object* value) {
return JSObject::cast(proxy_parent)->SetHiddenProperty(key, value);
}
ASSERT(!IsJSGlobalProxy());
- MaybeObject* hidden_lookup = GetHiddenPropertiesDictionary(true);
- StringDictionary* dictionary;
- if (!hidden_lookup->To<StringDictionary>(&dictionary)) return hidden_lookup;
+ MaybeObject* hidden_lookup =
+ GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
+ Object* inline_value = hidden_lookup->ToObjectUnchecked();
- // If it was found, check if the key is already in the dictionary.
- int entry = dictionary->FindEntry(key);
- if (entry != StringDictionary::kNotFound) {
- // If key was found, just update the value.
- dictionary->ValueAtPut(entry, value);
- return this;
+ // If there is no backing store yet, store the identity hash inline.
+ if (value->IsSmi() &&
+ key == GetHeap()->identity_hash_symbol() &&
+ (inline_value->IsUndefined() || inline_value->IsSmi())) {
+ return SetHiddenPropertiesHashTable(value);
}
- // Key was not already in the dictionary, so add the entry.
- MaybeObject* insert_result = dictionary->Add(key,
- value,
- PropertyDetails(NONE, NORMAL));
- StringDictionary* new_dict;
- if (!insert_result->To<StringDictionary>(&new_dict)) return insert_result;
- if (new_dict != dictionary) {
+
+ hidden_lookup = GetHiddenPropertiesHashTable(CREATE_NEW_IF_ABSENT);
+ ObjectHashTable* hashtable;
+ if (!hidden_lookup->To(&hashtable)) return hidden_lookup;
+
+ // If it was found, check if the key is already in the dictionary.
+ MaybeObject* insert_result = hashtable->Put(key, value);
+ ObjectHashTable* new_table;
+ if (!insert_result->To(&new_table)) return insert_result;
+ if (new_table != hashtable) {
// If adding the key expanded the dictionary (i.e., Add returned a new
// dictionary), store it back to the object.
- MaybeObject* store_result = SetHiddenPropertiesDictionary(new_dict);
+ MaybeObject* store_result = SetHiddenPropertiesHashTable(new_table);
if (store_result->IsFailure()) return store_result;
}
// Return this to mark success.
@@ -3691,6 +3865,7 @@ MaybeObject* JSObject::SetHiddenProperty(String* key, Object* value) {
void JSObject::DeleteHiddenProperty(String* key) {
+ ASSERT(key->IsSymbol());
if (IsJSGlobalProxy()) {
// For a proxy, use the prototype as target object.
Object* proxy_parent = GetPrototype();
@@ -3700,18 +3875,19 @@ void JSObject::DeleteHiddenProperty(String* key) {
JSObject::cast(proxy_parent)->DeleteHiddenProperty(key);
return;
}
- MaybeObject* hidden_lookup = GetHiddenPropertiesDictionary(false);
- ASSERT(!hidden_lookup->IsFailure()); // No failure when passing false as arg.
- if (hidden_lookup->ToObjectUnchecked()->IsUndefined()) return;
- StringDictionary* dictionary =
- StringDictionary::cast(hidden_lookup->ToObjectUnchecked());
- int entry = dictionary->FindEntry(key);
- if (entry == StringDictionary::kNotFound) {
- // Key wasn't in dictionary. Deletion is a success.
- return;
- }
- // Key was in the dictionary. Remove it.
- dictionary->DeleteProperty(entry, JSReceiver::FORCE_DELETION);
+ ASSERT(!IsJSGlobalProxy());
+ MaybeObject* hidden_lookup =
+ GetHiddenPropertiesHashTable(ONLY_RETURN_INLINE_VALUE);
+ Object* inline_value = hidden_lookup->ToObjectUnchecked();
+
+ // We never delete (inline-stored) identity hashes.
+ ASSERT(key != GetHeap()->identity_hash_symbol());
+ if (inline_value->IsUndefined() || inline_value->IsSmi()) return;
+
+ ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
+ MaybeObject* delete_result = hashtable->Put(key, GetHeap()->the_hole_value());
+ USE(delete_result);
+ ASSERT(!delete_result->IsFailure()); // Delete does not cause GC.
}
@@ -3722,77 +3898,102 @@ bool JSObject::HasHiddenProperties() {
}
-MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
+MaybeObject* JSObject::GetHiddenPropertiesHashTable(
+ InitializeHiddenProperties init_option) {
ASSERT(!IsJSGlobalProxy());
+ Object* inline_value;
if (HasFastProperties()) {
// If the object has fast properties, check whether the first slot
// in the descriptor array matches the hidden symbol. Since the
// hidden symbols hash code is zero (and no other string has hash
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
- if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
- if (descriptors->GetType(0) == FIELD) {
- Object* hidden_store =
- this->FastPropertyAt(descriptors->GetFieldIndex(0));
- return StringDictionary::cast(hidden_store);
+ if (descriptors->number_of_descriptors() > 0) {
+ int sorted_index = descriptors->GetSortedKeyIndex(0);
+ if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_symbol() &&
+ sorted_index < map()->NumberOfOwnDescriptors()) {
+ ASSERT(descriptors->GetType(sorted_index) == FIELD);
+ inline_value =
+ this->FastPropertyAt(descriptors->GetFieldIndex(sorted_index));
} else {
- ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
- descriptors->GetType(0) == MAP_TRANSITION);
+ inline_value = GetHeap()->undefined_value();
}
+ } else {
+ inline_value = GetHeap()->undefined_value();
}
} else {
PropertyAttributes attributes;
// You can't install a getter on a property indexed by the hidden symbol,
// so we can be sure that GetLocalPropertyPostInterceptor returns a real
// object.
- Object* lookup =
+ inline_value =
GetLocalPropertyPostInterceptor(this,
GetHeap()->hidden_symbol(),
&attributes)->ToObjectUnchecked();
- if (!lookup->IsUndefined()) {
- return StringDictionary::cast(lookup);
- }
}
- if (!create_if_absent) return GetHeap()->undefined_value();
- const int kInitialSize = 5;
- MaybeObject* dict_alloc = StringDictionary::Allocate(kInitialSize);
- StringDictionary* dictionary;
- if (!dict_alloc->To<StringDictionary>(&dictionary)) return dict_alloc;
- // Using AddProperty or SetPropertyPostInterceptor here could fail, because
- // object might be non-extensible.
- return HasFastProperties()
- ? AddFastProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM)
- : AddSlowProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM);
+
+ if (init_option == ONLY_RETURN_INLINE_VALUE ||
+ inline_value->IsHashTable()) {
+ return inline_value;
+ }
+
+ ObjectHashTable* hashtable;
+ static const int kInitialCapacity = 4;
+ MaybeObject* maybe_obj =
+ ObjectHashTable::Allocate(kInitialCapacity,
+ ObjectHashTable::USE_CUSTOM_MINIMUM_CAPACITY);
+ if (!maybe_obj->To<ObjectHashTable>(&hashtable)) return maybe_obj;
+
+ if (inline_value->IsSmi()) {
+ // We were storing the identity hash inline and now allocated an actual
+ // dictionary. Put the identity hash into the new dictionary.
+ MaybeObject* insert_result =
+ hashtable->Put(GetHeap()->identity_hash_symbol(), inline_value);
+ ObjectHashTable* new_table;
+ if (!insert_result->To(&new_table)) return insert_result;
+ // We expect no resizing for the first insert.
+ ASSERT_EQ(hashtable, new_table);
+ }
+
+ MaybeObject* store_result =
+ SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
+ hashtable,
+ DONT_ENUM,
+ kNonStrictMode,
+ OMIT_EXTENSIBILITY_CHECK);
+ if (store_result->IsFailure()) return store_result;
+ return hashtable;
}
-MaybeObject* JSObject::SetHiddenPropertiesDictionary(
- StringDictionary* dictionary) {
+MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) {
ASSERT(!IsJSGlobalProxy());
- ASSERT(HasHiddenProperties());
+ // We can store the identity hash inline iff there is no backing store
+ // for hidden properties yet.
+ ASSERT(HasHiddenProperties() != value->IsSmi());
if (HasFastProperties()) {
// If the object has fast properties, check whether the first slot
// in the descriptor array matches the hidden symbol. Since the
// hidden symbols hash code is zero (and no other string has hash
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
- if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
- if (descriptors->GetType(0) == FIELD) {
- this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
+ if (descriptors->number_of_descriptors() > 0) {
+ int sorted_index = descriptors->GetSortedKeyIndex(0);
+ if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_symbol() &&
+ sorted_index < map()->NumberOfOwnDescriptors()) {
+ ASSERT(descriptors->GetType(sorted_index) == FIELD);
+ this->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
+ value);
return this;
- } else {
- ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
- descriptors->GetType(0) == MAP_TRANSITION);
}
}
}
MaybeObject* store_result =
SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
- dictionary,
+ value,
DONT_ENUM,
- kNonStrictMode);
+ kNonStrictMode,
+ OMIT_EXTENSIBILITY_CHECK);
if (store_result->IsFailure()) return store_result;
return this;
}
@@ -3803,7 +4004,7 @@ MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
// Check local property, ignore interceptor.
LookupResult result(GetIsolate());
LocalLookupRealNamedProperty(name, &result);
- if (!result.IsProperty()) return GetHeap()->true_value();
+ if (!result.IsFound()) return GetHeap()->true_value();
// Normalize object if needed.
Object* obj;
@@ -3837,7 +4038,9 @@ MaybeObject* JSObject::DeletePropertyWithInterceptor(String* name) {
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
ASSERT(result->IsBoolean());
- return *v8::Utils::OpenHandle(*result);
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
}
}
MaybeObject* raw_result =
@@ -3872,7 +4075,9 @@ MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) {
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
ASSERT(result->IsBoolean());
- return *v8::Utils::OpenHandle(*result);
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
}
MaybeObject* raw_result = this_handle->GetElementsAccessor()->Delete(
*this_handle,
@@ -3922,15 +4127,39 @@ MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
return JSGlobalObject::cast(proto)->DeleteElement(index, mode);
}
- if (HasIndexedInterceptor()) {
- // Skip interceptor if forcing deletion.
- if (mode != FORCE_DELETION) {
- return DeleteElementWithInterceptor(index);
+ // From this point on everything needs to be handlified.
+ HandleScope scope(isolate);
+ Handle<JSObject> self(this);
+
+ Handle<String> name;
+ Handle<Object> old_value(isolate->heap()->the_hole_value());
+ bool preexists = false;
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ name = isolate->factory()->Uint32ToString(index);
+ preexists = self->HasLocalElement(index);
+ if (preexists) {
+ // TODO(observe): only read & set old_value if it's not an accessor
+ old_value = Object::GetElement(self, index);
}
- mode = JSReceiver::FORCE_DELETION;
}
- return GetElementsAccessor()->Delete(this, index, mode);
+ MaybeObject* result;
+ // Skip interceptor if forcing deletion.
+ if (self->HasIndexedInterceptor() && mode != FORCE_DELETION) {
+ result = self->DeleteElementWithInterceptor(index);
+ } else {
+ result = self->GetElementsAccessor()->Delete(*self, index, mode);
+ }
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ if (preexists && !self->HasLocalElement(index))
+ EnqueueChangeRecord(self, "deleted", name, old_value);
+ }
+
+ return *hresult;
}
@@ -3964,38 +4193,60 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
uint32_t index = 0;
if (name->AsArrayIndex(&index)) {
return DeleteElement(index, mode);
- } else {
- LookupResult result(isolate);
- LocalLookup(name, &result);
- if (!result.IsProperty()) return isolate->heap()->true_value();
- // Ignore attributes if forcing a deletion.
- if (result.IsDontDelete() && mode != FORCE_DELETION) {
- if (mode == STRICT_DELETION) {
- // Deleting a non-configurable property in strict mode.
- HandleScope scope(isolate);
- Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
- return isolate->Throw(*isolate->factory()->NewTypeError(
- "strict_delete_property", HandleVector(args, 2)));
- }
- return isolate->heap()->false_value();
- }
- // Check for interceptor.
- if (result.type() == INTERCEPTOR) {
- // Skip interceptor if forcing a deletion.
- if (mode == FORCE_DELETION) {
- return DeletePropertyPostInterceptor(name, mode);
- }
- return DeletePropertyWithInterceptor(name);
+ }
+
+ LookupResult lookup(isolate);
+ LocalLookup(name, &lookup);
+ if (!lookup.IsFound()) return isolate->heap()->true_value();
+ // Ignore attributes if forcing a deletion.
+ if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
+ if (mode == STRICT_DELETION) {
+ // Deleting a non-configurable property in strict mode.
+ HandleScope scope(isolate);
+ Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
+ return isolate->Throw(*isolate->factory()->NewTypeError(
+ "strict_delete_property", HandleVector(args, 2)));
}
+ return isolate->heap()->false_value();
+ }
+
+ // From this point on everything needs to be handlified.
+ HandleScope scope(isolate);
+ Handle<JSObject> self(this);
+ Handle<String> hname(name);
+
+ Handle<Object> old_value(isolate->heap()->the_hole_value());
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ old_value = handle(lookup.GetLazyValue());
+ }
+ MaybeObject* result;
+
+ // Check for interceptor.
+ if (lookup.IsInterceptor()) {
+ // Skip interceptor if forcing a deletion.
+ if (mode == FORCE_DELETION) {
+ result = self->DeletePropertyPostInterceptor(*hname, mode);
+ } else {
+ result = self->DeletePropertyWithInterceptor(*hname);
+ }
+ } else {
// Normalize object if needed.
Object* obj;
- { MaybeObject* maybe_obj =
- NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ result = self->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+ if (!result->To(&obj)) return result;
// Make sure the properties are normalized before removing the entry.
- return DeleteNormalizedProperty(name, mode);
+ result = self->DeleteNormalizedProperty(*hname, mode);
}
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ if (!self->HasLocalProperty(*hname))
+ EnqueueChangeRecord(self, "deleted", hname, old_value);
+ }
+
+ return *hresult;
}
@@ -4018,9 +4269,9 @@ MaybeObject* JSReceiver::DeleteProperty(String* name, DeleteMode mode) {
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object) {
- ASSERT(kind == FAST_ELEMENTS ||
+ ASSERT(IsFastObjectElementsKind(kind) ||
kind == DICTIONARY_ELEMENTS);
- if (kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(kind)) {
int length = IsJSArray()
? Smi::cast(JSArray::cast(this)->length())->value()
: elements->length();
@@ -4072,12 +4323,15 @@ bool JSObject::ReferencesObject(Object* obj) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// Raw pixels and external arrays do not reference other
// objects.
break;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
break;
case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case DICTIONARY_ELEMENTS: {
FixedArray* elements = FixedArray::cast(this->elements());
if (ReferencesObjectFromElements(elements, kind, obj)) return true;
@@ -4093,7 +4347,8 @@ bool JSObject::ReferencesObject(Object* obj) {
}
// Check the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS : FAST_ELEMENTS;
+ kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
+ FAST_HOLEY_ELEMENTS;
if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
break;
}
@@ -4103,15 +4358,15 @@ bool JSObject::ReferencesObject(Object* obj) {
if (IsJSFunction()) {
// Get the constructor function for arguments array.
JSObject* arguments_boilerplate =
- heap->isolate()->context()->global_context()->
+ heap->isolate()->context()->native_context()->
arguments_boilerplate();
JSFunction* arguments_function =
JSFunction::cast(arguments_boilerplate->map()->constructor());
- // Get the context and don't check if it is the global context.
+ // Get the context and don't check if it is the native context.
JSFunction* f = JSFunction::cast(this);
Context* context = f->context();
- if (context->IsGlobalContext()) {
+ if (context->IsNativeContext()) {
return false;
}
@@ -4187,9 +4442,9 @@ MaybeObject* JSObject::PreventExtensions() {
// Do a map transition, other objects with this map may still
// be extensible.
Map* new_map;
- { MaybeObject* maybe = map()->CopyDropTransitions();
- if (!maybe->To<Map>(&new_map)) return maybe;
- }
+ MaybeObject* maybe = map()->Copy();
+ if (!maybe->To(&new_map)) return maybe;
+
new_map->set_is_extensible(false);
set_map(new_map);
ASSERT(!map()->is_extensible());
@@ -4209,29 +4464,27 @@ bool JSReceiver::IsSimpleEnum() {
o = JSObject::cast(o)->GetPrototype()) {
if (!o->IsJSObject()) return false;
JSObject* curr = JSObject::cast(o);
- if (!curr->map()->instance_descriptors()->HasEnumCache()) return false;
+ int enum_length = curr->map()->EnumLength();
+ if (enum_length == Map::kInvalidEnumCache) return false;
ASSERT(!curr->HasNamedInterceptor());
ASSERT(!curr->HasIndexedInterceptor());
ASSERT(!curr->IsAccessCheckNeeded());
if (curr->NumberOfEnumElements() > 0) return false;
- if (curr != this) {
- FixedArray* curr_fixed_array =
- FixedArray::cast(curr->map()->instance_descriptors()->GetEnumCache());
- if (curr_fixed_array->length() > 0) return false;
- }
+ if (curr != this && enum_length != 0) return false;
}
return true;
}
-int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
+int Map::NumberOfDescribedProperties(DescriptorFlag which,
+ PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details = descs->GetDetails(i);
- if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
- result++;
- }
+ int limit = which == ALL_DESCRIPTORS
+ ? descs->number_of_descriptors()
+ : NumberOfOwnDescriptors();
+ for (int i = 0; i < limit; i++) {
+ if ((descs->GetDetails(i).attributes() & filter) == 0) result++;
}
return result;
}
@@ -4239,10 +4492,9 @@ int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int Map::PropertyIndexFor(String* name) {
DescriptorArray* descs = instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (name->Equals(descs->GetKey(i)) && !descs->IsNullDescriptor(i)) {
- return descs->GetFieldIndex(i);
- }
+ int limit = NumberOfOwnDescriptors();
+ for (int i = 0; i < limit; i++) {
+ if (name->Equals(descs->GetKey(i))) return descs->GetFieldIndex(i);
}
return -1;
}
@@ -4250,8 +4502,9 @@ int Map::PropertyIndexFor(String* name) {
int Map::NextFreePropertyIndex() {
int max_index = -1;
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
DescriptorArray* descs = instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ for (int i = 0; i < number_of_own_descriptors; i++) {
if (descs->GetType(i) == FIELD) {
int current_index = descs->GetFieldIndex(i);
if (current_index > max_index) max_index = current_index;
@@ -4263,8 +4516,9 @@ int Map::NextFreePropertyIndex() {
AccessorDescriptor* Map::FindAccessor(String* name) {
DescriptorArray* descs = instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (name->Equals(descs->GetKey(i)) && descs->GetType(i) == CALLBACKS) {
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ for (int i = 0; i < number_of_own_descriptors; i++) {
+ if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) {
return descs->GetCallbacks(i);
}
}
@@ -4322,7 +4576,8 @@ void JSReceiver::LocalLookup(String* name, LookupResult* result,
}
-void JSReceiver::Lookup(String* name, LookupResult* result,
+void JSReceiver::Lookup(String* name,
+ LookupResult* result,
bool skip_fallback_interceptor) {
// Ecma-262 3rd 8.6.2.4
Heap* heap = GetHeap();
@@ -4332,20 +4587,20 @@ void JSReceiver::Lookup(String* name, LookupResult* result,
JSReceiver::cast(current)->LocalLookup(name,
result,
skip_fallback_interceptor);
- if (result->IsProperty()) return;
+ if (result->IsFound()) return;
}
result->NotFound();
}
-// Search object and it's prototype chain for callback properties.
-void JSObject::LookupCallback(String* name, LookupResult* result) {
+// Search object and its prototype chain for callback properties.
+void JSObject::LookupCallbackProperty(String* name, LookupResult* result) {
Heap* heap = GetHeap();
for (Object* current = this;
current != heap->null_value() && current->IsJSObject();
current = JSObject::cast(current)->GetPrototype()) {
JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
- if (result->IsFound() && result->type() == CALLBACKS) return;
+ if (result->IsPropertyCallbacks()) return;
}
result->NotFound();
}
@@ -4382,9 +4637,12 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
Object* setter,
PropertyAttributes attributes) {
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -4445,11 +4703,16 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
MaybeObject* JSObject::CreateAccessorPairFor(String* name) {
LookupResult result(GetHeap()->isolate());
LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty() && result.type() == CALLBACKS) {
- ASSERT(!result.IsDontDelete());
+ if (result.IsPropertyCallbacks()) {
+ // Note that the result can actually have IsDontDelete() == true when we
+ // e.g. have to fall back to the slow case while adding a setter after
+ // successfully reusing a map transition for a getter. Nevertheless, this is
+ // OK, because the assertion only holds for the whole addition of both
+ // accessors, not for the addition of each part. See first comment in
+ // DefinePropertyAccessor below.
Object* obj = result.GetCallbackObject();
if (obj->IsAccessorPair()) {
- return AccessorPair::cast(obj)->CopyWithoutTransitions();
+ return AccessorPair::cast(obj)->Copy();
}
}
return GetHeap()->AllocateAccessorPair();
@@ -4460,10 +4723,34 @@ MaybeObject* JSObject::DefinePropertyAccessor(String* name,
Object* getter,
Object* setter,
PropertyAttributes attributes) {
- AccessorPair* accessors;
- { MaybeObject* maybe_accessors = CreateAccessorPairFor(name);
- if (!maybe_accessors->To(&accessors)) return maybe_accessors;
+ // We could assert that the property is configurable here, but we would need
+ // to do a lookup, which seems to be a bit of overkill.
+ Heap* heap = GetHeap();
+ bool only_attribute_changes = getter->IsNull() && setter->IsNull();
+ if (HasFastProperties() && !only_attribute_changes &&
+ (map()->NumberOfOwnDescriptors() <
+ DescriptorArray::kMaxNumberOfDescriptors)) {
+ MaybeObject* getterOk = heap->undefined_value();
+ if (!getter->IsNull()) {
+ getterOk = DefineFastAccessor(name, ACCESSOR_GETTER, getter, attributes);
+ if (getterOk->IsFailure()) return getterOk;
+ }
+
+ MaybeObject* setterOk = heap->undefined_value();
+ if (getterOk != heap->null_value() && !setter->IsNull()) {
+ setterOk = DefineFastAccessor(name, ACCESSOR_SETTER, setter, attributes);
+ if (setterOk->IsFailure()) return setterOk;
+ }
+
+ if (getterOk != heap->null_value() && setterOk != heap->null_value()) {
+ return heap->undefined_value();
+ }
}
+
+ AccessorPair* accessors;
+ MaybeObject* maybe_accessors = CreateAccessorPairFor(name);
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
+
accessors->SetComponents(getter, setter);
return SetPropertyCallback(name, accessors, attributes);
}
@@ -4474,14 +4761,14 @@ bool JSObject::CanSetCallback(String* name) {
GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET));
// Check if there is an API defined callback object which prohibits
- // callback overwriting in this object or it's prototype chain.
+ // callback overwriting in this object or its prototype chain.
// This mechanism is needed for instance in a browser setting, where
// certain accessors such as window.location should not be allowed
// to be overwritten because allowing overwriting could potentially
// cause security problems.
LookupResult callback_result(GetIsolate());
- LookupCallback(name, &callback_result);
- if (callback_result.IsProperty()) {
+ LookupCallbackProperty(name, &callback_result);
+ if (callback_result.IsFound()) {
Object* obj = callback_result.GetCallbackObject();
if (obj->IsAccessorInfo() &&
AccessorInfo::cast(obj)->prohibits_overwriting()) {
@@ -4535,17 +4822,17 @@ MaybeObject* JSObject::SetPropertyCallback(String* name,
Object* structure,
PropertyAttributes attributes) {
// Normalize object to make this operation simple.
- { MaybeObject* maybe_ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (maybe_ok->IsFailure()) return maybe_ok;
- }
+ MaybeObject* maybe_ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
+ if (maybe_ok->IsFailure()) return maybe_ok;
// For the global object allocate a new map to invalidate the global inline
// caches which have a global property cell reference directly in the code.
if (IsGlobalObject()) {
Map* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- }
+ MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ ASSERT(new_map->is_dictionary_map());
+
set_map(new_map);
// When running crankshaft, changing the map is not enough. We
// need to deoptimize all functions that rely on this global
@@ -4555,9 +4842,8 @@ MaybeObject* JSObject::SetPropertyCallback(String* name,
// Update the dictionary with the new CALLBACKS property.
PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
- { MaybeObject* maybe_ok = SetNormalizedProperty(name, structure, details);
- if (maybe_ok->IsFailure()) return maybe_ok;
- }
+ maybe_ok = SetNormalizedProperty(name, structure, details);
+ if (maybe_ok->IsFailure()) return maybe_ok;
return GetHeap()->undefined_value();
}
@@ -4573,14 +4859,14 @@ void JSObject::DefineAccessor(Handle<JSObject> object,
object->DefineAccessor(*name, *getter, *setter, attributes));
}
-MaybeObject* JSObject::DefineAccessor(String* name,
- Object* getter,
- Object* setter,
+MaybeObject* JSObject::DefineAccessor(String* name_raw,
+ Object* getter_raw,
+ Object* setter_raw,
PropertyAttributes attributes) {
Isolate* isolate = GetIsolate();
// Check access rights if needed.
if (IsAccessCheckNeeded() &&
- !isolate->MayNamedAccess(this, name, v8::ACCESS_SET)) {
+ !isolate->MayNamedAccess(this, name_raw, v8::ACCESS_SET)) {
isolate->ReportFailedAccessCheck(this, v8::ACCESS_SET);
return isolate->heap()->undefined_value();
}
@@ -4590,7 +4876,7 @@ MaybeObject* JSObject::DefineAccessor(String* name,
if (proto->IsNull()) return this;
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->DefineAccessor(
- name, getter, setter, attributes);
+ name_raw, getter_raw, setter_raw, attributes);
}
// Make sure that the top context does not change when doing callbacks or
@@ -4598,14 +4884,160 @@ MaybeObject* JSObject::DefineAccessor(String* name,
AssertNoContextChange ncc;
// Try to flatten before operating on the string.
- name->TryFlatten();
+ name_raw->TryFlatten();
- if (!CanSetCallback(name)) return isolate->heap()->undefined_value();
+ if (!CanSetCallback(name_raw)) return isolate->heap()->undefined_value();
+
+ // From this point on everything needs to be handlified.
+ HandleScope scope(GetIsolate());
+ Handle<JSObject> self(this);
+ Handle<String> name(name_raw);
+ Handle<Object> getter(getter_raw);
+ Handle<Object> setter(setter_raw);
uint32_t index = 0;
- return name->AsArrayIndex(&index) ?
- DefineElementAccessor(index, getter, setter, attributes) :
- DefinePropertyAccessor(name, getter, setter, attributes);
+ bool is_element = name->AsArrayIndex(&index);
+
+ Handle<Object> old_value(isolate->heap()->the_hole_value());
+ bool preexists = false;
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ if (is_element) {
+ preexists = HasLocalElement(index);
+ if (preexists) {
+ // TODO(observe): distinguish the case where it's an accessor
+ old_value = Object::GetElement(self, index);
+ }
+ } else {
+ LookupResult lookup(isolate);
+ LocalLookup(*name, &lookup);
+ preexists = lookup.IsProperty();
+ if (preexists) old_value = handle(lookup.GetLazyValue());
+ }
+ }
+
+ MaybeObject* result = is_element ?
+ self->DefineElementAccessor(index, *getter, *setter, attributes) :
+ self->DefinePropertyAccessor(*name, *getter, *setter, attributes);
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ const char* type = preexists ? "reconfigured" : "new";
+ EnqueueChangeRecord(self, type, name, old_value);
+ }
+
+ return *hresult;
+}
+
+
+static MaybeObject* TryAccessorTransition(JSObject* self,
+ Map* transitioned_map,
+ int target_descriptor,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes) {
+ DescriptorArray* descs = transitioned_map->instance_descriptors();
+ PropertyDetails details = descs->GetDetails(target_descriptor);
+
+ // If the transition target was not callbacks, fall back to the slow case.
+ if (details.type() != CALLBACKS) return self->GetHeap()->null_value();
+ Object* descriptor = descs->GetCallbacksObject(target_descriptor);
+ if (!descriptor->IsAccessorPair()) return self->GetHeap()->null_value();
+
+ Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
+ PropertyAttributes target_attributes = details.attributes();
+
+ // Reuse transition if adding same accessor with same attributes.
+ if (target_accessor == accessor && target_attributes == attributes) {
+ self->set_map(transitioned_map);
+ return self;
+ }
+
+ // If either not the same accessor, or not the same attributes, fall back to
+ // the slow case.
+ return self->GetHeap()->null_value();
+}
+
+
+MaybeObject* JSObject::DefineFastAccessor(String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes) {
+ ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
+ LookupResult result(GetIsolate());
+ LocalLookup(name, &result);
+
+ if (result.IsFound()
+ && !result.IsPropertyCallbacks()
+ && !result.IsTransition()) return GetHeap()->null_value();
+
+ // Return success if the same accessor with the same attributes already exist.
+ AccessorPair* source_accessors = NULL;
+ if (result.IsPropertyCallbacks()) {
+ Object* callback_value = result.GetCallbackObject();
+ if (callback_value->IsAccessorPair()) {
+ source_accessors = AccessorPair::cast(callback_value);
+ Object* entry = source_accessors->get(component);
+ if (entry == accessor && result.GetAttributes() == attributes) {
+ return this;
+ }
+ } else {
+ return GetHeap()->null_value();
+ }
+
+ int descriptor_number = result.GetDescriptorIndex();
+
+ map()->LookupTransition(this, name, &result);
+
+ if (result.IsFound()) {
+ Map* target = result.GetTransitionTarget();
+ ASSERT(target->NumberOfOwnDescriptors() ==
+ map()->NumberOfOwnDescriptors());
+ // This works since descriptors are sorted in order of addition.
+ ASSERT(map()->instance_descriptors()->GetKey(descriptor_number) == name);
+ return TryAccessorTransition(
+ this, target, descriptor_number, component, accessor, attributes);
+ }
+ } else {
+ // If not, lookup a transition.
+ map()->LookupTransition(this, name, &result);
+
+ // If there is a transition, try to follow it.
+ if (result.IsFound()) {
+ Map* target = result.GetTransitionTarget();
+ int descriptor_number = target->LastAdded();
+ ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
+ ->Equals(name));
+ return TryAccessorTransition(
+ this, target, descriptor_number, component, accessor, attributes);
+ }
+ }
+
+ // If there is no transition yet, add a transition to the a new accessor pair
+ // containing the accessor.
+ AccessorPair* accessors;
+ MaybeObject* maybe_accessors;
+
+ // Allocate a new pair if there were no source accessors. Otherwise, copy the
+ // pair and modify the accessor.
+ if (source_accessors != NULL) {
+ maybe_accessors = source_accessors->Copy();
+ } else {
+ maybe_accessors = GetHeap()->AllocateAccessorPair();
+ }
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
+ accessors->set(component, accessor);
+
+ CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
+
+ Map* new_map;
+ MaybeObject* maybe_new_map =
+ map()->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
+ set_map(new_map);
+ return this;
}
@@ -4633,9 +5065,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
// Try to flatten before operating on the string.
name->TryFlatten();
- if (!CanSetCallback(name)) {
- return isolate->heap()->undefined_value();
- }
+ if (!CanSetCallback(name)) return isolate->heap()->undefined_value();
uint32_t index = 0;
bool is_element = name->AsArrayIndex(&index);
@@ -4645,9 +5075,12 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
// Accessors overwrite previous callbacks (cf. with getters/setters).
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -4668,23 +5101,22 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
break;
}
- { MaybeObject* maybe_ok =
- SetElementCallback(index, info, info->property_attributes());
- if (maybe_ok->IsFailure()) return maybe_ok;
- }
+ MaybeObject* maybe_ok =
+ SetElementCallback(index, info, info->property_attributes());
+ if (maybe_ok->IsFailure()) return maybe_ok;
} else {
// Lookup the name.
LookupResult result(isolate);
LocalLookup(name, &result);
// ES5 forbids turning a property into an accessor if it's not
// configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
- if (result.IsProperty() && (result.IsReadOnly() || result.IsDontDelete())) {
+ if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
return isolate->heap()->undefined_value();
}
- { MaybeObject* maybe_ok =
- SetPropertyCallback(name, info, info->property_attributes());
- if (maybe_ok->IsFailure()) return maybe_ok;
- }
+
+ MaybeObject* maybe_ok =
+ SetPropertyCallback(name, info, info->property_attributes());
+ if (maybe_ok->IsFailure()) return maybe_ok;
}
return this;
@@ -4710,9 +5142,9 @@ Object* JSObject::LookupAccessor(String* name, AccessorComponent component) {
if (name->AsArrayIndex(&index)) {
for (Object* obj = this;
obj != heap->null_value();
- obj = JSObject::cast(obj)->GetPrototype()) {
- JSObject* js_object = JSObject::cast(obj);
- if (js_object->HasDictionaryElements()) {
+ obj = JSReceiver::cast(obj)->GetPrototype()) {
+ if (obj->IsJSObject() && JSObject::cast(obj)->HasDictionaryElements()) {
+ JSObject* js_object = JSObject::cast(obj);
SeededNumberDictionary* dictionary = js_object->element_dictionary();
int entry = dictionary->FindEntry(index);
if (entry != SeededNumberDictionary::kNotFound) {
@@ -4727,12 +5159,12 @@ Object* JSObject::LookupAccessor(String* name, AccessorComponent component) {
} else {
for (Object* obj = this;
obj != heap->null_value();
- obj = JSObject::cast(obj)->GetPrototype()) {
+ obj = JSReceiver::cast(obj)->GetPrototype()) {
LookupResult result(heap->isolate());
- JSObject::cast(obj)->LocalLookup(name, &result);
- if (result.IsProperty()) {
+ JSReceiver::cast(obj)->LocalLookup(name, &result);
+ if (result.IsFound()) {
if (result.IsReadOnly()) return heap->undefined_value();
- if (result.type() == CALLBACKS) {
+ if (result.IsPropertyCallbacks()) {
Object* obj = result.GetCallbackObject();
if (obj->IsAccessorPair()) {
return AccessorPair::cast(obj)->GetComponent(component);
@@ -4747,8 +5179,9 @@ Object* JSObject::LookupAccessor(String* name, AccessorComponent component) {
Object* JSObject::SlowReverseLookup(Object* value) {
if (HasFastProperties()) {
+ int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
DescriptorArray* descs = map()->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ for (int i = 0; i < number_of_own_descriptors; i++) {
if (descs->GetType(i) == FIELD) {
if (FastPropertyAt(descs->GetFieldIndex(i)) == value) {
return descs->GetKey(i);
@@ -4766,45 +5199,21 @@ Object* JSObject::SlowReverseLookup(Object* value) {
}
-MaybeObject* Map::CopyDropDescriptors() {
- Heap* heap = GetHeap();
- Object* result;
- { MaybeObject* maybe_result =
- heap->AllocateMap(instance_type(), instance_size());
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- Map::cast(result)->set_prototype(prototype());
- Map::cast(result)->set_constructor(constructor());
- // Don't copy descriptors, so map transitions always remain a forest.
- // If we retained the same descriptors we would have two maps
- // pointing to the same transition which is bad because the garbage
- // collector relies on being able to reverse pointers from transitions
- // to maps. If properties need to be retained use CopyDropTransitions.
- Map::cast(result)->clear_instance_descriptors();
- // Please note instance_type and instance_size are set when allocated.
- Map::cast(result)->set_inobject_properties(inobject_properties());
- Map::cast(result)->set_unused_property_fields(unused_property_fields());
-
- // If the map has pre-allocated properties always start out with a descriptor
- // array describing these properties.
- if (pre_allocated_property_fields() > 0) {
- ASSERT(constructor()->IsJSFunction());
- JSFunction* ctor = JSFunction::cast(constructor());
- Object* descriptors;
- { MaybeObject* maybe_descriptors =
- ctor->initial_map()->instance_descriptors()->RemoveTransitions();
- if (!maybe_descriptors->ToObject(&descriptors)) return maybe_descriptors;
- }
- Map::cast(result)->set_instance_descriptors(
- DescriptorArray::cast(descriptors));
- Map::cast(result)->set_pre_allocated_property_fields(
- pre_allocated_property_fields());
- }
- Map::cast(result)->set_bit_field(bit_field());
- Map::cast(result)->set_bit_field2(bit_field2());
- Map::cast(result)->set_bit_field3(bit_field3());
- Map::cast(result)->set_is_shared(false);
- Map::cast(result)->ClearCodeCache(heap);
+MaybeObject* Map::RawCopy(int instance_size) {
+ Map* result;
+ MaybeObject* maybe_result =
+ GetHeap()->AllocateMap(instance_type(), instance_size);
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ result->set_prototype(prototype());
+ result->set_constructor(constructor());
+ result->set_bit_field(bit_field());
+ result->set_bit_field2(bit_field2());
+ int new_bit_field3 = bit_field3();
+ new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
+ new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
+ new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCache);
+ result->set_bit_field3(new_bit_field3);
return result;
}
@@ -4816,49 +5225,351 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
new_instance_size -= inobject_properties() * kPointerSize;
}
- Object* result;
- { MaybeObject* maybe_result =
- GetHeap()->AllocateMap(instance_type(), new_instance_size);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
+ Map* result;
+ MaybeObject* maybe_result = RawCopy(new_instance_size);
+ if (!maybe_result->To(&result)) return maybe_result;
if (mode != CLEAR_INOBJECT_PROPERTIES) {
- Map::cast(result)->set_inobject_properties(inobject_properties());
+ result->set_inobject_properties(inobject_properties());
}
- Map::cast(result)->set_prototype(prototype());
- Map::cast(result)->set_constructor(constructor());
+ result->set_code_cache(code_cache());
+ result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
+ result->set_dictionary_map(true);
- Map::cast(result)->set_bit_field(bit_field());
- Map::cast(result)->set_bit_field2(bit_field2());
- Map::cast(result)->set_bit_field3(bit_field3());
+#ifdef VERIFY_HEAP
+ if (FLAG_verify_heap && result->is_shared()) {
+ result->SharedMapVerify();
+ }
+#endif
- Map::cast(result)->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
+ return result;
+}
-#ifdef DEBUG
- if (FLAG_verify_heap && Map::cast(result)->is_shared()) {
- Map::cast(result)->SharedMapVerify();
+
+MaybeObject* Map::CopyDropDescriptors() {
+ Map* result;
+ MaybeObject* maybe_result = RawCopy(instance_size());
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ // Please note instance_type and instance_size are set when allocated.
+ result->set_inobject_properties(inobject_properties());
+ result->set_unused_property_fields(unused_property_fields());
+
+ result->set_pre_allocated_property_fields(pre_allocated_property_fields());
+ result->set_is_shared(false);
+ result->ClearCodeCache(GetHeap());
+ return result;
+}
+
+
+MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors,
+ Descriptor* descriptor) {
+ // Sanity check. This path is only to be taken if the map owns its descriptor
+ // array, implying that its NumberOfOwnDescriptors equals the number of
+ // descriptors in the descriptor array.
+ ASSERT(NumberOfOwnDescriptors() ==
+ instance_descriptors()->number_of_descriptors());
+ Map* result;
+ MaybeObject* maybe_result = CopyDropDescriptors();
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ String* name = descriptor->GetKey();
+
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions =
+ AddTransition(name, result, SIMPLE_TRANSITION);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+
+ int old_size = descriptors->number_of_descriptors();
+
+ DescriptorArray* new_descriptors;
+
+ if (descriptors->NumberOfSlackDescriptors() > 0) {
+ new_descriptors = descriptors;
+ new_descriptors->Append(descriptor);
+ } else {
+ // Descriptor arrays grow by 50%.
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
+ old_size, old_size < 4 ? 1 : old_size / 2);
+ if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+
+ DescriptorArray::WhitenessWitness witness(new_descriptors);
+
+ // Copy the descriptors, inserting a descriptor.
+ for (int i = 0; i < old_size; ++i) {
+ new_descriptors->CopyFrom(i, descriptors, i, witness);
+ }
+
+ new_descriptors->Append(descriptor, witness);
+
+ if (old_size > 0) {
+ // If the source descriptors had an enum cache we copy it. This ensures
+ // that the maps to which we push the new descriptor array back can rely
+ // on a cache always being available once it is set. If the map has more
+ // enumerated descriptors than available in the original cache, the cache
+ // will be lazily replaced by the extended cache when needed.
+ if (descriptors->HasEnumCache()) {
+ new_descriptors->CopyEnumCacheFrom(descriptors);
+ }
+
+ Map* map;
+ // Replace descriptors by new_descriptors in all maps that share it.
+ for (Object* current = GetBackPointer();
+ !current->IsUndefined();
+ current = map->GetBackPointer()) {
+ map = Map::cast(current);
+ if (map->instance_descriptors() != descriptors) break;
+ map->set_instance_descriptors(new_descriptors);
+ }
+
+ set_instance_descriptors(new_descriptors);
+ }
}
-#endif
+
+ result->SetBackPointer(this);
+ result->InitializeDescriptors(new_descriptors);
+ ASSERT(result->NumberOfOwnDescriptors() == NumberOfOwnDescriptors() + 1);
+
+ set_transitions(transitions);
+ set_owns_descriptors(false);
return result;
}
-MaybeObject* Map::CopyDropTransitions() {
- Object* new_map;
- { MaybeObject* maybe_new_map = CopyDropDescriptors();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors,
+ String* name,
+ TransitionFlag flag,
+ int descriptor_index) {
+ ASSERT(descriptors->IsSortedNoDuplicates());
+
+ Map* result;
+ MaybeObject* maybe_result = CopyDropDescriptors();
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ result->InitializeDescriptors(descriptors);
+
+ if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) {
+ TransitionArray* transitions;
+ SimpleTransitionFlag simple_flag =
+ (descriptor_index == descriptors->number_of_descriptors() - 1)
+ ? SIMPLE_TRANSITION
+ : FULL_TRANSITION;
+ MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+
+ set_transitions(transitions);
+ result->SetBackPointer(this);
}
- Object* descriptors;
- { MaybeObject* maybe_descriptors =
- instance_descriptors()->RemoveTransitions();
- if (!maybe_descriptors->ToObject(&descriptors)) return maybe_descriptors;
+
+ return result;
+}
+
+
+MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) {
+ if (flag == INSERT_TRANSITION) {
+ ASSERT(!HasElementsTransition() ||
+ ((elements_transition_map()->elements_kind() == DICTIONARY_ELEMENTS ||
+ IsExternalArrayElementsKind(
+ elements_transition_map()->elements_kind())) &&
+ (kind == DICTIONARY_ELEMENTS ||
+ IsExternalArrayElementsKind(kind))));
+ ASSERT(!IsFastElementsKind(kind) ||
+ IsMoreGeneralElementsKindTransition(elements_kind(), kind));
+ ASSERT(kind != elements_kind());
+ }
+
+ bool insert_transition =
+ flag == INSERT_TRANSITION && !HasElementsTransition();
+
+ if (insert_transition && owns_descriptors()) {
+ // In case the map owned its own descriptors, share the descriptors and
+ // transfer ownership to the new map.
+ Map* new_map;
+ MaybeObject* maybe_new_map = CopyDropDescriptors();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
+ MaybeObject* added_elements = set_elements_transition_map(new_map);
+ if (added_elements->IsFailure()) return added_elements;
+
+ new_map->set_elements_kind(kind);
+ new_map->InitializeDescriptors(instance_descriptors());
+ new_map->SetBackPointer(this);
+ set_owns_descriptors(false);
+ return new_map;
}
- cast(new_map)->set_instance_descriptors(DescriptorArray::cast(descriptors));
+
+ // In case the map did not own its own descriptors, a split is forced by
+ // copying the map; creating a new descriptor array cell.
+ // Create a new free-floating map only if we are not allowed to store it.
+ Map* new_map;
+ MaybeObject* maybe_new_map = Copy();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
+ new_map->set_elements_kind(kind);
+
+ if (insert_transition) {
+ MaybeObject* added_elements = set_elements_transition_map(new_map);
+ if (added_elements->IsFailure()) return added_elements;
+ new_map->SetBackPointer(this);
+ }
+
return new_map;
}
+
+MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() {
+ if (pre_allocated_property_fields() == 0) return CopyDropDescriptors();
+
+ // If the map has pre-allocated properties always start out with a descriptor
+ // array describing these properties.
+ ASSERT(constructor()->IsJSFunction());
+ JSFunction* ctor = JSFunction::cast(constructor());
+ Map* map = ctor->initial_map();
+ DescriptorArray* descriptors = map->instance_descriptors();
+
+ int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ DescriptorArray* new_descriptors;
+ MaybeObject* maybe_descriptors =
+ descriptors->CopyUpTo(number_of_own_descriptors);
+ if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+
+ return CopyReplaceDescriptors(new_descriptors, NULL, OMIT_TRANSITION, 0);
+}
+
+
+MaybeObject* Map::Copy() {
+ DescriptorArray* descriptors = instance_descriptors();
+ DescriptorArray* new_descriptors;
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ MaybeObject* maybe_descriptors =
+ descriptors->CopyUpTo(number_of_own_descriptors);
+ if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+
+ return CopyReplaceDescriptors(new_descriptors, NULL, OMIT_TRANSITION, 0);
+}
+
+
+MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
+ TransitionFlag flag) {
+ DescriptorArray* descriptors = instance_descriptors();
+
+ // Ensure the key is a symbol.
+ MaybeObject* maybe_failure = descriptor->KeyToSymbol();
+ if (maybe_failure->IsFailure()) return maybe_failure;
+
+ int old_size = NumberOfOwnDescriptors();
+ int new_size = old_size + 1;
+ descriptor->SetEnumerationIndex(new_size);
+
+ if (flag == INSERT_TRANSITION &&
+ owns_descriptors() &&
+ CanHaveMoreTransitions()) {
+ return ShareDescriptor(descriptors, descriptor);
+ }
+
+ DescriptorArray* new_descriptors;
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(old_size, 1);
+ if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+
+ DescriptorArray::WhitenessWitness witness(new_descriptors);
+
+ // Copy the descriptors, inserting a descriptor.
+ for (int i = 0; i < old_size; ++i) {
+ new_descriptors->CopyFrom(i, descriptors, i, witness);
+ }
+
+ if (old_size != descriptors->number_of_descriptors()) {
+ new_descriptors->SetNumberOfDescriptors(new_size);
+ new_descriptors->Set(old_size, descriptor, witness);
+ new_descriptors->Sort();
+ } else {
+ new_descriptors->Append(descriptor, witness);
+ }
+
+ String* key = descriptor->GetKey();
+ int insertion_index = new_descriptors->number_of_descriptors() - 1;
+
+ return CopyReplaceDescriptors(new_descriptors, key, flag, insertion_index);
+}
+
+
+MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor,
+ TransitionFlag flag) {
+ DescriptorArray* old_descriptors = instance_descriptors();
+
+ // Ensure the key is a symbol.
+ MaybeObject* maybe_result = descriptor->KeyToSymbol();
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ // We replace the key if it is already present.
+ int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this);
+ if (index != DescriptorArray::kNotFound) {
+ return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag);
+ }
+ return CopyAddDescriptor(descriptor, flag);
+}
+
+
+MaybeObject* DescriptorArray::CopyUpTo(int enumeration_index) {
+ if (enumeration_index == 0) return GetHeap()->empty_descriptor_array();
+
+ int size = enumeration_index;
+
+ DescriptorArray* descriptors;
+ MaybeObject* maybe_descriptors = Allocate(size);
+ if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
+ DescriptorArray::WhitenessWitness witness(descriptors);
+
+ for (int i = 0; i < size; ++i) {
+ descriptors->CopyFrom(i, this, i, witness);
+ }
+
+ if (number_of_descriptors() != enumeration_index) descriptors->Sort();
+
+ return descriptors;
+}
+
+
+MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors,
+ Descriptor* descriptor,
+ int insertion_index,
+ TransitionFlag flag) {
+ // Ensure the key is a symbol.
+ MaybeObject* maybe_failure = descriptor->KeyToSymbol();
+ if (maybe_failure->IsFailure()) return maybe_failure;
+
+ String* key = descriptor->GetKey();
+ ASSERT(key == descriptors->GetKey(insertion_index));
+
+ int new_size = NumberOfOwnDescriptors();
+ ASSERT(0 <= insertion_index && insertion_index < new_size);
+
+ PropertyDetails details = descriptors->GetDetails(insertion_index);
+ ASSERT_LE(details.descriptor_index(), new_size);
+ descriptor->SetEnumerationIndex(details.descriptor_index());
+
+ DescriptorArray* new_descriptors;
+ MaybeObject* maybe_descriptors = DescriptorArray::Allocate(new_size);
+ if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
+ DescriptorArray::WhitenessWitness witness(new_descriptors);
+
+ for (int i = 0; i < new_size; ++i) {
+ if (i == insertion_index) {
+ new_descriptors->Set(i, descriptor, witness);
+ } else {
+ new_descriptors->CopyFrom(i, descriptors, i, witness);
+ }
+ }
+
+ // Re-sort if descriptors were removed.
+ if (new_size != descriptors->length()) new_descriptors->Sort();
+
+ return CopyReplaceDescriptors(new_descriptors, key, flag, insertion_index);
+}
+
+
void Map::UpdateCodeCache(Handle<Map> map,
Handle<String> name,
Handle<Code> code) {
@@ -4867,7 +5578,10 @@ void Map::UpdateCodeCache(Handle<Map> map,
map->UpdateCodeCache(*name, *code));
}
+
MaybeObject* Map::UpdateCodeCache(String* name, Code* code) {
+ ASSERT(!is_shared() || code->allowed_in_shared_map_code_cache());
+
// Allocate the code cache if not present.
if (code_cache()->IsFixedArray()) {
Object* result;
@@ -4913,78 +5627,43 @@ void Map::RemoveFromCodeCache(String* name, Code* code, int index) {
// field of the contens array while it is running.
class IntrusiveMapTransitionIterator {
public:
- explicit IntrusiveMapTransitionIterator(DescriptorArray* descriptor_array)
- : descriptor_array_(descriptor_array) { }
+ explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array)
+ : transition_array_(transition_array) { }
void Start() {
ASSERT(!IsIterating());
- if (HasContentArray()) *ContentHeader() = Smi::FromInt(0);
+ *TransitionArrayHeader() = Smi::FromInt(0);
}
bool IsIterating() {
- return HasContentArray() && (*ContentHeader())->IsSmi();
+ return (*TransitionArrayHeader())->IsSmi();
}
Map* Next() {
ASSERT(IsIterating());
- FixedArray* contents = ContentArray();
- // Attention, tricky index manipulation ahead: Every entry in the contents
- // array consists of a value/details pair, so the index is typically even.
- // An exception is made for CALLBACKS entries: An even index means we look
- // at its getter, and an odd index means we look at its setter.
- int index = Smi::cast(*ContentHeader())->value();
- while (index < contents->length()) {
- PropertyDetails details(Smi::cast(contents->get(index | 1)));
- switch (details.type()) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
- // We definitely have a map transition.
- *ContentHeader() = Smi::FromInt(index + 2);
- return static_cast<Map*>(contents->get(index));
- case CALLBACKS: {
- // We might have a map transition in a getter or in a setter.
- AccessorPair* accessors =
- static_cast<AccessorPair*>(contents->get(index & ~1));
- Object* accessor =
- ((index & 1) == 0) ? accessors->getter() : accessors->setter();
- index++;
- if (accessor->IsMap()) {
- *ContentHeader() = Smi::FromInt(index);
- return static_cast<Map*>(accessor);
- }
- break;
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- // We definitely have no map transition.
- index += 2;
- break;
- }
+ int index = Smi::cast(*TransitionArrayHeader())->value();
+ int number_of_transitions = transition_array_->number_of_transitions();
+ while (index < number_of_transitions) {
+ *TransitionArrayHeader() = Smi::FromInt(index + 1);
+ return transition_array_->GetTarget(index);
}
- *ContentHeader() = descriptor_array_->GetHeap()->fixed_array_map();
+
+ if (index == number_of_transitions &&
+ transition_array_->HasElementsTransition()) {
+ Map* elements_transition = transition_array_->elements_transition();
+ *TransitionArrayHeader() = Smi::FromInt(index + 1);
+ return elements_transition;
+ }
+ *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map();
return NULL;
}
private:
- bool HasContentArray() {
- return descriptor_array_-> length() > DescriptorArray::kContentArrayIndex;
- }
-
- FixedArray* ContentArray() {
- Object* array = descriptor_array_->get(DescriptorArray::kContentArrayIndex);
- return static_cast<FixedArray*>(array);
- }
-
- Object** ContentHeader() {
- return HeapObject::RawField(ContentArray(), DescriptorArray::kMapOffset);
+ Object** TransitionArrayHeader() {
+ return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset);
}
- DescriptorArray* descriptor_array_;
+ TransitionArray* transition_array_;
};
@@ -4997,11 +5676,11 @@ class IntrusivePrototypeTransitionIterator {
void Start() {
ASSERT(!IsIterating());
- if (HasTransitions()) *Header() = Smi::FromInt(0);
+ *Header() = Smi::FromInt(0);
}
bool IsIterating() {
- return HasTransitions() && (*Header())->IsSmi();
+ return (*Header())->IsSmi();
}
Map* Next() {
@@ -5016,23 +5695,17 @@ class IntrusivePrototypeTransitionIterator {
}
private:
- bool HasTransitions() {
- return proto_trans_->map()->IsSmi() || proto_trans_->IsFixedArray();
- }
-
Object** Header() {
return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
}
int NumberOfTransitions() {
- ASSERT(HasTransitions());
FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
return Smi::cast(num)->value();
}
Map* GetTransition(int transitionNumber) {
- ASSERT(HasTransitions());
FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
}
@@ -5085,25 +5758,40 @@ class TraversableMap : public Map {
// Start iterating over this map's children, possibly destroying a FixedArray
// map (see explanation above).
void ChildIteratorStart() {
- IntrusiveMapTransitionIterator(instance_descriptors()).Start();
- IntrusivePrototypeTransitionIterator(
- unchecked_prototype_transitions()).Start();
+ if (HasTransitionArray()) {
+ if (HasPrototypeTransitions()) {
+ IntrusivePrototypeTransitionIterator(GetPrototypeTransitions()).Start();
+ }
+
+ IntrusiveMapTransitionIterator(transitions()).Start();
+ }
}
// If we have an unvisited child map, return that one and advance. If we have
// none, return NULL and reset any destroyed FixedArray maps.
TraversableMap* ChildIteratorNext() {
- IntrusiveMapTransitionIterator descriptor_iterator(instance_descriptors());
- if (descriptor_iterator.IsIterating()) {
- Map* next = descriptor_iterator.Next();
- if (next != NULL) return static_cast<TraversableMap*>(next);
+ TransitionArray* transition_array = unchecked_transition_array();
+ if (!transition_array->map()->IsSmi() &&
+ !transition_array->IsTransitionArray()) {
+ return NULL;
}
- IntrusivePrototypeTransitionIterator
- proto_iterator(unchecked_prototype_transitions());
- if (proto_iterator.IsIterating()) {
- Map* next = proto_iterator.Next();
+
+ if (transition_array->HasPrototypeTransitions()) {
+ HeapObject* proto_transitions =
+ transition_array->UncheckedPrototypeTransitions();
+ IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions);
+ if (proto_iterator.IsIterating()) {
+ Map* next = proto_iterator.Next();
+ if (next != NULL) return static_cast<TraversableMap*>(next);
+ }
+ }
+
+ IntrusiveMapTransitionIterator transition_iterator(transition_array);
+ if (transition_iterator.IsIterating()) {
+ Map* next = transition_iterator.Next();
if (next != NULL) return static_cast<TraversableMap*>(next);
}
+
return NULL;
}
};
@@ -5134,7 +5822,7 @@ MaybeObject* CodeCache::Update(String* name, Code* code) {
// The number of monomorphic stubs for normal load/store/call IC's can grow to
// a large number and therefore they need to go into a hash table. They are
// used to load global properties from cells.
- if (code->type() == NORMAL) {
+ if (code->type() == Code::NORMAL) {
// Make sure that a hash table is allocated for the normal load code cache.
if (normal_type_cache()->IsUndefined()) {
Object* result;
@@ -5225,7 +5913,7 @@ MaybeObject* CodeCache::UpdateNormalTypeCache(String* name, Code* code) {
Object* CodeCache::Lookup(String* name, Code::Flags flags) {
- if (Code::ExtractTypeFromFlags(flags) == NORMAL) {
+ if (Code::ExtractTypeFromFlags(flags) == Code::NORMAL) {
return LookupNormalTypeCache(name, flags);
} else {
return LookupDefaultCache(name, flags);
@@ -5263,7 +5951,7 @@ Object* CodeCache::LookupNormalTypeCache(String* name, Code::Flags flags) {
int CodeCache::GetIndex(Object* name, Code* code) {
- if (code->type() == NORMAL) {
+ if (code->type() == Code::NORMAL) {
if (normal_type_cache()->IsUndefined()) return -1;
CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
return cache->GetIndex(String::cast(name), code->flags());
@@ -5279,7 +5967,7 @@ int CodeCache::GetIndex(Object* name, Code* code) {
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
- if (code->type() == NORMAL) {
+ if (code->type() == Code::NORMAL) {
ASSERT(!normal_type_cache()->IsUndefined());
CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
ASSERT(cache->GetIndex(String::cast(name), code->flags()) == index);
@@ -5597,7 +6285,7 @@ MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
MaybeObject* maybe_result =
accessor->AddElementsToFixedArray(NULL, NULL, this, other);
FixedArray* result;
- if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
+ if (!maybe_result->To(&result)) return maybe_result;
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
for (int i = 0; i < result->length(); i++) {
@@ -5653,234 +6341,80 @@ bool FixedArray::IsEqualTo(FixedArray* other) {
#endif
-MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
+MaybeObject* DescriptorArray::Allocate(int number_of_descriptors, int slack) {
Heap* heap = Isolate::Current()->heap();
- if (number_of_descriptors == 0) {
- return heap->empty_descriptor_array();
- }
- // Allocate the array of keys.
- Object* array;
- { MaybeObject* maybe_array =
- heap->AllocateFixedArray(ToKeyIndex(number_of_descriptors));
- if (!maybe_array->ToObject(&array)) return maybe_array;
- }
// Do not use DescriptorArray::cast on incomplete object.
- FixedArray* result = FixedArray::cast(array);
+ int size = number_of_descriptors + slack;
+ if (size == 0) return heap->empty_descriptor_array();
+ FixedArray* result;
+ // Allocate the array of keys.
+ MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size));
+ if (!maybe_array->To(&result)) return maybe_array;
- // Allocate the content array and set it in the descriptor array.
- { MaybeObject* maybe_array =
- heap->AllocateFixedArray(number_of_descriptors << 1);
- if (!maybe_array->ToObject(&array)) return maybe_array;
- }
- result->set(kBitField3StorageIndex, Smi::FromInt(0));
- result->set(kContentArrayIndex, array);
- result->set(kEnumerationIndexIndex,
- Smi::FromInt(PropertyDetails::kInitialIndex));
+ result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
+ result->set(kEnumCacheIndex, Smi::FromInt(0));
return result;
}
+void DescriptorArray::ClearEnumCache() {
+ set(kEnumCacheIndex, Smi::FromInt(0));
+}
+
+
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
FixedArray* new_cache,
Object* new_index_cache) {
ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
- if (HasEnumCache()) {
- FixedArray::cast(get(kEnumerationIndexIndex))->
- set(kEnumCacheBridgeCacheIndex, new_cache);
- FixedArray::cast(get(kEnumerationIndexIndex))->
- set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
- } else {
- if (IsEmpty()) return; // Do nothing for empty descriptor array.
- FixedArray::cast(bridge_storage)->
- set(kEnumCacheBridgeCacheIndex, new_cache);
- FixedArray::cast(bridge_storage)->
- set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
- NoWriteBarrierSet(FixedArray::cast(bridge_storage),
- kEnumCacheBridgeEnumIndex,
- get(kEnumerationIndexIndex));
- set(kEnumerationIndexIndex, bridge_storage);
- }
+ ASSERT(!IsEmpty());
+ ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
+ FixedArray::cast(bridge_storage)->
+ set(kEnumCacheBridgeCacheIndex, new_cache);
+ FixedArray::cast(bridge_storage)->
+ set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
+ set(kEnumCacheIndex, bridge_storage);
}
-static bool InsertionPointFound(String* key1, String* key2) {
- return key1->Hash() > key2->Hash() || key1 == key2;
-}
-
-
-void DescriptorArray::CopyFrom(Handle<DescriptorArray> dst,
- int dst_index,
- Handle<DescriptorArray> src,
+void DescriptorArray::CopyFrom(int dst_index,
+ DescriptorArray* src,
int src_index,
const WhitenessWitness& witness) {
- CALL_HEAP_FUNCTION_VOID(dst->GetIsolate(),
- dst->CopyFrom(dst_index, *src, src_index, witness));
-}
-
-
-MaybeObject* DescriptorArray::CopyFrom(int dst_index,
- DescriptorArray* src,
- int src_index,
- const WhitenessWitness& witness) {
Object* value = src->GetValue(src_index);
PropertyDetails details = src->GetDetails(src_index);
- if (details.type() == CALLBACKS && value->IsAccessorPair()) {
- MaybeObject* maybe_copy =
- AccessorPair::cast(value)->CopyWithoutTransitions();
- if (!maybe_copy->To(&value)) return maybe_copy;
- }
Descriptor desc(src->GetKey(src_index), value, details);
Set(dst_index, &desc, witness);
- return this;
-}
-
-
-MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
- TransitionFlag transition_flag) {
- // Transitions are only kept when inserting another transition.
- // This precondition is not required by this function's implementation, but
- // is currently required by the semantics of maps, so we check it.
- // Conversely, we filter after replacing, so replacing a transition and
- // removing all other transitions is not supported.
- bool remove_transitions = transition_flag == REMOVE_TRANSITIONS;
- ASSERT(remove_transitions == !descriptor->ContainsTransition());
- ASSERT(descriptor->GetDetails().type() != NULL_DESCRIPTOR);
-
- // Ensure the key is a symbol.
- { MaybeObject* maybe_result = descriptor->KeyToSymbol();
- if (maybe_result->IsFailure()) return maybe_result;
- }
-
- int new_size = 0;
- for (int i = 0; i < number_of_descriptors(); i++) {
- if (IsNullDescriptor(i)) continue;
- if (remove_transitions && IsTransitionOnly(i)) continue;
- new_size++;
- }
-
- // If key is in descriptor, we replace it in-place when filtering.
- // Count a null descriptor for key as inserted, not replaced.
- int index = Search(descriptor->GetKey());
- const bool replacing = (index != kNotFound);
- bool keep_enumeration_index = false;
- if (replacing) {
- // We are replacing an existing descriptor. We keep the enumeration
- // index of a visible property.
- PropertyType t = GetDetails(index).type();
- if (t == CONSTANT_FUNCTION ||
- t == FIELD ||
- t == CALLBACKS ||
- t == INTERCEPTOR) {
- keep_enumeration_index = true;
- } else if (remove_transitions) {
- // Replaced descriptor has been counted as removed if it is
- // a transition that will be replaced. Adjust count in this case.
- ++new_size;
- }
- } else {
- ++new_size;
- }
-
- DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(new_size);
- if (!maybe_result->To(&new_descriptors)) return maybe_result;
- }
-
- DescriptorArray::WhitenessWitness witness(new_descriptors);
-
- // Set the enumeration index in the descriptors and set the enumeration index
- // in the result.
- int enumeration_index = NextEnumerationIndex();
- if (!descriptor->ContainsTransition()) {
- if (keep_enumeration_index) {
- descriptor->SetEnumerationIndex(GetDetails(index).index());
- } else {
- descriptor->SetEnumerationIndex(enumeration_index);
- ++enumeration_index;
- }
- }
- new_descriptors->SetNextEnumerationIndex(enumeration_index);
-
- // Copy the descriptors, filtering out transitions and null descriptors,
- // and inserting or replacing a descriptor.
- int to_index = 0;
- int insertion_index = -1;
- int from_index = 0;
- while (from_index < number_of_descriptors()) {
- if (insertion_index < 0 &&
- InsertionPointFound(GetKey(from_index), descriptor->GetKey())) {
- insertion_index = to_index++;
- if (replacing) from_index++;
- } else {
- if (!(IsNullDescriptor(from_index) ||
- (remove_transitions && IsTransitionOnly(from_index)))) {
- MaybeObject* copy_result =
- new_descriptors->CopyFrom(to_index++, this, from_index, witness);
- if (copy_result->IsFailure()) return copy_result;
- }
- from_index++;
- }
- }
- if (insertion_index < 0) insertion_index = to_index++;
- new_descriptors->Set(insertion_index, descriptor, witness);
-
- ASSERT(to_index == new_descriptors->number_of_descriptors());
- SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
-
- return new_descriptors;
}
-MaybeObject* DescriptorArray::RemoveTransitions() {
- // Allocate the new descriptor array.
- int new_number_of_descriptors = 0;
- for (int i = 0; i < number_of_descriptors(); i++) {
- if (IsProperty(i)) new_number_of_descriptors++;
- }
- DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
- if (!maybe_result->To(&new_descriptors)) return maybe_result;
- }
-
- // Copy the content.
- DescriptorArray::WhitenessWitness witness(new_descriptors);
- int next_descriptor = 0;
- for (int i = 0; i < number_of_descriptors(); i++) {
- if (IsProperty(i)) {
- MaybeObject* copy_result =
- new_descriptors->CopyFrom(next_descriptor++, this, i, witness);
- if (copy_result->IsFailure()) return copy_result;
- }
- }
- ASSERT(next_descriptor == new_descriptors->number_of_descriptors());
-
- return new_descriptors;
-}
-
-
-void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
+// We need the whiteness witness since sort will reshuffle the entries in the
+// descriptor array. If the descriptor array were to be black, the shuffling
+// would move a slot that was already recorded as pointing into an evacuation
+// candidate. This would result in missing updates upon evacuation.
+void DescriptorArray::Sort() {
// In-place heap sort.
int len = number_of_descriptors();
-
+ // Reset sorting since the descriptor array might contain invalid pointers.
+ for (int i = 0; i < len; ++i) SetSortedKey(i, i);
// Bottom-up max-heap construction.
// Index of the last node with children
const int max_parent_index = (len / 2) - 1;
for (int i = max_parent_index; i >= 0; --i) {
int parent_index = i;
- const uint32_t parent_hash = GetKey(i)->Hash();
+ const uint32_t parent_hash = GetSortedKey(i)->Hash();
while (parent_index <= max_parent_index) {
int child_index = 2 * parent_index + 1;
- uint32_t child_hash = GetKey(child_index)->Hash();
+ uint32_t child_hash = GetSortedKey(child_index)->Hash();
if (child_index + 1 < len) {
- uint32_t right_child_hash = GetKey(child_index + 1)->Hash();
+ uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
if (right_child_hash > child_hash) {
child_index++;
child_hash = right_child_hash;
}
}
if (child_hash <= parent_hash) break;
- NoIncrementalWriteBarrierSwapDescriptors(parent_index, child_index);
+ SwapSortedKeys(parent_index, child_index);
// Now element at child_index could be < its children.
parent_index = child_index; // parent_hash remains correct.
}
@@ -5889,95 +6423,45 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
// Extract elements and create sorted array.
for (int i = len - 1; i > 0; --i) {
// Put max element at the back of the array.
- NoIncrementalWriteBarrierSwapDescriptors(0, i);
+ SwapSortedKeys(0, i);
// Shift down the new top element.
int parent_index = 0;
- const uint32_t parent_hash = GetKey(parent_index)->Hash();
+ const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
const int max_parent_index = (i / 2) - 1;
while (parent_index <= max_parent_index) {
int child_index = parent_index * 2 + 1;
- uint32_t child_hash = GetKey(child_index)->Hash();
+ uint32_t child_hash = GetSortedKey(child_index)->Hash();
if (child_index + 1 < i) {
- uint32_t right_child_hash = GetKey(child_index + 1)->Hash();
+ uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
if (right_child_hash > child_hash) {
child_index++;
child_hash = right_child_hash;
}
}
if (child_hash <= parent_hash) break;
- NoIncrementalWriteBarrierSwapDescriptors(parent_index, child_index);
+ SwapSortedKeys(parent_index, child_index);
parent_index = child_index;
}
}
+ ASSERT(IsSortedNoDuplicates());
}
-void DescriptorArray::Sort(const WhitenessWitness& witness) {
- SortUnchecked(witness);
- SLOW_ASSERT(IsSortedNoDuplicates());
-}
-
-
-int DescriptorArray::BinarySearch(String* name, int low, int high) {
- uint32_t hash = name->Hash();
-
- while (low <= high) {
- int mid = (low + high) / 2;
- String* mid_name = GetKey(mid);
- uint32_t mid_hash = mid_name->Hash();
-
- if (mid_hash > hash) {
- high = mid - 1;
- continue;
- }
- if (mid_hash < hash) {
- low = mid + 1;
- continue;
- }
- // Found an element with the same hash-code.
- ASSERT(hash == mid_hash);
- // There might be more, so we find the first one and
- // check them all to see if we have a match.
- if (name == mid_name && !IsNullDescriptor(mid)) return mid;
- while ((mid > low) && (GetKey(mid - 1)->Hash() == hash)) mid--;
- for (; (mid <= high) && (GetKey(mid)->Hash() == hash); mid++) {
- if (GetKey(mid)->Equals(name) && !IsNullDescriptor(mid)) return mid;
- }
- break;
- }
- return kNotFound;
-}
-
-
-int DescriptorArray::LinearSearch(String* name, int len) {
- uint32_t hash = name->Hash();
- for (int number = 0; number < len; number++) {
- String* entry = GetKey(number);
- if ((entry->Hash() == hash) &&
- name->Equals(entry) &&
- !IsNullDescriptor(number)) {
- return number;
- }
- }
- return kNotFound;
-}
-
-
-MaybeObject* AccessorPair::CopyWithoutTransitions() {
+MaybeObject* AccessorPair::Copy() {
Heap* heap = GetHeap();
AccessorPair* copy;
- { MaybeObject* maybe_copy = heap->AllocateAccessorPair();
- if (!maybe_copy->To(&copy)) return maybe_copy;
- }
- copy->set_getter(getter()->IsMap() ? heap->the_hole_value() : getter());
- copy->set_setter(setter()->IsMap() ? heap->the_hole_value() : setter());
+ MaybeObject* maybe_copy = heap->AllocateAccessorPair();
+ if (!maybe_copy->To(&copy)) return maybe_copy;
+
+ copy->set_getter(getter());
+ copy->set_setter(setter());
return copy;
}
Object* AccessorPair::GetComponent(AccessorComponent component) {
- Object* accessor = (component == ACCESSOR_GETTER) ? getter() : setter();
- return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
+ Object* accessor = get(component);
+ return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
}
@@ -6003,9 +6487,9 @@ bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
if (other->IsEmpty()) return false;
if (length() != other->length()) return false;
for (int i = 0; i < length(); ++i) {
- if (get(i) != other->get(i) && i != kContentArrayIndex) return false;
+ if (get(i) != other->get(i)) return false;
}
- return GetContentArray()->IsEqualTo(other->GetContentArray());
+ return true;
}
#endif
@@ -6037,7 +6521,7 @@ String::FlatContent String::GetFlatContent() {
ASSERT(shape.representation_tag() != kConsStringTag &&
shape.representation_tag() != kSlicedStringTag);
}
- if (shape.encoding_tag() == kAsciiStringTag) {
+ if (shape.encoding_tag() == kOneByteStringTag) {
const char* start;
if (shape.representation_tag() == kSeqStringTag) {
start = SeqAsciiString::cast(string)->GetChars();
@@ -6726,7 +7210,7 @@ void String::WriteToFlat(String* src,
while (true) {
ASSERT(0 <= from && from <= to && to <= source->length());
switch (StringShape(source).full_representation_tag()) {
- case kAsciiStringTag | kExternalStringTag: {
+ case kOneByteStringTag | kExternalStringTag: {
CopyChars(sink,
ExternalAsciiString::cast(source)->GetChars() + from,
to - from);
@@ -6740,7 +7224,7 @@ void String::WriteToFlat(String* src,
to - from);
return;
}
- case kAsciiStringTag | kSeqStringTag: {
+ case kOneByteStringTag | kSeqStringTag: {
CopyChars(sink,
SeqAsciiString::cast(source)->GetChars() + from,
to - from);
@@ -6752,7 +7236,7 @@ void String::WriteToFlat(String* src,
to - from);
return;
}
- case kAsciiStringTag | kConsStringTag:
+ case kOneByteStringTag | kConsStringTag:
case kTwoByteStringTag | kConsStringTag: {
ConsString* cons_string = ConsString::cast(source);
String* first = cons_string->first();
@@ -6793,7 +7277,7 @@ void String::WriteToFlat(String* src,
}
break;
}
- case kAsciiStringTag | kSlicedStringTag:
+ case kOneByteStringTag | kSlicedStringTag:
case kTwoByteStringTag | kSlicedStringTag: {
SlicedString* slice = SlicedString::cast(source);
unsigned offset = slice->offset();
@@ -6883,72 +7367,6 @@ static inline bool CompareStringContentsPartial(Isolate* isolate,
}
-bool String::SlowEqualsExternal(uc16 *string, int length) {
- int len = this->length();
- if (len != length) return false;
- if (len == 0) return true;
-
- // We know the strings are both non-empty. Compare the first chars
- // before we try to flatten the strings.
- if (this->Get(0) != string[0]) return false;
-
- String* lhs = this->TryFlattenGetString();
-
- if (lhs->IsFlat()) {
- String::FlatContent lhs_content = lhs->GetFlatContent();
- if (lhs->IsAsciiRepresentation()) {
- Vector<const char> vec1 = lhs_content.ToAsciiVector();
- VectorIterator<char> buf1(vec1);
- VectorIterator<uc16> ib(string, length);
- return CompareStringContents(&buf1, &ib);
- } else {
- Vector<const uc16> vec1 = lhs_content.ToUC16Vector();
- Vector<const uc16> vec2(string, length);
- return CompareRawStringContents(vec1, vec2);
- }
- } else {
- Isolate* isolate = GetIsolate();
- isolate->objects_string_compare_buffer_a()->Reset(0, lhs);
- VectorIterator<uc16> ib(string, length);
- return CompareStringContents(isolate->objects_string_compare_buffer_a(),
- &ib);
- }
-}
-
-
-bool String::SlowEqualsExternal(char *string, int length) {
- int len = this->length();
- if (len != length) return false;
- if (len == 0) return true;
-
- // We know the strings are both non-empty. Compare the first chars
- // before we try to flatten the strings.
- if (this->Get(0) != string[0]) return false;
-
- String* lhs = this->TryFlattenGetString();
-
- if (StringShape(lhs).IsSequentialAscii()) {
- const char* str1 = SeqAsciiString::cast(lhs)->GetChars();
- return CompareRawStringContents(Vector<const char>(str1, len),
- Vector<const char>(string, len));
- }
-
- if (lhs->IsFlat()) {
- String::FlatContent lhs_content = lhs->GetFlatContent();
- Vector<const uc16> vec1 = lhs_content.ToUC16Vector();
- VectorIterator<const uc16> buf1(vec1);
- VectorIterator<char> buf2(string, length);
- return CompareStringContents(&buf1, &buf2);
- } else {
- Isolate* isolate = GetIsolate();
- isolate->objects_string_compare_buffer_a()->Reset(0, lhs);
- VectorIterator<char> ib(string, length);
- return CompareStringContents(isolate->objects_string_compare_buffer_a(),
- &ib);
- }
-}
-
-
bool String::SlowEquals(String* other) {
// Fast check: negative check with lengths.
int len = length();
@@ -7216,7 +7634,6 @@ void StringHasher::AddSurrogatePairNoIndex(uc32 c) {
uint32_t StringHasher::GetHashField() {
- ASSERT(is_valid());
if (length_ <= String::kMaxHashCalcLength) {
if (is_array_index()) {
return MakeArrayIndexHash(array_index(), length_);
@@ -7271,89 +7688,116 @@ void String::PrintOn(FILE* file) {
}
+static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
+ int live_enum = map->EnumLength();
+ if (live_enum == Map::kInvalidEnumCache) {
+ live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
+ }
+ if (live_enum == 0) return descriptors->ClearEnumCache();
+
+ FixedArray* enum_cache = descriptors->GetEnumCache();
+
+ int to_trim = enum_cache->length() - live_enum;
+ if (to_trim <= 0) return;
+ RightTrimFixedArray<FROM_GC>(heap, descriptors->GetEnumCache(), to_trim);
+
+ if (!descriptors->HasEnumIndicesCache()) return;
+ FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
+ RightTrimFixedArray<FROM_GC>(heap, enum_indices_cache, to_trim);
+}
+
+
+static void TrimDescriptorArray(Heap* heap,
+ Map* map,
+ DescriptorArray* descriptors,
+ int number_of_own_descriptors) {
+ int number_of_descriptors = descriptors->number_of_descriptors();
+ int to_trim = number_of_descriptors - number_of_own_descriptors;
+ if (to_trim <= 0) return;
+
+ RightTrimFixedArray<FROM_GC>(heap, descriptors, to_trim);
+ descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
+
+ if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
+ descriptors->Sort();
+}
+
+
// Clear a possible back pointer in case the transition leads to a dead map.
// Return true in case a back pointer has been cleared and false otherwise.
-// Set *keep_entry to true when a live map transition has been found.
-static bool ClearBackPointer(Heap* heap, Object* target, bool* keep_entry) {
- if (!target->IsMap()) return false;
- Map* map = Map::cast(target);
- if (Marking::MarkBitFrom(map).Get()) {
- *keep_entry = true;
- return false;
- } else {
- map->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
- return true;
- }
+static bool ClearBackPointer(Heap* heap, Map* target) {
+ if (Marking::MarkBitFrom(target).Get()) return false;
+ target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
+ return true;
}
+// TODO(mstarzinger): This method should be moved into MarkCompactCollector,
+// because it cannot be called from outside the GC and we already have methods
+// depending on the transitions layout in the GC anyways.
void Map::ClearNonLiveTransitions(Heap* heap) {
- DescriptorArray* d = DescriptorArray::cast(
- *RawField(this, Map::kInstanceDescriptorsOrBitField3Offset));
- if (d->IsEmpty()) return;
- Smi* NullDescriptorDetails =
- PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
- FixedArray* contents = FixedArray::cast(
- d->get(DescriptorArray::kContentArrayIndex));
- ASSERT(contents->length() >= 2);
- for (int i = 0; i < contents->length(); i += 2) {
- // If the pair (value, details) is a map transition, check if the target is
- // live. If not, null the descriptor. Also drop the back pointer for that
- // map transition, so that this map is not reached again by following a back
- // pointer from that non-live map.
- bool keep_entry = false;
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
- switch (details.type()) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- ClearBackPointer(heap, contents->get(i), &keep_entry);
- break;
- case ELEMENTS_TRANSITION: {
- Object* object = contents->get(i);
- if (object->IsMap()) {
- ClearBackPointer(heap, object, &keep_entry);
- } else {
- FixedArray* array = FixedArray::cast(object);
- for (int j = 0; j < array->length(); ++j) {
- if (ClearBackPointer(heap, array->get(j), &keep_entry)) {
- array->set_undefined(j);
- }
- }
- }
- break;
+ // If there are no transitions to be cleared, return.
+ // TODO(verwaest) Should be an assert, otherwise back pointers are not
+ // properly cleared.
+ if (!HasTransitionArray()) return;
+
+ TransitionArray* t = transitions();
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+
+ int transition_index = 0;
+
+ DescriptorArray* descriptors = instance_descriptors();
+ bool descriptors_owner_died = false;
+
+ // Compact all live descriptors to the left.
+ for (int i = 0; i < t->number_of_transitions(); ++i) {
+ Map* target = t->GetTarget(i);
+ if (ClearBackPointer(heap, target)) {
+ if (target->instance_descriptors() == descriptors) {
+ descriptors_owner_died = true;
}
- case CALLBACKS: {
- Object* object = contents->get(i);
- if (object->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(object);
- if (ClearBackPointer(heap, accessors->getter(), &keep_entry)) {
- accessors->set_getter(heap->the_hole_value());
- }
- if (ClearBackPointer(heap, accessors->setter(), &keep_entry)) {
- accessors->set_setter(heap->the_hole_value());
- }
- } else {
- keep_entry = true;
- }
- break;
+ } else {
+ if (i != transition_index) {
+ String* key = t->GetKey(i);
+ t->SetKey(transition_index, key);
+ Object** key_slot = t->GetKeySlot(transition_index);
+ collector->RecordSlot(key_slot, key_slot, key);
+ // Target slots do not need to be recorded since maps are not compacted.
+ t->SetTarget(transition_index, t->GetTarget(i));
}
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- keep_entry = true;
- break;
+ transition_index++;
+ }
+ }
+
+ if (t->HasElementsTransition() &&
+ ClearBackPointer(heap, t->elements_transition())) {
+ if (t->elements_transition()->instance_descriptors() == descriptors) {
+ descriptors_owner_died = true;
}
- // Make sure that an entry containing only dead transitions gets collected.
- // What we *really* want to do here is removing this entry completely, but
- // for technical reasons we can't do this, so we zero it out instead.
- if (!keep_entry) {
- contents->set_unchecked(i + 1, NullDescriptorDetails);
- contents->set_null_unchecked(heap, i);
+ t->ClearElementsTransition();
+ } else {
+ // If there are no transitions to be cleared, return.
+ // TODO(verwaest) Should be an assert, otherwise back pointers are not
+ // properly cleared.
+ if (transition_index == t->number_of_transitions()) return;
+ }
+
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+
+ if (descriptors_owner_died) {
+ if (number_of_own_descriptors > 0) {
+ TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
+ ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ } else {
+ ASSERT(descriptors == GetHeap()->empty_descriptor_array());
}
}
+
+ int trim = t->number_of_transitions() - transition_index;
+ if (trim > 0) {
+ RightTrimFixedArray<FROM_GC>(heap, t, t->IsSimpleTransition()
+ ? trim : trim * TransitionArray::kTransitionSize);
+ }
}
@@ -7386,8 +7830,8 @@ bool Map::EquivalentToForNormalization(Map* other,
instance_type() == other->instance_type() &&
bit_field() == other->bit_field() &&
bit_field2() == other->bit_field2() &&
- (bit_field3() & ~(1<<Map::kIsShared)) ==
- (other->bit_field3() & ~(1<<Map::kIsShared));
+ is_observed() == other->is_observed() &&
+ function_with_prototype() == other->function_with_prototype();
}
@@ -7408,13 +7852,19 @@ void JSFunction::MarkForLazyRecompilation() {
ReplaceCode(builtins->builtin(Builtins::kLazyRecompile));
}
+void JSFunction::MarkForParallelRecompilation() {
+ ASSERT(is_compiled() && !IsOptimized());
+ ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
+ Builtins* builtins = GetIsolate()->builtins();
+ ReplaceCode(builtins->builtin(Builtins::kParallelRecompile));
-bool SharedFunctionInfo::EnsureCompiled(Handle<SharedFunctionInfo> shared,
- ClearExceptionFlag flag) {
- return shared->is_compiled() || CompileLazy(shared, flag);
+ // Unlike MarkForLazyRecompilation, after queuing a function for
+ // recompilation on the compiler thread, we actually tail-call into
+ // the full code. We reset the profiler ticks here so that the
+ // function doesn't bother the runtime profiler too much.
+ shared()->code()->set_profiler_ticks(0);
}
-
static bool CompileLazyHelper(CompilationInfo* info,
ClearExceptionFlag flag) {
// Compile the source information to a code object.
@@ -7431,11 +7881,78 @@ static bool CompileLazyHelper(CompilationInfo* info,
bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag) {
- CompilationInfo info(shared);
+ ASSERT(shared->allows_lazy_compilation_without_context());
+ CompilationInfoWithZone info(shared);
return CompileLazyHelper(&info, flag);
}
+void SharedFunctionInfo::ClearOptimizedCodeMap() {
+ set_optimized_code_map(Smi::FromInt(0));
+}
+
+
+void SharedFunctionInfo::AddToOptimizedCodeMap(
+ Handle<SharedFunctionInfo> shared,
+ Handle<Context> native_context,
+ Handle<Code> code,
+ Handle<FixedArray> literals) {
+ ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+ ASSERT(native_context->IsNativeContext());
+ STATIC_ASSERT(kEntryLength == 3);
+ Object* value = shared->optimized_code_map();
+ Handle<FixedArray> new_code_map;
+ if (value->IsSmi()) {
+ // No optimized code map.
+ ASSERT_EQ(0, Smi::cast(value)->value());
+ // Crate 3 entries per context {context, code, literals}.
+ new_code_map = FACTORY->NewFixedArray(kEntryLength);
+ new_code_map->set(0, *native_context);
+ new_code_map->set(1, *code);
+ new_code_map->set(2, *literals);
+ } else {
+ // Copy old map and append one new entry.
+ Handle<FixedArray> old_code_map(FixedArray::cast(value));
+ ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context));
+ int old_length = old_code_map->length();
+ int new_length = old_length + kEntryLength;
+ new_code_map = FACTORY->NewFixedArray(new_length);
+ old_code_map->CopyTo(0, *new_code_map, 0, old_length);
+ new_code_map->set(old_length, *native_context);
+ new_code_map->set(old_length + 1, *code);
+ new_code_map->set(old_length + 2, *literals);
+ }
+#ifdef DEBUG
+ for (int i = 0; i < new_code_map->length(); i += kEntryLength) {
+ ASSERT(new_code_map->get(i)->IsNativeContext());
+ ASSERT(new_code_map->get(i + 1)->IsCode());
+ ASSERT(Code::cast(new_code_map->get(i + 1))->kind() ==
+ Code::OPTIMIZED_FUNCTION);
+ ASSERT(new_code_map->get(i + 2)->IsFixedArray());
+ }
+#endif
+ shared->set_optimized_code_map(*new_code_map);
+}
+
+
+void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
+ int index) {
+ ASSERT(index > 0);
+ ASSERT(optimized_code_map()->IsFixedArray());
+ FixedArray* code_map = FixedArray::cast(optimized_code_map());
+ if (!bound()) {
+ FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
+ ASSERT(cached_literals != NULL);
+ function->set_literals(cached_literals);
+ }
+ Code* code = Code::cast(code_map->get(index));
+ ASSERT(code != NULL);
+ ASSERT(function->context()->native_context() == code_map->get(index - 1));
+ function->ReplaceCode(code);
+ code->MakeYoung();
+}
+
+
bool JSFunction::CompileLazy(Handle<JSFunction> function,
ClearExceptionFlag flag) {
bool result = true;
@@ -7443,7 +7960,8 @@ bool JSFunction::CompileLazy(Handle<JSFunction> function,
function->ReplaceCode(function->shared()->code());
function->shared()->set_code_age(0);
} else {
- CompilationInfo info(function);
+ ASSERT(function->shared()->allows_lazy_compilation());
+ CompilationInfoWithZone info(function);
result = CompileLazyHelper(&info, flag);
ASSERT(!result || function->is_compiled());
}
@@ -7452,14 +7970,20 @@ bool JSFunction::CompileLazy(Handle<JSFunction> function,
bool JSFunction::CompileOptimized(Handle<JSFunction> function,
- int osr_ast_id,
+ BailoutId osr_ast_id,
ClearExceptionFlag flag) {
- CompilationInfo info(function);
+ CompilationInfoWithZone info(function);
info.SetOptimizing(osr_ast_id);
return CompileLazyHelper(&info, flag);
}
+bool JSFunction::EnsureCompiled(Handle<JSFunction> function,
+ ClearExceptionFlag flag) {
+ return function->is_compiled() || CompileLazy(function, flag);
+}
+
+
bool JSFunction::IsInlineable() {
if (IsBuiltin()) return false;
SharedFunctionInfo* shared_info = shared();
@@ -7474,19 +7998,86 @@ bool JSFunction::IsInlineable() {
}
+MaybeObject* JSObject::OptimizeAsPrototype() {
+ if (IsGlobalObject()) return this;
+
+ // Make sure prototypes are fast objects and their maps have the bit set
+ // so they remain fast.
+ if (!HasFastProperties()) {
+ MaybeObject* new_proto = TransformToFastProperties(0);
+ if (new_proto->IsFailure()) return new_proto;
+ ASSERT(new_proto == this);
+ }
+ return this;
+}
+
+
+MUST_USE_RESULT static MaybeObject* CacheInitialJSArrayMaps(
+ Context* native_context, Map* initial_map) {
+ // Replace all of the cached initial array maps in the native context with
+ // the appropriate transitioned elements kind maps.
+ Heap* heap = native_context->GetHeap();
+ MaybeObject* maybe_maps =
+ heap->AllocateFixedArrayWithHoles(kElementsKindCount);
+ FixedArray* maps;
+ if (!maybe_maps->To(&maps)) return maybe_maps;
+
+ Map* current_map = initial_map;
+ ElementsKind kind = current_map->elements_kind();
+ ASSERT(kind == GetInitialFastElementsKind());
+ maps->set(kind, current_map);
+ for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
+ i < kFastElementsKindCount; ++i) {
+ Map* new_map;
+ ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
+ MaybeObject* maybe_new_map =
+ current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ maps->set(next_kind, new_map);
+ current_map = new_map;
+ }
+ native_context->set_js_array_maps(maps);
+ return initial_map;
+}
+
+
MaybeObject* JSFunction::SetInstancePrototype(Object* value) {
ASSERT(value->IsJSReceiver());
Heap* heap = GetHeap();
+
+ // First some logic for the map of the prototype to make sure it is in fast
+ // mode.
+ if (value->IsJSObject()) {
+ MaybeObject* ok = JSObject::cast(value)->OptimizeAsPrototype();
+ if (ok->IsFailure()) return ok;
+ }
+
+ // Now some logic for the maps of the objects that are created by using this
+ // function as a constructor.
if (has_initial_map()) {
- // If the function has allocated the initial map
- // replace it with a copy containing the new prototype.
+ // If the function has allocated the initial map replace it with a
+ // copy containing the new prototype. Also complete any in-object
+ // slack tracking that is in progress at this point because it is
+ // still tracking the old copy.
+ if (shared()->IsInobjectSlackTrackingInProgress()) {
+ shared()->CompleteInobjectSlackTracking();
+ }
Map* new_map;
- MaybeObject* maybe_new_map = initial_map()->CopyDropTransitions();
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ MaybeObject* maybe_object = initial_map()->Copy();
+ if (!maybe_object->To(&new_map)) return maybe_object;
new_map->set_prototype(value);
- MaybeObject* maybe_object =
- set_initial_map_and_cache_transitions(new_map);
- if (maybe_object->IsFailure()) return maybe_object;
+
+ // If the function is used as the global Array function, cache the
+ // initial map (and transitioned versions) in the native context.
+ Context* native_context = context()->native_context();
+ Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
+ if (array_function->IsJSFunction() &&
+ this == JSFunction::cast(array_function)) {
+ MaybeObject* ok = CacheInitialJSArrayMaps(native_context, new_map);
+ if (ok->IsFailure()) return ok;
+ }
+
+ set_initial_map(new_map);
} else {
// Put the value in the initial map field until an initial map is
// needed. At that point, a new initial map is created and the
@@ -7511,15 +8102,15 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
// Remove map transitions because they point to maps with a
// different prototype.
Map* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropTransitions();
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- }
+ MaybeObject* maybe_new_map = map()->Copy();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
Heap* heap = new_map->GetHeap();
set_map(new_map);
new_map->set_constructor(value);
new_map->set_non_instance_prototype(true);
construct_prototype =
- heap->isolate()->context()->global_context()->
+ heap->isolate()->context()->native_context()->
initial_object_prototype();
} else {
map()->set_non_instance_prototype(false);
@@ -7529,41 +8120,36 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
}
-Object* JSFunction::RemovePrototype() {
- Context* global_context = context()->global_context();
+void JSFunction::RemovePrototype() {
+ Context* native_context = context()->native_context();
Map* no_prototype_map = shared()->is_classic_mode()
- ? global_context->function_without_prototype_map()
- : global_context->strict_mode_function_without_prototype_map();
+ ? native_context->function_without_prototype_map()
+ : native_context->strict_mode_function_without_prototype_map();
- if (map() == no_prototype_map) {
- // Be idempotent.
- return this;
- }
+ if (map() == no_prototype_map) return;
ASSERT(map() == (shared()->is_classic_mode()
- ? global_context->function_map()
- : global_context->strict_mode_function_map()));
+ ? native_context->function_map()
+ : native_context->strict_mode_function_map()));
set_map(no_prototype_map);
set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
- return this;
}
-Object* JSFunction::SetInstanceClassName(String* name) {
+void JSFunction::SetInstanceClassName(String* name) {
shared()->set_instance_class_name(name);
- return this;
}
void JSFunction::PrintName(FILE* out) {
SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
- PrintF(out, "%s", *name);
+ FPrintF(out, "%s", *name);
}
-Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
- return Context::cast(literals->get(JSFunction::kLiteralGlobalContextIndex));
+Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
+ return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
}
@@ -7631,26 +8217,33 @@ bool SharedFunctionInfo::CanGenerateInlineConstructor(Object* prototype) {
return false;
}
- // If the prototype is null inline constructors cause no problems.
- if (!prototype->IsJSObject()) {
- ASSERT(prototype->IsNull());
- return true;
- }
-
Heap* heap = GetHeap();
- // Traverse the proposed prototype chain looking for setters for properties of
- // the same names as are set by the inline constructor.
+ // Traverse the proposed prototype chain looking for properties of the
+ // same names as are set by the inline constructor.
for (Object* obj = prototype;
obj != heap->null_value();
obj = obj->GetPrototype()) {
- JSObject* js_object = JSObject::cast(obj);
+ JSReceiver* receiver = JSReceiver::cast(obj);
for (int i = 0; i < this_property_assignments_count(); i++) {
LookupResult result(heap->isolate());
String* name = GetThisPropertyAssignmentName(i);
- js_object->LocalLookupRealNamedProperty(name, &result);
- if (result.IsFound() && result.type() == CALLBACKS) {
- return false;
+ receiver->LocalLookup(name, &result);
+ if (result.IsFound()) {
+ switch (result.type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ break;
+ case INTERCEPTOR:
+ case CALLBACKS:
+ case HANDLER:
+ return false;
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
+ break;
+ }
}
}
}
@@ -7795,7 +8388,7 @@ void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
}
-void SharedFunctionInfo::DisableOptimization() {
+void SharedFunctionInfo::DisableOptimization(const char* reason) {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby loosing the
@@ -7811,13 +8404,14 @@ void SharedFunctionInfo::DisableOptimization() {
code()->set_optimizable(false);
}
if (FLAG_trace_opt) {
- PrintF("[disabled optimization for %s]\n", *DebugName()->ToCString());
+ PrintF("[disabled optimization for %s, reason: %s]\n",
+ *DebugName()->ToCString(), reason);
}
}
-bool SharedFunctionInfo::VerifyBailoutId(int id) {
- ASSERT(id != AstNode::kNoNumber);
+bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
+ ASSERT(!id.IsNone());
Code* unoptimized = code();
DeoptimizationOutputData* data =
DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
@@ -7859,8 +8453,7 @@ void SharedFunctionInfo::DetachInitialMap() {
Map* map = reinterpret_cast<Map*>(initial_map());
// Make the map remember to restore the link if it survives the GC.
- map->set_bit_field3(
- map->bit_field3() | (1 << Map::kAttachedToSharedFunctionInfo));
+ map->set_attached_to_shared_function_info(true);
// Undo state changes made by StartInobjectTracking (except the
// construction_count). This way if the initial map does not survive the GC
@@ -7868,7 +8461,7 @@ void SharedFunctionInfo::DetachInitialMap() {
// constructor is called. The countdown will continue and (possibly after
// several more GCs) CompleteInobjectSlackTracking will eventually be called.
Heap* heap = map->GetHeap();
- set_initial_map(heap->raw_unchecked_undefined_value());
+ set_initial_map(heap->undefined_value());
Builtins* builtins = heap->isolate()->builtins();
ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
*RawField(this, kConstructStubOffset));
@@ -7880,8 +8473,7 @@ void SharedFunctionInfo::DetachInitialMap() {
// Called from GC, hence reinterpret_cast and unchecked accessors.
void SharedFunctionInfo::AttachInitialMap(Map* map) {
- map->set_bit_field3(
- map->bit_field3() & ~(1 << Map::kAttachedToSharedFunctionInfo));
+ map->set_attached_to_shared_function_info(false);
// Resume inobject slack tracking.
set_initial_map(map);
@@ -7900,12 +8492,13 @@ void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
if (code()->kind() == Code::FUNCTION) {
code()->set_profiler_ticks(0);
if (optimization_disabled() &&
- opt_count() >= Compiler::kDefaultMaxOptCount) {
+ opt_count() >= FLAG_max_opt_count) {
// Re-enable optimizations if they were disabled due to opt_count limit.
set_optimization_disabled(false);
code()->set_optimizable(true);
}
set_opt_count(0);
+ set_deopt_count(0);
}
}
@@ -7953,9 +8546,20 @@ void SharedFunctionInfo::CompleteInobjectSlackTracking() {
}
-void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
- v->VisitSharedFunctionInfo(this);
- SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
+int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) {
+ ASSERT(native_context->IsNativeContext());
+ if (!FLAG_cache_optimized_code) return -1;
+ Object* value = optimized_code_map();
+ if (!value->IsSmi()) {
+ FixedArray* optimized_code_map = FixedArray::cast(value);
+ int length = optimized_code_map->length();
+ for (int i = 0; i < length; i += 3) {
+ if (optimized_code_map->get(i) == native_context) {
+ return i + 1;
+ }
+ }
+ }
+ return -1;
}
@@ -7984,6 +8588,15 @@ void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
}
+void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+ Object* stub = rinfo->code_age_stub();
+ if (stub) {
+ VisitPointer(&stub);
+ }
+}
+
+
void ObjectVisitor::VisitCodeEntry(Address entry_address) {
Object* code = Code::GetObjectFromEntryAddress(entry_address);
Object* old_code = code;
@@ -8183,7 +8796,6 @@ void Code::ClearTypeFeedbackCells(Heap* heap) {
TypeFeedbackCells* type_feedback_cells =
TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
- ASSERT(type_feedback_cells->AstId(i)->IsSmi());
JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
}
@@ -8191,25 +8803,124 @@ void Code::ClearTypeFeedbackCells(Heap* heap) {
}
+bool Code::allowed_in_shared_map_code_cache() {
+ return is_keyed_load_stub() || is_keyed_store_stub() ||
+ (is_compare_ic_stub() && compare_state() == CompareIC::KNOWN_OBJECTS);
+}
+
+
+void Code::MakeCodeAgeSequenceYoung(byte* sequence) {
+ PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+}
+
+
+void Code::MakeYoung() {
+ byte* sequence = FindCodeAgeSequence();
+ if (sequence != NULL) {
+ PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+ }
+}
+
+
+void Code::MakeOlder(MarkingParity current_parity) {
+ byte* sequence = FindCodeAgeSequence();
+ if (sequence != NULL) {
+ Age age;
+ MarkingParity code_parity;
+ GetCodeAgeAndParity(sequence, &age, &code_parity);
+ if (age != kLastCodeAge && code_parity != current_parity) {
+ PatchPlatformCodeAge(sequence, static_cast<Age>(age + 1),
+ current_parity);
+ }
+ }
+}
+
+
+bool Code::IsOld() {
+ byte* sequence = FindCodeAgeSequence();
+ if (sequence == NULL) return false;
+ Age age;
+ MarkingParity parity;
+ GetCodeAgeAndParity(sequence, &age, &parity);
+ return age >= kSexagenarianCodeAge;
+}
+
+
+byte* Code::FindCodeAgeSequence() {
+ return FLAG_age_code &&
+ strlen(FLAG_stop_at) == 0 &&
+ !ProfileEntryHookStub::HasEntryHook() &&
+ (kind() == OPTIMIZED_FUNCTION ||
+ (kind() == FUNCTION && !has_debug_break_slots()))
+ ? FindPlatformCodeAgeSequence()
+ : NULL;
+}
+
+
+void Code::GetCodeAgeAndParity(Code* code, Age* age,
+ MarkingParity* parity) {
+ Isolate* isolate = Isolate::Current();
+ Builtins* builtins = isolate->builtins();
+ Code* stub = NULL;
+#define HANDLE_CODE_AGE(AGE) \
+ stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking(); \
+ if (code == stub) { \
+ *age = k##AGE##CodeAge; \
+ *parity = EVEN_MARKING_PARITY; \
+ return; \
+ } \
+ stub = *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
+ if (code == stub) { \
+ *age = k##AGE##CodeAge; \
+ *parity = ODD_MARKING_PARITY; \
+ return; \
+ }
+ CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+ UNREACHABLE();
+}
+
+
+Code* Code::GetCodeAgeStub(Age age, MarkingParity parity) {
+ Isolate* isolate = Isolate::Current();
+ Builtins* builtins = isolate->builtins();
+ switch (age) {
+#define HANDLE_CODE_AGE(AGE) \
+ case k##AGE##CodeAge: { \
+ Code* stub = parity == EVEN_MARKING_PARITY \
+ ? *builtins->Make##AGE##CodeYoungAgainEvenMarking() \
+ : *builtins->Make##AGE##CodeYoungAgainOddMarking(); \
+ return stub; \
+ }
+ CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+ default:
+ UNREACHABLE();
+ break;
+ }
+ return NULL;
+}
+
+
#ifdef ENABLE_DISASSEMBLER
void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
disasm::NameConverter converter;
int deopt_count = DeoptCount();
- PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
+ FPrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
if (0 == deopt_count) return;
- PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
+ FPrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
FLAG_print_code_verbose ? "commands" : "");
for (int i = 0; i < deopt_count; i++) {
- PrintF(out, "%6d %6d %6d %6d",
+ FPrintF(out, "%6d %6d %6d %6d",
i,
- AstId(i)->value(),
+ AstId(i).ToInt(),
ArgumentsStackHeight(i)->value(),
Pc(i)->value());
if (!FLAG_print_code_verbose) {
- PrintF(out, "\n");
+ FPrintF(out, "\n");
continue;
}
// Print details of the frame translation.
@@ -8220,7 +8931,7 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
ASSERT(Translation::BEGIN == opcode);
int frame_count = iterator.Next();
int jsframe_count = iterator.Next();
- PrintF(out, " %s {frame count=%d, js frame count=%d}\n",
+ FPrintF(out, " %s {frame count=%d, js frame count=%d}\n",
Translation::StringFor(opcode),
frame_count,
jsframe_count);
@@ -8228,7 +8939,7 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
while (iterator.HasNext() &&
Translation::BEGIN !=
(opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
- PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
+ FPrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
switch (opcode) {
case Translation::BEGIN:
@@ -8238,12 +8949,15 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
case Translation::JS_FRAME: {
int ast_id = iterator.Next();
int function_id = iterator.Next();
- JSFunction* function =
- JSFunction::cast(LiteralArray()->get(function_id));
unsigned height = iterator.Next();
- PrintF(out, "{ast_id=%d, function=", ast_id);
- function->PrintName(out);
- PrintF(out, ", height=%u}", height);
+ FPrintF(out, "{ast_id=%d, function=", ast_id);
+ if (function_id != Translation::kSelfLiteralId) {
+ Object* function = LiteralArray()->get(function_id);
+ JSFunction::cast(function)->PrintName(out);
+ } else {
+ FPrintF(out, "<self>");
+ }
+ FPrintF(out, ", height=%u}", height);
break;
}
@@ -8253,9 +8967,20 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
JSFunction* function =
JSFunction::cast(LiteralArray()->get(function_id));
unsigned height = iterator.Next();
- PrintF(out, "{function=");
+ FPrintF(out, "{function=");
+ function->PrintName(out);
+ FPrintF(out, ", height=%u}", height);
+ break;
+ }
+
+ case Translation::GETTER_STUB_FRAME:
+ case Translation::SETTER_STUB_FRAME: {
+ int function_id = iterator.Next();
+ JSFunction* function =
+ JSFunction::cast(LiteralArray()->get(function_id));
+ FPrintF(out, "{function=");
function->PrintName(out);
- PrintF(out, ", height=%u}", height);
+ FPrintF(out, "}");
break;
}
@@ -8264,58 +8989,72 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
case Translation::REGISTER: {
int reg_code = iterator.Next();
- PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
+ FPrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
break;
}
case Translation::INT32_REGISTER: {
int reg_code = iterator.Next();
- PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
+ FPrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
+ break;
+ }
+
+ case Translation::UINT32_REGISTER: {
+ int reg_code = iterator.Next();
+ FPrintF(out,
+ "{input=%s (unsigned)}",
+ converter.NameOfCPURegister(reg_code));
break;
}
case Translation::DOUBLE_REGISTER: {
int reg_code = iterator.Next();
- PrintF(out, "{input=%s}",
+ FPrintF(out, "{input=%s}",
DoubleRegister::AllocationIndexToString(reg_code));
break;
}
case Translation::STACK_SLOT: {
int input_slot_index = iterator.Next();
- PrintF(out, "{input=%d}", input_slot_index);
+ FPrintF(out, "{input=%d}", input_slot_index);
break;
}
case Translation::INT32_STACK_SLOT: {
int input_slot_index = iterator.Next();
- PrintF(out, "{input=%d}", input_slot_index);
+ FPrintF(out, "{input=%d}", input_slot_index);
+ break;
+ }
+
+ case Translation::UINT32_STACK_SLOT: {
+ int input_slot_index = iterator.Next();
+ FPrintF(out, "{input=%d (unsigned)}", input_slot_index);
break;
}
case Translation::DOUBLE_STACK_SLOT: {
int input_slot_index = iterator.Next();
- PrintF(out, "{input=%d}", input_slot_index);
+ FPrintF(out, "{input=%d}", input_slot_index);
break;
}
case Translation::LITERAL: {
unsigned literal_index = iterator.Next();
- PrintF(out, "{literal_id=%u}", literal_index);
+ FPrintF(out, "{literal_id=%u}", literal_index);
break;
}
case Translation::ARGUMENTS_OBJECT:
break;
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
}
}
void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
- PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
+ FPrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
this->DeoptPoints());
if (this->DeoptPoints() == 0) return;
@@ -8323,7 +9062,7 @@ void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
for (int i = 0; i < this->DeoptPoints(); i++) {
int pc_and_state = this->PcAndState(i)->value();
PrintF("%6d %8d %s\n",
- this->AstId(i)->value(),
+ this->AstId(i).ToInt(),
FullCodeGenerator::PcField::decode(pc_and_state),
FullCodeGenerator::State2String(
FullCodeGenerator::StateField::decode(pc_and_state)));
@@ -8369,18 +9108,15 @@ const char* Code::ICState2String(InlineCacheState state) {
}
-const char* Code::PropertyType2String(PropertyType type) {
+const char* Code::StubType2String(StubType type) {
switch (type) {
case NORMAL: return "NORMAL";
case FIELD: return "FIELD";
case CONSTANT_FUNCTION: return "CONSTANT_FUNCTION";
case CALLBACKS: return "CALLBACKS";
- case HANDLER: return "HANDLER";
case INTERCEPTOR: return "INTERCEPTOR";
case MAP_TRANSITION: return "MAP_TRANSITION";
- case ELEMENTS_TRANSITION: return "ELEMENTS_TRANSITION";
- case CONSTANT_TRANSITION: return "CONSTANT_TRANSITION";
- case NULL_DESCRIPTOR: return "NULL_DESCRIPTOR";
+ case NONEXISTENT: return "NONEXISTENT";
}
UNREACHABLE(); // keep the compiler happy
return NULL;
@@ -8405,43 +9141,43 @@ void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
break;
}
if (name != NULL) {
- PrintF(out, "extra_ic_state = %s\n", name);
+ FPrintF(out, "extra_ic_state = %s\n", name);
} else {
- PrintF(out, "extra_ic_state = %d\n", extra);
+ FPrintF(out, "extra_ic_state = %d\n", extra);
}
}
void Code::Disassemble(const char* name, FILE* out) {
- PrintF(out, "kind = %s\n", Kind2String(kind()));
+ FPrintF(out, "kind = %s\n", Kind2String(kind()));
if (is_inline_cache_stub()) {
- PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
+ FPrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
PrintExtraICState(out, kind(), extra_ic_state());
if (ic_state() == MONOMORPHIC) {
- PrintF(out, "type = %s\n", PropertyType2String(type()));
+ FPrintF(out, "type = %s\n", StubType2String(type()));
}
if (is_call_stub() || is_keyed_call_stub()) {
- PrintF(out, "argc = %d\n", arguments_count());
+ FPrintF(out, "argc = %d\n", arguments_count());
}
if (is_compare_ic_stub()) {
CompareIC::State state = CompareIC::ComputeState(this);
- PrintF(out, "compare_state = %s\n", CompareIC::GetStateName(state));
+ FPrintF(out, "compare_state = %s\n", CompareIC::GetStateName(state));
}
if (is_compare_ic_stub() && major_key() == CodeStub::CompareIC) {
Token::Value op = CompareIC::ComputeOperation(this);
- PrintF(out, "compare_operation = %s\n", Token::Name(op));
+ FPrintF(out, "compare_operation = %s\n", Token::Name(op));
}
}
if ((name != NULL) && (name[0] != '\0')) {
- PrintF(out, "name = %s\n", name);
+ FPrintF(out, "name = %s\n", name);
}
if (kind() == OPTIMIZED_FUNCTION) {
- PrintF(out, "stack_slots = %d\n", stack_slots());
+ FPrintF(out, "stack_slots = %d\n", stack_slots());
}
- PrintF(out, "Instructions (size = %d)\n", instruction_size());
+ FPrintF(out, "Instructions (size = %d)\n", instruction_size());
Disassembler::Decode(out, this);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
if (kind() == FUNCTION) {
DeoptimizationOutputData* data =
@@ -8456,24 +9192,26 @@ void Code::Disassemble(const char* name, FILE* out) {
if (kind() == OPTIMIZED_FUNCTION) {
SafepointTable table(this);
- PrintF(out, "Safepoints (size = %u)\n", table.size());
+ FPrintF(out, "Safepoints (size = %u)\n", table.size());
for (unsigned i = 0; i < table.length(); i++) {
unsigned pc_offset = table.GetPcOffset(i);
- PrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset);
+ FPrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset);
table.PrintEntry(i);
- PrintF(out, " (sp -> fp)");
+ FPrintF(out, " (sp -> fp)");
SafepointEntry entry = table.GetEntry(i);
if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
- PrintF(out, " %6d", entry.deoptimization_index());
+ FPrintF(out, " %6d", entry.deoptimization_index());
} else {
- PrintF(out, " <none>");
+ FPrintF(out, " <none>");
}
if (entry.argument_count() > 0) {
- PrintF(out, " argc: %d", entry.argument_count());
+ FPrintF(out, " argc: %d", entry.argument_count());
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
+ // Just print if type feedback info is ever used for optimized code.
+ ASSERT(type_feedback_info()->IsUndefined());
} else if (kind() == FUNCTION) {
unsigned offset = stack_check_table_offset();
// If there is no stack check table, the "table start" will at or after
@@ -8482,19 +9220,25 @@ void Code::Disassemble(const char* name, FILE* out) {
unsigned* address =
reinterpret_cast<unsigned*>(instruction_start() + offset);
unsigned length = address[0];
- PrintF(out, "Stack checks (size = %u)\n", length);
- PrintF(out, "ast_id pc_offset\n");
+ FPrintF(out, "Stack checks (size = %u)\n", length);
+ FPrintF(out, "ast_id pc_offset\n");
for (unsigned i = 0; i < length; ++i) {
unsigned index = (2 * i) + 1;
- PrintF(out, "%6u %9u\n", address[index], address[index + 1]);
+ FPrintF(out, "%6u %9u\n", address[index], address[index + 1]);
}
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
+#ifdef OBJECT_PRINT
+ if (!type_feedback_info()->IsUndefined()) {
+ TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
+ FPrintF(out, "\n");
+ }
+#endif
}
PrintF("RelocInfo (size = %d)\n", relocation_size());
for (RelocIterator it(this); !it.done(); it.next()) it.rinfo()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
#endif // ENABLE_DISASSEMBLER
@@ -8502,7 +9246,7 @@ void Code::Disassemble(const char* name, FILE* out) {
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
int capacity,
int length,
- SetFastElementsCapacityMode set_capacity_mode) {
+ SetFastElementsCapacitySmiMode smi_mode) {
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
@@ -8513,34 +9257,40 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
if (!maybe->To(&new_elements)) return maybe;
}
- // Find the new map to use for this object if there is a map change.
- Map* new_map = NULL;
- if (elements()->map() != heap->non_strict_arguments_elements_map()) {
- // The resized array has FAST_SMI_ONLY_ELEMENTS if the capacity mode forces
- // it, or if it's allowed and the old elements array contained only SMIs.
- bool has_fast_smi_only_elements =
- (set_capacity_mode == kForceSmiOnlyElements) ||
- ((set_capacity_mode == kAllowSmiOnlyElements) &&
- (elements()->map()->has_fast_smi_only_elements() ||
- elements() == heap->empty_fixed_array()));
- ElementsKind elements_kind = has_fast_smi_only_elements
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
- MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
- if (!maybe->To(&new_map)) return maybe;
+ ElementsKind elements_kind = GetElementsKind();
+ ElementsKind new_elements_kind;
+ // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
+ // or if it's allowed and the old elements array contained only SMIs.
+ bool has_fast_smi_elements =
+ (smi_mode == kForceSmiElements) ||
+ ((smi_mode == kAllowSmiElements) && HasFastSmiElements());
+ if (has_fast_smi_elements) {
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_SMI_ELEMENTS;
+ }
+ } else {
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_ELEMENTS;
+ }
}
-
FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind = GetElementsKind();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
- ElementsKind to_kind = (elements_kind == FAST_SMI_ONLY_ELEMENTS)
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
{ MaybeObject* maybe_obj =
- accessor->CopyElements(this, new_elements, to_kind);
+ accessor->CopyElements(this, new_elements, new_elements_kind);
if (maybe_obj->IsFailure()) return maybe_obj;
}
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ Map* new_map = map();
+ if (new_elements_kind != elements_kind) {
+ MaybeObject* maybe =
+ GetElementsTransitionMap(GetIsolate(), new_elements_kind);
+ if (!maybe->To(&new_map)) return maybe;
+ }
+ ValidateElements();
set_map_and_elements(new_map, new_elements);
} else {
FixedArray* parameter_map = FixedArray::cast(old_elements);
@@ -8552,11 +9302,9 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
GetElementsKind(), new_elements);
}
- // Update the length if necessary.
if (IsJSArray()) {
JSArray::cast(this)->set_length(Smi::FromInt(length));
}
-
return new_elements;
}
@@ -8574,20 +9322,28 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
if (!maybe_obj->To(&elems)) return maybe_obj;
}
+ ElementsKind elements_kind = GetElementsKind();
+ ElementsKind new_elements_kind = elements_kind;
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_DOUBLE_ELEMENTS;
+ }
+
Map* new_map;
{ MaybeObject* maybe_obj =
- GetElementsTransitionMap(heap->isolate(), FAST_DOUBLE_ELEMENTS);
+ GetElementsTransitionMap(heap->isolate(), new_elements_kind);
if (!maybe_obj->To(&new_map)) return maybe_obj;
}
FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind = GetElementsKind();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
{ MaybeObject* maybe_obj =
accessor->CopyElements(this, elems, FAST_DOUBLE_ELEMENTS);
if (maybe_obj->IsFailure()) return maybe_obj;
}
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ ValidateElements();
set_map_and_elements(new_map, elems);
} else {
FixedArray* parameter_map = FixedArray::cast(old_elements);
@@ -8596,7 +9352,7 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
if (FLAG_trace_elements_transitions) {
PrintElementsTransition(stdout, elements_kind, old_elements,
- FAST_DOUBLE_ELEMENTS, elems);
+ GetElementsKind(), elems);
}
if (IsJSArray()) {
@@ -8636,8 +9392,8 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
}
-Object* Map::GetPrototypeTransition(Object* prototype) {
- FixedArray* cache = prototype_transitions();
+Map* Map::GetPrototypeTransition(Object* prototype) {
+ FixedArray* cache = GetPrototypeTransitions();
int number_of_transitions = NumberOfProtoTransitions();
const int proto_offset =
kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
@@ -8646,8 +9402,7 @@ Object* Map::GetPrototypeTransition(Object* prototype) {
for (int i = 0; i < number_of_transitions; i++) {
if (cache->get(proto_offset + i * step) == prototype) {
Object* map = cache->get(map_offset + i * step);
- ASSERT(map->IsMap());
- return map;
+ return Map::cast(map);
}
}
return NULL;
@@ -8660,7 +9415,7 @@ MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
// Don't cache prototype transition if this map is shared.
if (is_shared() || !FLAG_cache_prototype_transitions) return this;
- FixedArray* cache = prototype_transitions();
+ FixedArray* cache = GetPrototypeTransitions();
const int step = kProtoTransitionElementsPerEntry;
const int header = kProtoTransitionHeaderSize;
@@ -8683,7 +9438,8 @@ MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
new_cache->set(i + header, cache->get(i + header));
}
cache = new_cache;
- set_prototype_transitions(cache);
+ MaybeObject* set_result = SetPrototypeTransitions(cache);
+ if (set_result->IsFailure()) return set_result;
}
int last = transitions - 1;
@@ -8696,6 +9452,22 @@ MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
}
+void Map::ZapTransitions() {
+ TransitionArray* transition_array = transitions();
+ MemsetPointer(transition_array->data_start(),
+ GetHeap()->the_hole_value(),
+ transition_array->length());
+}
+
+
+void Map::ZapPrototypeTransitions() {
+ FixedArray* proto_transitions = GetPrototypeTransitions();
+ MemsetPointer(proto_transitions->data_start(),
+ GetHeap()->the_hole_value(),
+ proto_transitions->length());
+}
+
+
MaybeObject* JSReceiver::SetPrototype(Object* value,
bool skip_hidden_prototypes) {
#ifdef DEBUG
@@ -8755,21 +9527,24 @@ MaybeObject* JSReceiver::SetPrototype(Object* value,
// Nothing to do if prototype is already set.
if (map->prototype() == value) return value;
- Object* new_map = map->GetPrototypeTransition(value);
+ if (value->IsJSObject()) {
+ MaybeObject* ok = JSObject::cast(value)->OptimizeAsPrototype();
+ if (ok->IsFailure()) return ok;
+ }
+
+ Map* new_map = map->GetPrototypeTransition(value);
if (new_map == NULL) {
- { MaybeObject* maybe_new_map = map->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
+ MaybeObject* maybe_new_map = map->Copy();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- { MaybeObject* maybe_new_cache =
- map->PutPrototypeTransition(value, Map::cast(new_map));
- if (maybe_new_cache->IsFailure()) return maybe_new_cache;
- }
+ MaybeObject* maybe_new_cache =
+ map->PutPrototypeTransition(value, new_map);
+ if (maybe_new_cache->IsFailure()) return maybe_new_cache;
- Map::cast(new_map)->set_prototype(value);
+ new_map->set_prototype(value);
}
- ASSERT(Map::cast(new_map)->prototype() == value);
- real_receiver->set_map(Map::cast(new_map));
+ ASSERT(new_map->prototype() == value);
+ real_receiver->set_map(new_map);
heap->ClearInstanceofCache();
ASSERT(size == Size());
@@ -8790,64 +9565,7 @@ MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
}
-bool JSObject::HasElementWithInterceptor(JSReceiver* receiver, uint32_t index) {
- Isolate* isolate = GetIsolate();
- // Make sure that the top context does not change when doing
- // callbacks or interceptor calls.
- AssertNoContextChange ncc;
- HandleScope scope(isolate);
- Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
- Handle<JSReceiver> receiver_handle(receiver);
- Handle<JSObject> holder_handle(this);
- CustomArguments args(isolate, interceptor->data(), receiver, this);
- v8::AccessorInfo info(args.end());
- if (!interceptor->query()->IsUndefined()) {
- v8::IndexedPropertyQuery query =
- v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
- LOG(isolate,
- ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
- v8::Handle<v8::Integer> result;
- {
- // Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
- result = query(index, info);
- }
- if (!result.IsEmpty()) {
- ASSERT(result->IsInt32());
- return true; // absence of property is signaled by empty handle.
- }
- } else if (!interceptor->getter()->IsUndefined()) {
- v8::IndexedPropertyGetter getter =
- v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
- LOG(isolate,
- ApiIndexedPropertyAccess("interceptor-indexed-has-get", this, index));
- v8::Handle<v8::Value> result;
- {
- // Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
- result = getter(index, info);
- }
- if (!result.IsEmpty()) return true;
- }
-
- if (holder_handle->GetElementsAccessor()->HasElement(
- *receiver_handle, *holder_handle, index)) {
- return true;
- }
-
- if (holder_handle->IsStringObjectWithCharacterAt(index)) return true;
- Object* pt = holder_handle->GetPrototype();
- if (pt->IsJSProxy()) {
- // We need to follow the spec and simulate a call to [[GetOwnProperty]].
- return JSProxy::cast(pt)->GetElementAttributeWithHandler(
- receiver, index) != ABSENT;
- }
- if (pt->IsNull()) return false;
- return JSObject::cast(pt)->HasElementWithReceiver(*receiver_handle, index);
-}
-
-
-JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
+JSObject::LocalElementType JSObject::GetLocalElementType(uint32_t index) {
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
Heap* heap = GetHeap();
@@ -8861,13 +9579,13 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
Object* proto = GetPrototype();
if (proto->IsNull()) return UNDEFINED_ELEMENT;
ASSERT(proto->IsJSGlobalObject());
- return JSObject::cast(proto)->HasLocalElement(index);
+ return JSObject::cast(proto)->GetLocalElementType(index);
}
// Check for lookup interceptor
if (HasIndexedInterceptor()) {
- return HasElementWithInterceptor(this, index) ? INTERCEPTED_ELEMENT
- : UNDEFINED_ELEMENT;
+ return GetElementAttributeWithInterceptor(this, index, false) != ABSENT
+ ? INTERCEPTED_ELEMENT : UNDEFINED_ELEMENT;
}
// Handle [] on String objects.
@@ -8876,8 +9594,10 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -8888,7 +9608,8 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -8953,40 +9674,6 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
-bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
- // Check access rights if needed.
- if (IsAccessCheckNeeded()) {
- Heap* heap = GetHeap();
- if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
- heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
- return false;
- }
- }
-
- // Check for lookup interceptor
- if (HasIndexedInterceptor()) {
- return HasElementWithInterceptor(receiver, index);
- }
-
- ElementsAccessor* accessor = GetElementsAccessor();
- if (accessor->HasElement(receiver, this, index)) {
- return true;
- }
-
- // Handle [] on String objects.
- if (this->IsStringObjectWithCharacterAt(index)) return true;
-
- Object* pt = GetPrototype();
- if (pt->IsNull()) return false;
- if (pt->IsJSProxy()) {
- // We need to follow the spec and simulate a call to [[GetOwnProperty]].
- return JSProxy::cast(pt)->GetElementAttributeWithHandler(
- receiver, index) != ABSENT;
- }
- return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
-}
-
-
MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
Object* value,
PropertyAttributes attributes,
@@ -9041,6 +9728,7 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
Handle<AccessorInfo> data(AccessorInfo::cast(structure));
Object* fun_obj = data->getter();
v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
+ if (call_fun == NULL) return isolate->heap()->undefined_value();
HandleScope scope(isolate);
Handle<JSObject> self(JSObject::cast(receiver));
Handle<JSObject> holder_handle(JSObject::cast(holder));
@@ -9057,7 +9745,9 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (result.IsEmpty()) return isolate->heap()->undefined_value();
- return *v8::Utils::OpenHandle(*result);
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
}
// __defineGetter__ callback
@@ -9172,7 +9862,7 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype) {
- ASSERT(HasFastTypeElements() ||
+ ASSERT(HasFastSmiOrObjectElements() ||
HasFastArgumentsElements());
FixedArray* backing_store = FixedArray::cast(elements());
@@ -9198,13 +9888,29 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
// Check if the length property of this object needs to be updated.
uint32_t array_length = 0;
bool must_update_array_length = false;
+ bool introduces_holes = true;
if (IsJSArray()) {
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
+ introduces_holes = index > array_length;
if (index >= array_length) {
must_update_array_length = true;
array_length = index + 1;
}
+ } else {
+ introduces_holes = index >= capacity;
+ }
+
+ // If the array is growing, and it's not growth by a single element at the
+ // end, make sure that the ElementsKind is HOLEY.
+ ElementsKind elements_kind = GetElementsKind();
+ if (introduces_holes &&
+ IsFastElementsKind(elements_kind) &&
+ !IsFastHoleyElementsKind(elements_kind)) {
+ ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
+ if (maybe->IsFailure()) return maybe;
}
+
// Check if the capacity of the backing store needs to be increased, or if
// a transition to slow elements is necessary.
if (index >= capacity) {
@@ -9224,42 +9930,44 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
}
// Convert to fast double elements if appropriate.
- if (HasFastSmiOnlyElements() && !value->IsSmi() && value->IsNumber()) {
+ if (HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
MaybeObject* maybe =
SetFastDoubleElementsCapacityAndLength(new_capacity, array_length);
if (maybe->IsFailure()) return maybe;
FixedDoubleArray::cast(elements())->set(index, value->Number());
+ ValidateElements();
return value;
}
- // Change elements kind from SMI_ONLY to generic FAST if necessary.
- if (HasFastSmiOnlyElements() && !value->IsSmi()) {
+ // Change elements kind from Smi-only to generic FAST if necessary.
+ if (HasFastSmiElements() && !value->IsSmi()) {
Map* new_map;
- { MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
- FAST_ELEMENTS);
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- }
+ ElementsKind kind = HasFastHoleyElements()
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
+ kind);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
set_map(new_map);
- if (FLAG_trace_elements_transitions) {
- PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(),
- FAST_ELEMENTS, elements());
- }
}
// Increase backing store capacity if that's been decided previously.
if (new_capacity != capacity) {
FixedArray* new_elements;
- SetFastElementsCapacityMode set_capacity_mode =
- value->IsSmi() && HasFastSmiOnlyElements()
- ? kAllowSmiOnlyElements
- : kDontAllowSmiOnlyElements;
+ SetFastElementsCapacitySmiMode smi_mode =
+ value->IsSmi() && HasFastSmiElements()
+ ? kAllowSmiElements
+ : kDontAllowSmiElements;
{ MaybeObject* maybe =
SetFastElementsCapacityAndLength(new_capacity,
array_length,
- set_capacity_mode);
+ smi_mode);
if (!maybe->To(&new_elements)) return maybe;
}
new_elements->set(index, value);
+ ValidateElements();
return value;
}
+
// Finally, set the new element and length.
ASSERT(elements()->IsFixedArray());
backing_store->set(index, value);
@@ -9303,7 +10011,8 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
// is read-only (a declared const that has not been initialized). If a
// value is being defined we skip attribute checks completely.
if (set_mode == DEFINE_PROPERTY) {
- details = PropertyDetails(attributes, NORMAL, details.index());
+ details = PropertyDetails(
+ attributes, NORMAL, details.dictionary_index());
dictionary->DetailsAtPut(entry, details);
} else if (details.IsReadOnly() && !element->IsTheHole()) {
if (strict_mode == kNonStrictMode) {
@@ -9383,20 +10092,21 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
} else {
new_length = dictionary->max_number_key() + 1;
}
- SetFastElementsCapacityMode set_capacity_mode = FLAG_smi_only_arrays
- ? kAllowSmiOnlyElements
- : kDontAllowSmiOnlyElements;
+ SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
+ ? kAllowSmiElements
+ : kDontAllowSmiElements;
bool has_smi_only_elements = false;
bool should_convert_to_fast_double_elements =
ShouldConvertToFastDoubleElements(&has_smi_only_elements);
if (has_smi_only_elements) {
- set_capacity_mode = kForceSmiOnlyElements;
+ smi_mode = kForceSmiElements;
}
MaybeObject* result = should_convert_to_fast_double_elements
? SetFastDoubleElementsCapacityAndLength(new_length, new_length)
: SetFastElementsCapacityAndLength(new_length,
new_length,
- set_capacity_mode);
+ smi_mode);
+ ValidateElements();
if (result->IsFailure()) return result;
#ifdef DEBUG
if (FLAG_trace_normalization) {
@@ -9435,27 +10145,40 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
// If the value object is not a heap number, switch to fast elements and try
// again.
bool value_is_smi = value->IsSmi();
+ bool introduces_holes = true;
+ uint32_t length = elms_length;
+ if (IsJSArray()) {
+ CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
+ introduces_holes = index > length;
+ } else {
+ introduces_holes = index >= elms_length;
+ }
+
if (!value->IsNumber()) {
- Object* obj;
- uint32_t length = elms_length;
- if (IsJSArray()) {
- CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
- }
MaybeObject* maybe_obj = SetFastElementsCapacityAndLength(
elms_length,
length,
- kDontAllowSmiOnlyElements);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- return SetFastElement(index,
- value,
- strict_mode,
- check_prototype);
+ kDontAllowSmiElements);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ maybe_obj = SetFastElement(index, value, strict_mode, check_prototype);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ ValidateElements();
+ return maybe_obj;
}
double double_value = value_is_smi
? static_cast<double>(Smi::cast(value)->value())
: HeapNumber::cast(value)->value();
+ // If the array is growing, and it's not growth by a single element at the
+ // end, make sure that the ElementsKind is HOLEY.
+ ElementsKind elements_kind = GetElementsKind();
+ if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
+ ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
+ if (maybe->IsFailure()) return maybe;
+ }
+
// Check whether there is extra space in the fixed array.
if (index < elms_length) {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
@@ -9477,13 +10200,11 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
int new_capacity = NewElementsCapacity(index+1);
if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
- Object* obj;
- { MaybeObject* maybe_obj =
- SetFastDoubleElementsCapacityAndLength(new_capacity,
- index + 1);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj =
+ SetFastDoubleElementsCapacityAndLength(new_capacity, index + 1);
+ if (maybe_obj->IsFailure()) return maybe_obj;
FixedDoubleArray::cast(elements())->set(index, double_value);
+ ValidateElements();
return value;
}
}
@@ -9508,7 +10229,7 @@ MaybeObject* JSReceiver::SetElement(uint32_t index,
bool check_proto) {
if (IsJSProxy()) {
return JSProxy::cast(this)->SetElementWithHandler(
- index, value, strict_mode);
+ this, index, value, strict_mode);
} else {
return JSObject::cast(this)->SetElement(
index, value, attributes, strict_mode, check_proto);
@@ -9550,28 +10271,31 @@ Handle<Object> JSObject::SetElement(Handle<JSObject> object,
MaybeObject* JSObject::SetElement(uint32_t index,
- Object* value,
+ Object* value_raw,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
bool check_prototype,
SetPropertyMode set_mode) {
+ Isolate* isolate = GetIsolate();
+ HandleScope scope(isolate);
+ Handle<JSObject> self(this);
+ Handle<Object> value(value_raw);
+
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
Heap* heap = GetHeap();
- if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_SET)) {
- HandleScope scope(heap->isolate());
- Handle<Object> value_handle(value);
- heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_SET);
- return *value_handle;
+ if (!heap->isolate()->MayIndexedAccess(*self, index, v8::ACCESS_SET)) {
+ heap->isolate()->ReportFailedAccessCheck(*self, v8::ACCESS_SET);
+ return *value;
}
}
if (IsJSGlobalProxy()) {
Object* proto = GetPrototype();
- if (proto->IsNull()) return value;
+ if (proto->IsNull()) return *value;
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->SetElement(index,
- value,
+ *value,
attributes,
strict_mode,
check_prototype,
@@ -9580,9 +10304,8 @@ MaybeObject* JSObject::SetElement(uint32_t index,
// Don't allow element properties to be redefined for external arrays.
if (HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
- Isolate* isolate = GetHeap()->isolate();
Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
- Handle<Object> args[] = { Handle<Object>(this), number };
+ Handle<Object> args[] = { self, number };
Handle<Object> error = isolate->factory()->NewTypeError(
"redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
return isolate->Throw(*error);
@@ -9597,22 +10320,55 @@ MaybeObject* JSObject::SetElement(uint32_t index,
dictionary->set_requires_slow_elements();
}
+ // From here on, everything has to be handlified.
+ Handle<String> name;
+ Handle<Object> old_value(isolate->heap()->the_hole_value());
+ Handle<Object> old_array_length;
+ PropertyAttributes old_attributes = ABSENT;
+ bool preexists = false;
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ name = isolate->factory()->Uint32ToString(index);
+ preexists = self->HasLocalElement(index);
+ if (preexists) {
+ old_attributes = self->GetLocalPropertyAttribute(*name);
+ // TODO(observe): only read & set old_value if we have a data property
+ old_value = Object::GetElement(self, index);
+ } else if (self->IsJSArray()) {
+ // Store old array length in case adding an element grows the array.
+ old_array_length = handle(Handle<JSArray>::cast(self)->length());
+ }
+ }
+
// Check for lookup interceptor
- if (HasIndexedInterceptor()) {
- return SetElementWithInterceptor(index,
- value,
- attributes,
- strict_mode,
- check_prototype,
- set_mode);
+ MaybeObject* result = self->HasIndexedInterceptor()
+ ? self->SetElementWithInterceptor(
+ index, *value, attributes, strict_mode, check_prototype, set_mode)
+ : self->SetElementWithoutInterceptor(
+ index, *value, attributes, strict_mode, check_prototype, set_mode);
+
+ Handle<Object> hresult;
+ if (!result->ToHandle(&hresult)) return result;
+
+ if (FLAG_harmony_observation && map()->is_observed()) {
+ PropertyAttributes new_attributes = self->GetLocalPropertyAttribute(*name);
+ if (!preexists) {
+ EnqueueChangeRecord(self, "new", name, old_value);
+ if (self->IsJSArray() &&
+ !old_array_length->SameValue(Handle<JSArray>::cast(self)->length())) {
+ EnqueueChangeRecord(self, "updated",
+ isolate->factory()->length_symbol(),
+ old_array_length);
+ }
+ } else if (new_attributes != old_attributes || old_value->IsTheHole()) {
+ EnqueueChangeRecord(self, "reconfigured", name, old_value);
+ } else {
+ Handle<Object> new_value = Object::GetElement(self, index);
+ if (!new_value->SameValue(*old_value))
+ EnqueueChangeRecord(self, "updated", name, old_value);
+ }
}
- return SetElementWithoutInterceptor(index,
- value,
- attributes,
- strict_mode,
- check_prototype,
- set_mode);
+ return *hresult;
}
@@ -9627,10 +10383,13 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
(attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
Isolate* isolate = GetIsolate();
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
return SetFastElement(index, value, strict_mode, check_prototype);
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
return SetFastDoubleElement(index, value, strict_mode, check_prototype);
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
@@ -9721,11 +10480,19 @@ Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
ElementsKind from_kind = map()->elements_kind();
+ if (IsFastHoleyElementsKind(from_kind)) {
+ to_kind = GetHoleyElementsKind(to_kind);
+ }
+
Isolate* isolate = GetIsolate();
- if ((from_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements() == isolate->heap()->empty_fixed_array()) &&
- to_kind == FAST_ELEMENTS) {
- ASSERT(from_kind != FAST_ELEMENTS);
+ if (elements() == isolate->heap()->empty_fixed_array() ||
+ (IsFastSmiOrObjectElementsKind(from_kind) &&
+ IsFastSmiOrObjectElementsKind(to_kind)) ||
+ (from_kind == FAST_DOUBLE_ELEMENTS &&
+ to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
+ ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
+ // No change is needed to the elements() buffer, the transition
+ // only requires a map change.
MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind);
Map* new_map;
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
@@ -9752,18 +10519,21 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
}
}
- if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
MaybeObject* maybe_result =
SetFastDoubleElementsCapacityAndLength(capacity, length);
if (maybe_result->IsFailure()) return maybe_result;
+ ValidateElements();
return this;
}
- if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
MaybeObject* maybe_result = SetFastElementsCapacityAndLength(
- capacity, length, kDontAllowSmiOnlyElements);
+ capacity, length, kDontAllowSmiElements);
if (maybe_result->IsFailure()) return maybe_result;
+ ValidateElements();
return this;
}
@@ -9777,10 +10547,14 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
// static
bool Map::IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind) {
- return
- (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- (to_kind == FAST_DOUBLE_ELEMENTS || to_kind == FAST_ELEMENTS)) ||
- (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS);
+ // Transitions can't go backwards.
+ if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
+ return false;
+ }
+
+ // Transitions from HOLEY -> PACKED are not allowed.
+ return !IsFastHoleyElementsKind(from_kind) ||
+ IsFastHoleyElementsKind(to_kind);
}
@@ -9826,7 +10600,11 @@ MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
result = getter(index, info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
- if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
+ if (!result.IsEmpty()) {
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
+ }
}
Heap* heap = holder_handle->GetHeap();
@@ -9871,8 +10649,16 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
break;
}
// Fall through.
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ if (IsJSArray()) {
+ *capacity = backing_store_base->length();
+ *used = Smi::cast(JSArray::cast(this)->length())->value();
+ break;
+ }
+ // Fall through if packing is not guaranteed.
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
backing_store = FixedArray::cast(backing_store_base);
*capacity = backing_store->length();
for (int i = 0; i < *capacity; ++i) {
@@ -9886,7 +10672,14 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
*used = dictionary->NumberOfElements();
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ if (IsJSArray()) {
+ *capacity = backing_store_base->length();
+ *used = Smi::cast(JSArray::cast(this)->length())->value();
+ break;
+ }
+ // Fall through if packing is not guaranteed.
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
*capacity = elms->length();
for (int i = 0; i < *capacity; i++) {
@@ -10003,15 +10796,15 @@ void Dictionary<Shape, Key>::Print(FILE* out) {
for (int i = 0; i < capacity; i++) {
Object* k = HashTable<Shape, Key>::KeyAt(i);
if (HashTable<Shape, Key>::IsKey(k)) {
- PrintF(out, " ");
+ FPrintF(out, " ");
if (k->IsString()) {
String::cast(k)->StringPrint(out);
} else {
k->ShortPrint(out);
}
- PrintF(out, ": ");
+ FPrintF(out, ": ");
ValueAt(i)->ShortPrint(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
}
}
}
@@ -10055,13 +10848,13 @@ InterceptorInfo* JSObject::GetIndexedInterceptor() {
MaybeObject* JSObject::GetPropertyPostInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes) {
// Check local property in holder, ignore interceptor.
LookupResult result(GetIsolate());
LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty()) {
+ if (result.IsFound()) {
return GetProperty(receiver, &result, name, attributes);
}
// Continue searching via the prototype chain.
@@ -10073,13 +10866,13 @@ MaybeObject* JSObject::GetPropertyPostInterceptor(
MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes) {
// Check local property in holder, ignore interceptor.
LookupResult result(GetIsolate());
LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty()) {
+ if (result.IsFound()) {
return GetProperty(receiver, &result, name, attributes);
}
return GetHeap()->undefined_value();
@@ -10087,13 +10880,13 @@ MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
MaybeObject* JSObject::GetPropertyWithInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes) {
Isolate* isolate = GetIsolate();
InterceptorInfo* interceptor = GetNamedInterceptor();
HandleScope scope(isolate);
- Handle<JSReceiver> receiver_handle(receiver);
+ Handle<Object> receiver_handle(receiver);
Handle<JSObject> holder_handle(this);
Handle<String> name_handle(name);
@@ -10113,7 +10906,9 @@ MaybeObject* JSObject::GetPropertyWithInterceptor(
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
*attributes = NONE;
- return *v8::Utils::OpenHandle(*result);
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
}
}
@@ -10138,7 +10933,7 @@ bool JSObject::HasRealNamedProperty(String* key) {
LookupResult result(isolate);
LocalLookupRealNamedProperty(key, &result);
- return result.IsProperty() && (result.type() != INTERCEPTOR);
+ return result.IsFound() && !result.IsInterceptor();
}
@@ -10156,16 +10951,19 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
if (this->IsStringObjectWithCharacterAt(index)) return true;
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedArray::cast(elements())->length());
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -10215,14 +11013,21 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) {
LookupResult result(isolate);
LocalLookupRealNamedProperty(key, &result);
- return result.IsFound() && (result.type() == CALLBACKS);
+ return result.IsPropertyCallbacks();
}
int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
- return HasFastProperties() ?
- map()->NumberOfDescribedProperties(filter) :
- property_dictionary()->NumberOfElementsFilterAttributes(filter);
+ if (HasFastProperties()) {
+ Map* map = this->map();
+ if (filter == NONE) return map->NumberOfOwnDescriptors();
+ if (filter == DONT_ENUM) {
+ int result = map->EnumLength();
+ if (result != Map::kInvalidEnumCache) return result;
+ }
+ return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
+ }
+ return property_dictionary()->NumberOfElementsFilterAttributes(filter);
}
@@ -10345,11 +11150,12 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
ASSERT(storage->length() >= (NumberOfLocalProperties() - index));
if (HasFastProperties()) {
+ int real_size = map()->NumberOfOwnDescriptors();
DescriptorArray* descs = map()->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsProperty(i)) storage->set(index++, descs->GetKey(i));
+ ASSERT(storage->length() >= index + real_size);
+ for (int i = 0; i < real_size; i++) {
+ storage->set(index + i, descs->GetKey(i));
}
- ASSERT(storage->length() >= index);
} else {
property_dictionary()->CopyKeysTo(storage,
index,
@@ -10365,7 +11171,7 @@ int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
int JSObject::NumberOfEnumElements() {
// Fast case for objects with no elements.
- if (!IsJSValue() && HasFastElements()) {
+ if (!IsJSValue() && HasFastObjectElements()) {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -10381,8 +11187,10 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
PropertyAttributes filter) {
int counter = 0;
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
FixedArray::cast(elements())->length();
@@ -10397,7 +11205,8 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
ASSERT(!storage || storage->length() >= counter);
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
FixedDoubleArray::cast(elements())->length();
@@ -10934,8 +11743,12 @@ void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Allocate(int at_least_space_for,
+ MinimumCapacity capacity_option,
PretenureFlag pretenure) {
- int capacity = ComputeCapacity(at_least_space_for);
+ ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for));
+ int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
+ ? at_least_space_for
+ : ComputeCapacity(at_least_space_for);
if (capacity > HashTable::kMaxCapacity) {
return Failure::OutOfMemoryException();
}
@@ -10992,31 +11805,6 @@ int StringDictionary::FindEntry(String* key) {
}
-bool StringDictionary::ContainsTransition(int entry) {
- switch (DetailsAt(entry).type()) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
- return true;
- case CALLBACKS: {
- Object* value = ValueAt(entry);
- if (!value->IsAccessorPair()) return false;
- AccessorPair* accessors = AccessorPair::cast(value);
- return accessors->getter()->IsMap() || accessors->setter()->IsMap();
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // Keep the compiler happy.
- return false;
-}
-
-
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
ASSERT(NumberOfElements() < new_table->Capacity());
@@ -11069,7 +11857,9 @@ MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
(capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this);
Object* obj;
{ MaybeObject* maybe_obj =
- Allocate(nof * 2, pretenure ? TENURED : NOT_TENURED);
+ Allocate(nof * 2,
+ USE_DEFAULT_MINIMUM_CAPACITY,
+ pretenure ? TENURED : NOT_TENURED);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
@@ -11098,7 +11888,9 @@ MaybeObject* HashTable<Shape, Key>::Shrink(Key key) {
!GetHeap()->InNewSpace(this);
Object* obj;
{ MaybeObject* maybe_obj =
- Allocate(at_least_room_for, pretenure ? TENURED : NOT_TENURED);
+ Allocate(at_least_room_for,
+ USE_DEFAULT_MINIMUM_CAPACITY,
+ pretenure ? TENURED : NOT_TENURED);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
@@ -11349,10 +12141,9 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
// Convert to fast elements.
Object* obj;
- { MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
- FAST_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
+ FAST_HOLEY_ELEMENTS);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
Map* new_map = Map::cast(obj);
PretenureFlag tenure = heap->InNewSpace(this) ? NOT_TENURED: TENURED;
@@ -11363,9 +12154,9 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
}
FixedArray* fast_elements = FixedArray::cast(new_array);
dict->CopyValuesTo(fast_elements);
+ ValidateElements();
- set_map(new_map);
- set_elements(fast_elements);
+ set_map_and_elements(new_map, fast_elements);
} else if (HasExternalArrayElements()) {
// External arrays cannot have holes or undefined elements.
return Smi::FromInt(ExternalArray::cast(elements())->length());
@@ -11375,7 +12166,7 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
}
- ASSERT(HasFastTypeElements() || HasFastDoubleElements());
+ ASSERT(HasFastSmiOrObjectElements() || HasFastDoubleElements());
// Collect holes at the end, undefined before that and the rest at the
// start, and return the number of non-hole, non-undefined values.
@@ -11505,7 +12296,7 @@ Object* ExternalPixelArray::SetValue(uint32_t index, Object* value) {
clamped_value = 255;
} else {
// Other doubles are rounded to the nearest integer.
- clamped_value = static_cast<uint8_t>(double_value + 0.5);
+ clamped_value = static_cast<uint8_t>(lrint(double_value));
}
} else {
// Clamp undefined to zero (default). All other types have been
@@ -11709,7 +12500,7 @@ class TwoCharHashTableKey : public HashTableKey {
hash += hash << 3;
hash ^= hash >> 11;
hash += hash << 15;
- if ((hash & String::kHashBitMask) == 0) hash = String::kZeroHash;
+ if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
#ifdef DEBUG
StringHasher hasher(2, seed);
hasher.AddCharacter(c1);
@@ -11845,8 +12636,23 @@ MaybeObject* SymbolTable::LookupKey(HashTableKey* key, Object** s) {
}
-Object* CompilationCacheTable::Lookup(String* src) {
- StringKey key(src);
+// The key for the script compilation cache is dependent on the mode flags,
+// because they change the global language mode and thus binding behaviour.
+// If flags change at some point, we must ensure that we do not hit the cache
+// for code compiled with different settings.
+static LanguageMode CurrentGlobalLanguageMode() {
+ return FLAG_use_strict
+ ? (FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE)
+ : CLASSIC_MODE;
+}
+
+
+Object* CompilationCacheTable::Lookup(String* src, Context* context) {
+ SharedFunctionInfo* shared = context->closure()->shared();
+ StringSharedKey key(src,
+ shared,
+ CurrentGlobalLanguageMode(),
+ RelocInfo::kNoPosition);
int entry = FindEntry(&key);
if (entry == kNotFound) return GetHeap()->undefined_value();
return get(EntryToIndex(entry) + 1);
@@ -11876,17 +12682,24 @@ Object* CompilationCacheTable::LookupRegExp(String* src,
}
-MaybeObject* CompilationCacheTable::Put(String* src, Object* value) {
- StringKey key(src);
- Object* obj;
- { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+MaybeObject* CompilationCacheTable::Put(String* src,
+ Context* context,
+ Object* value) {
+ SharedFunctionInfo* shared = context->closure()->shared();
+ StringSharedKey key(src,
+ shared,
+ CurrentGlobalLanguageMode(),
+ RelocInfo::kNoPosition);
+ CompilationCacheTable* cache;
+ MaybeObject* maybe_cache = EnsureCapacity(1, &key);
+ if (!maybe_cache->To(&cache)) return maybe_cache;
+
+ Object* k;
+ MaybeObject* maybe_k = key.AsObject();
+ if (!maybe_k->To(&k)) return maybe_k;
- CompilationCacheTable* cache =
- reinterpret_cast<CompilationCacheTable*>(obj);
int entry = cache->FindInsertionEntry(key.Hash());
- cache->set(EntryToIndex(entry), src);
+ cache->set(EntryToIndex(entry), k);
cache->set(EntryToIndex(entry) + 1, value);
cache->ElementAdded();
return cache;
@@ -12030,6 +12843,12 @@ MaybeObject* Dictionary<Shape, Key>::Allocate(int at_least_space_for) {
}
+void StringDictionary::DoGenerateNewEnumerationIndices(
+ Handle<StringDictionary> dictionary) {
+ CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
+ dictionary->GenerateNewEnumerationIndices());
+}
+
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
Heap* heap = Dictionary<Shape, Key>::GetHeap();
@@ -12056,7 +12875,8 @@ MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
int pos = 0;
for (int i = 0; i < capacity; i++) {
if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
- enumeration_order->set(pos++, Smi::FromInt(DetailsAt(i).index()));
+ int index = DetailsAt(i).dictionary_index();
+ enumeration_order->set(pos++, Smi::FromInt(index));
}
}
@@ -12155,6 +12975,8 @@ template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Add(Key key,
Object* value,
PropertyDetails details) {
+ ASSERT(details.dictionary_index() == details.descriptor_index());
+
// Valdate key is absent.
SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
// Check whether the dictionary should be extended.
@@ -12182,7 +13004,9 @@ MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash);
// Insert element at empty or deleted entry
- if (!details.IsDeleted() && details.index() == 0 && Shape::kIsEnumerable) {
+ if (!details.IsDeleted() &&
+ details.dictionary_index() == 0 &&
+ Shape::kIsEnumerable) {
// Assign an enumeration index to the property and update
// SetNextEnumerationIndex.
int index = NextEnumerationIndex();
@@ -12273,7 +13097,7 @@ MaybeObject* SeededNumberDictionary::Set(uint32_t key,
// Preserve enumeration index.
details = PropertyDetails(details.attributes(),
details.type(),
- DetailsAt(entry).index());
+ DetailsAt(entry).dictionary_index());
MaybeObject* maybe_object_key = SeededNumberDictionaryShape::AsObject(key);
Object* object_key;
if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
@@ -12344,23 +13168,45 @@ void Dictionary<Shape, Key>::CopyKeysTo(
}
-void StringDictionary::CopyEnumKeysTo(FixedArray* storage,
- FixedArray* sort_array) {
- ASSERT(storage->length() >= NumberOfEnumElements());
+FixedArray* StringDictionary::CopyEnumKeysTo(FixedArray* storage) {
+ int length = storage->length();
+ ASSERT(length >= NumberOfEnumElements());
+ Heap* heap = GetHeap();
+ Object* undefined_value = heap->undefined_value();
int capacity = Capacity();
- int index = 0;
+ int properties = 0;
+
+ // Fill in the enumeration array by assigning enumerable keys at their
+ // enumeration index. This will leave holes in the array if there are keys
+ // that are deleted or not enumerable.
for (int i = 0; i < capacity; i++) {
Object* k = KeyAt(i);
if (IsKey(k)) {
PropertyDetails details = DetailsAt(i);
if (details.IsDeleted() || details.IsDontEnum()) continue;
- storage->set(index, k);
- sort_array->set(index, Smi::FromInt(details.index()));
- index++;
+ properties++;
+ storage->set(details.dictionary_index() - 1, k);
+ if (properties == length) break;
}
}
- storage->SortPairs(sort_array, sort_array->length());
- ASSERT(storage->length() >= index);
+
+ // There are holes in the enumeration array if less properties were assigned
+ // than the length of the array. If so, crunch all the existing properties
+ // together by shifting them to the left (maintaining the enumeration order),
+ // and trimming of the right side of the array.
+ if (properties < length) {
+ if (properties == 0) return heap->empty_fixed_array();
+ properties = 0;
+ for (int i = 0; i < length; ++i) {
+ Object* value = storage->get(i);
+ if (value != undefined_value) {
+ storage->set(properties, value);
+ ++properties;
+ }
+ }
+ RightTrimFixedArray<FROM_MUTATOR>(heap, storage, length - properties);
+ }
+ return storage;
}
@@ -12410,18 +13256,12 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
JSObject* obj, int unused_property_fields) {
// Make sure we preserve dictionary representation if there are too many
// descriptors.
- if (NumberOfElements() > DescriptorArray::kMaxNumberOfDescriptors) return obj;
-
- // Figure out if it is necessary to generate new enumeration indices.
- int max_enumeration_index =
- NextEnumerationIndex() +
- (DescriptorArray::kMaxNumberOfDescriptors -
- NumberOfElements());
- if (!PropertyDetails::IsValidIndex(max_enumeration_index)) {
- Object* result;
- { MaybeObject* maybe_result = GenerateNewEnumerationIndices();
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
+ int number_of_elements = NumberOfElements();
+ if (number_of_elements > DescriptorArray::kMaxNumberOfDescriptors) return obj;
+
+ if (number_of_elements != NextEnumerationIndex()) {
+ MaybeObject* maybe_result = GenerateNewEnumerationIndices();
+ if (maybe_result->IsFailure()) return maybe_result;
}
int instance_descriptor_length = 0;
@@ -12445,18 +13285,35 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
}
}
+ int inobject_props = obj->map()->inobject_properties();
+
+ // Allocate new map.
+ Map* new_map;
+ MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ new_map->set_dictionary_map(false);
+
+ if (instance_descriptor_length == 0) {
+ ASSERT_LE(unused_property_fields, inobject_props);
+ // Transform the object.
+ new_map->set_unused_property_fields(inobject_props);
+ obj->set_map(new_map);
+ obj->set_properties(heap->empty_fixed_array());
+ // Check that it really works.
+ ASSERT(obj->HasFastProperties());
+ return obj;
+ }
+
// Allocate the instance descriptor.
DescriptorArray* descriptors;
- { MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(instance_descriptor_length);
- if (!maybe_descriptors->To<DescriptorArray>(&descriptors)) {
- return maybe_descriptors;
- }
+ MaybeObject* maybe_descriptors =
+ DescriptorArray::Allocate(instance_descriptor_length);
+ if (!maybe_descriptors->To(&descriptors)) {
+ return maybe_descriptors;
}
DescriptorArray::WhitenessWitness witness(descriptors);
- int inobject_props = obj->map()->inobject_properties();
int number_of_allocated_fields =
number_of_fields + unused_property_fields - inobject_props;
if (number_of_allocated_fields < 0) {
@@ -12466,33 +13323,33 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
}
// Allocate the fixed array for the fields.
- Object* fields;
- { MaybeObject* maybe_fields =
- heap->AllocateFixedArray(number_of_allocated_fields);
- if (!maybe_fields->ToObject(&fields)) return maybe_fields;
- }
+ FixedArray* fields;
+ MaybeObject* maybe_fields =
+ heap->AllocateFixedArray(number_of_allocated_fields);
+ if (!maybe_fields->To(&fields)) return maybe_fields;
// Fill in the instance descriptor and the fields.
- int next_descriptor = 0;
int current_offset = 0;
for (int i = 0; i < capacity; i++) {
Object* k = KeyAt(i);
if (IsKey(k)) {
Object* value = ValueAt(i);
// Ensure the key is a symbol before writing into the instance descriptor.
- Object* key;
- { MaybeObject* maybe_key = heap->LookupSymbol(String::cast(k));
- if (!maybe_key->ToObject(&key)) return maybe_key;
- }
+ String* key;
+ MaybeObject* maybe_key = heap->LookupSymbol(String::cast(k));
+ if (!maybe_key->To(&key)) return maybe_key;
+
PropertyDetails details = DetailsAt(i);
+ ASSERT(details.descriptor_index() == details.dictionary_index());
+ int enumeration_index = details.descriptor_index();
PropertyType type = details.type();
if (value->IsJSFunction() && !heap->InNewSpace(value)) {
- ConstantFunctionDescriptor d(String::cast(key),
+ ConstantFunctionDescriptor d(key,
JSFunction::cast(value),
details.attributes(),
- details.index());
- descriptors->Set(next_descriptor++, &d, witness);
+ enumeration_index);
+ descriptors->Set(enumeration_index - 1, &d, witness);
} else if (type == NORMAL) {
if (current_offset < inobject_props) {
obj->InObjectPropertyAtPut(current_offset,
@@ -12500,24 +13357,19 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
UPDATE_WRITE_BARRIER);
} else {
int offset = current_offset - inobject_props;
- FixedArray::cast(fields)->set(offset, value);
+ fields->set(offset, value);
}
- FieldDescriptor d(String::cast(key),
+ FieldDescriptor d(key,
current_offset++,
details.attributes(),
- details.index());
- descriptors->Set(next_descriptor++, &d, witness);
+ enumeration_index);
+ descriptors->Set(enumeration_index - 1, &d, witness);
} else if (type == CALLBACKS) {
- if (value->IsAccessorPair()) {
- MaybeObject* maybe_copy =
- AccessorPair::cast(value)->CopyWithoutTransitions();
- if (!maybe_copy->To(&value)) return maybe_copy;
- }
- CallbacksDescriptor d(String::cast(key),
+ CallbacksDescriptor d(key,
value,
details.attributes(),
- details.index());
- descriptors->Set(next_descriptor++, &d, witness);
+ enumeration_index);
+ descriptors->Set(enumeration_index - 1, &d, witness);
} else {
UNREACHABLE();
}
@@ -12525,22 +13377,17 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
}
ASSERT(current_offset == number_of_fields);
- descriptors->Sort(witness);
- // Allocate new map.
- Object* new_map;
- { MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
+ descriptors->Sort();
+
+ new_map->InitializeDescriptors(descriptors);
+ new_map->set_unused_property_fields(unused_property_fields);
// Transform the object.
- obj->set_map(Map::cast(new_map));
- obj->map()->set_instance_descriptors(descriptors);
- obj->map()->set_unused_property_fields(unused_property_fields);
+ obj->set_map(new_map);
- obj->set_properties(FixedArray::cast(fields));
+ obj->set_properties(fields);
ASSERT(obj->IsJSObject());
- descriptors->SetNextEnumerationIndex(NextEnumerationIndex());
// Check that it really works.
ASSERT(obj->HasFastProperties());
@@ -12611,11 +13458,11 @@ Object* ObjectHashTable::Lookup(Object* key) {
// If the object does not have an identity hash, it was never used as a key.
{ MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
if (maybe_hash->ToObjectUnchecked()->IsUndefined()) {
- return GetHeap()->undefined_value();
+ return GetHeap()->the_hole_value();
}
}
int entry = FindEntry(key);
- if (entry == kNotFound) return GetHeap()->undefined_value();
+ if (entry == kNotFound) return GetHeap()->the_hole_value();
return get(EntryToIndex(entry) + 1);
}
@@ -12632,7 +13479,7 @@ MaybeObject* ObjectHashTable::Put(Object* key, Object* value) {
int entry = FindEntry(key);
// Check whether to perform removal operation.
- if (value->IsUndefined()) {
+ if (value->IsTheHole()) {
if (entry == kNotFound) return this;
RemoveEntry(entry);
return Shrink(key);
diff --git a/src/3rdparty/v8/src/objects.h b/src/3rdparty/v8/src/objects.h
index 59458da..fe9655a 100644
--- a/src/3rdparty/v8/src/objects.h
+++ b/src/3rdparty/v8/src/objects.h
@@ -30,9 +30,10 @@
#include "allocation.h"
#include "builtins.h"
+#include "elements-kind.h"
#include "list.h"
#include "property-details.h"
-#include "smart-array-pointer.h"
+#include "smart-pointers.h"
#include "unicode-inl.h"
#if V8_TARGET_ARCH_ARM
#include "arm/constants-arm.h"
@@ -40,6 +41,7 @@
#include "mips/constants-mips.h"
#endif
#include "v8checks.h"
+#include "zone.h"
//
@@ -82,6 +84,7 @@
// - Context
// - JSFunctionResultCache
// - ScopeInfo
+// - TransitionArray
// - FixedDoubleArray
// - ExternalArray
// - ExternalPixelArray
@@ -131,40 +134,6 @@
namespace v8 {
namespace internal {
-enum ElementsKind {
- // The "fast" kind for elements that only contain SMI values. Must be first
- // to make it possible to efficiently check maps for this kind.
- FAST_SMI_ONLY_ELEMENTS,
-
- // The "fast" kind for tagged values. Must be second to make it possible to
- // efficiently check maps for this and the FAST_SMI_ONLY_ELEMENTS kind
- // together at once.
- FAST_ELEMENTS,
-
- // The "fast" kind for unwrapped, non-tagged double values.
- FAST_DOUBLE_ELEMENTS,
-
- // The "slow" kind.
- DICTIONARY_ELEMENTS,
- NON_STRICT_ARGUMENTS_ELEMENTS,
- // The "fast" kind for external arrays
- EXTERNAL_BYTE_ELEMENTS,
- EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
- EXTERNAL_SHORT_ELEMENTS,
- EXTERNAL_UNSIGNED_SHORT_ELEMENTS,
- EXTERNAL_INT_ELEMENTS,
- EXTERNAL_UNSIGNED_INT_ELEMENTS,
- EXTERNAL_FLOAT_ELEMENTS,
- EXTERNAL_DOUBLE_ELEMENTS,
- EXTERNAL_PIXEL_ELEMENTS,
-
- // Derived constants from ElementsKind
- FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_BYTE_ELEMENTS,
- LAST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
- FIRST_ELEMENTS_KIND = FAST_SMI_ONLY_ELEMENTS,
- LAST_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS
-};
-
enum CompareMapMode {
REQUIRE_EXACT_MAP,
ALLOW_ELEMENT_TRANSITION_MAPS
@@ -175,13 +144,6 @@ enum KeyedAccessGrowMode {
ALLOW_JSARRAY_GROWTH
};
-const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
-
-void PrintElementsKind(FILE* out, ElementsKind kind);
-
-inline bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
- ElementsKind to_kind);
-
// Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };
@@ -209,9 +171,47 @@ enum CreationFlag {
};
+// Indicates whether transitions can be added to a source map or not.
+enum TransitionFlag {
+ INSERT_TRANSITION,
+ OMIT_TRANSITION
+};
+
+
+// Indicates whether the transition is simple: the target map of the transition
+// either extends the current map with a new property, or it modifies the
+// property that was added last to the current map.
+enum SimpleTransitionFlag {
+ SIMPLE_TRANSITION,
+ FULL_TRANSITION
+};
+
+
+// Indicates whether we are only interested in the descriptors of a particular
+// map, or in all descriptors in the descriptor array.
+enum DescriptorFlag {
+ ALL_DESCRIPTORS,
+ OWN_DESCRIPTORS
+};
+
+// The GC maintains a bit of information, the MarkingParity, which toggles
+// from odd to even and back every time marking is completed. Incremental
+// marking can visit an object twice during a marking phase, so algorithms that
+// that piggy-back on marking can use the parity to ensure that they only
+// perform an operation on an object once per marking phase: they record the
+// MarkingParity when they visit an object, and only re-visit the object when it
+// is marked again and the MarkingParity changes.
+enum MarkingParity {
+ NO_MARKING_PARITY,
+ ODD_MARKING_PARITY,
+ EVEN_MARKING_PARITY
+};
+
// Instance size sentinel for objects of variable size.
const int kVariableSizeSentinel = 0;
+const int kStubMajorKeyBits = 6;
+const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits;
// All Maps have a field instance_type containing a InstanceType.
// It describes the type of the instances.
@@ -479,7 +479,7 @@ const uint32_t kSymbolTag = 0x40;
// two-byte characters or one-byte characters.
const uint32_t kStringEncodingMask = 0x4;
const uint32_t kTwoByteStringTag = 0x0;
-const uint32_t kAsciiStringTag = 0x4;
+const uint32_t kOneByteStringTag = 0x4;
// If bit 7 is clear, the low-order 2 bits indicate the representation
// of the string.
@@ -530,39 +530,39 @@ const uint32_t kShortcutTypeTag = kConsStringTag;
enum InstanceType {
// String types.
SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kSeqStringTag,
- ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kSeqStringTag,
+ ASCII_SYMBOL_TYPE = kOneByteStringTag | kSymbolTag | kSeqStringTag,
CONS_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kConsStringTag,
- CONS_ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kConsStringTag,
+ CONS_ASCII_SYMBOL_TYPE = kOneByteStringTag | kSymbolTag | kConsStringTag,
SHORT_EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag |
kExternalStringTag | kShortExternalStringTag,
SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kSymbolTag | kExternalStringTag |
kAsciiDataHintTag | kShortExternalStringTag,
- SHORT_EXTERNAL_ASCII_SYMBOL_TYPE = kAsciiStringTag | kExternalStringTag |
+ SHORT_EXTERNAL_ASCII_SYMBOL_TYPE = kOneByteStringTag | kExternalStringTag |
kSymbolTag | kShortExternalStringTag,
EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kExternalStringTag,
EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kSymbolTag | kExternalStringTag | kAsciiDataHintTag,
EXTERNAL_ASCII_SYMBOL_TYPE =
- kAsciiStringTag | kSymbolTag | kExternalStringTag,
+ kOneByteStringTag | kSymbolTag | kExternalStringTag,
STRING_TYPE = kTwoByteStringTag | kSeqStringTag,
- ASCII_STRING_TYPE = kAsciiStringTag | kSeqStringTag,
+ ASCII_STRING_TYPE = kOneByteStringTag | kSeqStringTag,
CONS_STRING_TYPE = kTwoByteStringTag | kConsStringTag,
- CONS_ASCII_STRING_TYPE = kAsciiStringTag | kConsStringTag,
+ CONS_ASCII_STRING_TYPE = kOneByteStringTag | kConsStringTag,
SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag,
- SLICED_ASCII_STRING_TYPE = kAsciiStringTag | kSlicedStringTag,
+ SLICED_ASCII_STRING_TYPE = kOneByteStringTag | kSlicedStringTag,
SHORT_EXTERNAL_STRING_TYPE =
kTwoByteStringTag | kExternalStringTag | kShortExternalStringTag,
SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kExternalStringTag |
kAsciiDataHintTag | kShortExternalStringTag,
SHORT_EXTERNAL_ASCII_STRING_TYPE =
- kAsciiStringTag | kExternalStringTag | kShortExternalStringTag,
+ kOneByteStringTag | kExternalStringTag | kShortExternalStringTag,
EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag,
EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kExternalStringTag | kAsciiDataHintTag,
// LAST_STRING_TYPE
- EXTERNAL_ASCII_STRING_TYPE = kAsciiStringTag | kExternalStringTag,
+ EXTERNAL_ASCII_STRING_TYPE = kOneByteStringTag | kExternalStringTag,
PRIVATE_EXTERNAL_ASCII_STRING_TYPE = EXTERNAL_ASCII_STRING_TYPE,
// Objects allocated in their own spaces (never in new space).
@@ -684,6 +684,25 @@ STATIC_CHECK(ODDBALL_TYPE == Internals::kOddballType);
STATIC_CHECK(FOREIGN_TYPE == Internals::kForeignType);
+#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) \
+ V(FAST_ELEMENTS_SUB_TYPE) \
+ V(DICTIONARY_ELEMENTS_SUB_TYPE) \
+ V(FAST_PROPERTIES_SUB_TYPE) \
+ V(DICTIONARY_PROPERTIES_SUB_TYPE) \
+ V(MAP_CODE_CACHE_SUB_TYPE) \
+ V(SCOPE_INFO_SUB_TYPE) \
+ V(SYMBOL_TABLE_SUB_TYPE) \
+ V(DESCRIPTOR_ARRAY_SUB_TYPE) \
+ V(TRANSITION_ARRAY_SUB_TYPE)
+
+enum FixedArraySubInstanceType {
+#define DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE(name) name,
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE)
+#undef DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE
+ LAST_FIXED_ARRAY_SUB_TYPE = TRANSITION_ARRAY_SUB_TYPE
+};
+
+
enum CompareResult {
LESS = -1,
EQUAL = 0,
@@ -704,12 +723,13 @@ enum CompareResult {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
+class AccessorPair;
class DictionaryElementsAccessor;
class ElementsAccessor;
+class Failure;
class FixedArrayBase;
class ObjectVisitor;
class StringStream;
-class Failure;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@@ -723,6 +743,11 @@ struct ValueInfo : public Malloced {
// A template-ized version of the IsXXX functions.
template <class C> static inline bool Is(Object* obj);
+#ifdef VERIFY_HEAP
+#define DECLARE_VERIFIER(Name) void Name##Verify();
+#else
+#define DECLARE_VERIFIER(Name)
+#endif
class MaybeObject BASE_EMBEDDED {
public:
@@ -756,6 +781,13 @@ class MaybeObject BASE_EMBEDDED {
return true;
}
+ template<typename T>
+ inline bool ToHandle(Handle<T>* obj) {
+ if (IsFailure()) return false;
+ *obj = handle(T::cast(reinterpret_cast<Object*>(this)));
+ return true;
+ }
+
#ifdef OBJECT_PRINT
// Prints this object with details.
inline void Print() {
@@ -767,7 +799,7 @@ class MaybeObject BASE_EMBEDDED {
void Print(FILE* out);
void PrintLn(FILE* out);
#endif
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verifies the object.
void Verify();
#endif
@@ -810,14 +842,14 @@ class MaybeObject BASE_EMBEDDED {
V(JSModule) \
V(Map) \
V(DescriptorArray) \
+ V(TransitionArray) \
V(DeoptimizationInputData) \
V(DeoptimizationOutputData) \
V(TypeFeedbackCells) \
V(FixedArray) \
V(FixedDoubleArray) \
V(Context) \
- V(GlobalContext) \
- V(ModuleContext) \
+ V(NativeContext) \
V(ScopeInfo) \
V(JSFunction) \
V(Code) \
@@ -853,6 +885,7 @@ class MaybeObject BASE_EMBEDDED {
V(UndetectableObject) \
V(AccessCheckNeeded) \
V(JSGlobalPropertyCell) \
+ V(ObjectHashTable) \
class JSReceiver;
@@ -913,8 +946,8 @@ class Object : public MaybeObject {
Object* ToBoolean(); // ECMA-262 9.2.
// Convert to a JSObject if needed.
- // global_context is used when creating wrapper object.
- MUST_USE_RESULT MaybeObject* ToObject(Context* global_context);
+ // native_context is used when creating wrapper object.
+ MUST_USE_RESULT MaybeObject* ToObject(Context* native_context);
// Converts this to a Smi if possible.
// Failure is returned otherwise.
@@ -974,11 +1007,13 @@ class Object : public MaybeObject {
// < the length of the string. Used to implement [] on strings.
inline bool IsStringObjectWithCharacterAt(uint32_t index);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verify a pointer is a valid object pointer.
static void VerifyPointer(Object* p);
#endif
+ inline void VerifyApiCallResultType();
+
// Prints this object without details.
inline void ShortPrint() {
ShortPrint(stdout);
@@ -1027,9 +1062,8 @@ class Smi: public Object {
}
void SmiPrint(FILE* out);
void SmiPrint(StringStream* accumulator);
-#ifdef DEBUG
- void SmiVerify();
-#endif
+
+ DECLARE_VERIFIER(Smi)
static const int kMinValue =
(static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
@@ -1100,9 +1134,8 @@ class Failure: public MaybeObject {
}
void FailurePrint(FILE* out);
void FailurePrint(StringStream* accumulator);
-#ifdef DEBUG
- void FailureVerify();
-#endif
+
+ DECLARE_VERIFIER(Failure)
private:
inline intptr_t value() const;
@@ -1233,9 +1266,8 @@ class HeapObject: public Object {
void HeapObjectPrint(FILE* out);
void PrintHeader(FILE* out, const char* id);
#endif
-
-#ifdef DEBUG
- void HeapObjectVerify();
+ DECLARE_VERIFIER(HeapObject)
+#ifdef VERIFY_HEAP
inline void VerifyObjectField(int offset);
inline void VerifySmiField(int offset);
@@ -1263,9 +1295,6 @@ class HeapObject: public Object {
};
-#define SLOT_ADDR(obj, offset) \
- reinterpret_cast<Object**>((obj)->address() + offset)
-
// This class describes a body of an object of a fixed size
// in which all pointer fields are located in the [start_offset, end_offset)
// interval.
@@ -1280,8 +1309,8 @@ class FixedBodyDescriptor {
template<typename StaticVisitor>
static inline void IterateBody(HeapObject* obj) {
- StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
- SLOT_ADDR(obj, end_offset));
+ StaticVisitor::VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, end_offset));
}
};
@@ -1300,13 +1329,11 @@ class FlexibleBodyDescriptor {
template<typename StaticVisitor>
static inline void IterateBody(HeapObject* obj, int object_size) {
- StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
- SLOT_ADDR(obj, object_size));
+ StaticVisitor::VisitPointers(HeapObject::RawField(obj, start_offset),
+ HeapObject::RawField(obj, object_size));
}
};
-#undef SLOT_ADDR
-
// The HeapNumber class describes heap allocated numbers that cannot be
// represented in a Smi (small integer)
@@ -1326,9 +1353,7 @@ class HeapNumber: public HeapObject {
}
void HeapNumberPrint(FILE* out);
void HeapNumberPrint(StringStream* accumulator);
-#ifdef DEBUG
- void HeapNumberVerify();
-#endif
+ DECLARE_VERIFIER(HeapNumber)
inline int get_exponent();
inline int get_sign();
@@ -1392,6 +1417,20 @@ class JSReceiver: public HeapObject {
FORCE_DELETION
};
+ // A non-keyed store is of the form a.x = foo or a["x"] = foo whereas
+ // a keyed store is of the form a[expression] = foo.
+ enum StoreFromKeyed {
+ MAY_BE_STORE_FROM_KEYED,
+ CERTAINLY_NOT_STORE_FROM_KEYED
+ };
+
+ // Internal properties (e.g. the hidden properties dictionary) might
+ // be added even though the receiver is non-extensible.
+ enum ExtensibilityCheck {
+ PERFORM_EXTENSIBILITY_CHECK,
+ OMIT_EXTENSIBILITY_CHECK
+ };
+
// Casting.
static inline JSReceiver* cast(Object* obj);
@@ -1402,16 +1441,20 @@ class JSReceiver: public HeapObject {
StrictModeFlag strict_mode,
bool skip_fallback_interceptor = false);
// Can cause GC.
- MUST_USE_RESULT MaybeObject* SetProperty(String* key,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode,
- bool skip_fallback_interceptor = false);
- MUST_USE_RESULT MaybeObject* SetProperty(LookupResult* result,
- String* key,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ MUST_USE_RESULT MaybeObject* SetProperty(
+ String* key,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_from_keyed = MAY_BE_STORE_FROM_KEYED,
+ bool skip_fallback_interceptor = false);
+ MUST_USE_RESULT MaybeObject* SetProperty(
+ LookupResult* result,
+ String* key,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_from_keyed = MAY_BE_STORE_FROM_KEYED);
MUST_USE_RESULT MaybeObject* SetPropertyWithDefinedSetter(JSReceiver* setter,
Object* value);
@@ -1441,14 +1484,21 @@ class JSReceiver: public HeapObject {
String* name);
PropertyAttributes GetLocalPropertyAttribute(String* name);
+ inline PropertyAttributes GetElementAttribute(uint32_t index);
+ inline PropertyAttributes GetLocalElementAttribute(uint32_t index);
+
// Can cause a GC.
inline bool HasProperty(String* name);
inline bool HasLocalProperty(String* name);
inline bool HasElement(uint32_t index);
+ inline bool HasLocalElement(uint32_t index);
// Return the object's prototype (might be Heap::null_value()).
inline Object* GetPrototype();
+ // Return the constructor function (may be Heap::null_value()).
+ inline Object* GetConstructor();
+
// Set the object's prototype (only JSReceiver and null are allowed).
MUST_USE_RESULT MaybeObject* SetPrototype(Object* value,
bool skip_hidden_prototypes);
@@ -1470,10 +1520,10 @@ class JSReceiver: public HeapObject {
Smi* GenerateIdentityHash();
private:
- PropertyAttributes GetPropertyAttribute(JSReceiver* receiver,
- LookupResult* result,
- String* name,
- bool continue_search);
+ PropertyAttributes GetPropertyAttributeForResult(JSReceiver* receiver,
+ LookupResult* result,
+ String* name,
+ bool continue_search);
DISALLOW_IMPLICIT_CONSTRUCTORS(JSReceiver);
};
@@ -1515,13 +1565,19 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT inline MaybeObject* ResetElements();
inline ElementsKind GetElementsKind();
inline ElementsAccessor* GetElementsAccessor();
- inline bool HasFastSmiOnlyElements();
- inline bool HasFastElements();
- // Returns if an object has either FAST_ELEMENT or FAST_SMI_ONLY_ELEMENT
- // elements. TODO(danno): Rename HasFastTypeElements to HasFastElements() and
- // HasFastElements to HasFastObjectElements.
- inline bool HasFastTypeElements();
+ // Returns true if an object has elements of FAST_SMI_ELEMENTS ElementsKind.
+ inline bool HasFastSmiElements();
+ // Returns true if an object has elements of FAST_ELEMENTS ElementsKind.
+ inline bool HasFastObjectElements();
+ // Returns true if an object has elements of FAST_ELEMENTS or
+ // FAST_SMI_ONLY_ELEMENTS.
+ inline bool HasFastSmiOrObjectElements();
+ // Returns true if an object has elements of FAST_DOUBLE_ELEMENTS
+ // ElementsKind.
inline bool HasFastDoubleElements();
+ // Returns true if an object has elements of FAST_HOLEY_*_ELEMENTS
+ // ElementsKind.
+ inline bool HasFastHoleyElements();
inline bool HasNonStrictArgumentsElements();
inline bool HasDictionaryElements();
inline bool HasExternalPixelElements();
@@ -1563,7 +1619,8 @@ class JSObject: public JSReceiver {
String* key,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode);
MUST_USE_RESULT MaybeObject* SetPropertyWithFailedAccessCheck(
LookupResult* result,
String* name,
@@ -1585,7 +1642,8 @@ class JSObject: public JSReceiver {
String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ ExtensibilityCheck extensibility_check);
static Handle<Object> SetLocalPropertyIgnoreAttributes(
Handle<JSObject> object,
@@ -1593,6 +1651,18 @@ class JSObject: public JSReceiver {
Handle<Object> value,
PropertyAttributes attributes);
+ // Try to follow an existing transition to a field with attributes NONE. The
+ // return value indicates whether the transition was successful.
+ static inline bool TryTransitionToField(Handle<JSObject> object,
+ Handle<String> key);
+
+ inline int LastAddedFieldIndex();
+
+ // Extend the receiver with a single fast property appeared first in the
+ // passed map. This also extends the property backing store if necessary.
+ static void AddFastPropertyUsingMap(Handle<JSObject> object, Handle<Map> map);
+ inline MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(Map* map);
+
// Can cause GC.
MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributes(
String* key,
@@ -1622,6 +1692,8 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* DeleteNormalizedProperty(String* name,
DeleteMode mode);
+ MUST_USE_RESULT MaybeObject* OptimizeAsPrototype();
+
// Retrieve interceptors.
InterceptorInfo* GetNamedInterceptor();
InterceptorInfo* GetIndexedInterceptor();
@@ -1638,16 +1710,28 @@ class JSObject: public JSReceiver {
LookupResult* result,
String* name,
bool continue_search);
+ PropertyAttributes GetElementAttributeWithReceiver(JSReceiver* receiver,
+ uint32_t index,
+ bool continue_search);
static void DefineAccessor(Handle<JSObject> object,
Handle<String> name,
Handle<Object> getter,
Handle<Object> setter,
PropertyAttributes attributes);
+ // Can cause GC.
MUST_USE_RESULT MaybeObject* DefineAccessor(String* name,
Object* getter,
Object* setter,
PropertyAttributes attributes);
+ // Try to define a single accessor paying attention to map transitions.
+ // Returns a JavaScript null if this was not possible and we have to use the
+ // slow case. Note that we can fail due to allocations, too.
+ MUST_USE_RESULT MaybeObject* DefineFastAccessor(
+ String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes);
Object* LookupAccessor(String* name, AccessorComponent component);
MUST_USE_RESULT MaybeObject* DefineAccessor(AccessorInfo* info);
@@ -1659,15 +1743,15 @@ class JSObject: public JSReceiver {
String* name,
PropertyAttributes* attributes);
MUST_USE_RESULT MaybeObject* GetPropertyWithInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes);
MUST_USE_RESULT MaybeObject* GetPropertyPostInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes);
MUST_USE_RESULT MaybeObject* GetLocalPropertyPostInterceptor(
- JSReceiver* receiver,
+ Object* receiver,
String* name,
PropertyAttributes* attributes);
@@ -1707,16 +1791,17 @@ class JSObject: public JSReceiver {
static int GetIdentityHash(Handle<JSObject> obj);
MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
- MUST_USE_RESULT MaybeObject* SetIdentityHash(Object* hash, CreationFlag flag);
+ MUST_USE_RESULT MaybeObject* SetIdentityHash(Smi* hash, CreationFlag flag);
static Handle<Object> DeleteProperty(Handle<JSObject> obj,
Handle<String> name);
+ // Can cause GC.
MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
static Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
- inline void ValidateSmiOnlyElements();
+ inline void ValidateElements();
// Makes sure that this object can contain HeapObject as elements.
MUST_USE_RESULT inline MaybeObject* EnsureCanContainHeapObjectElements();
@@ -1728,6 +1813,7 @@ class JSObject: public JSReceiver {
EnsureElementsMode mode);
MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
FixedArrayBase* elements,
+ uint32_t length,
EnsureElementsMode mode);
MUST_USE_RESULT MaybeObject* EnsureCanContainElements(
Arguments* arguments,
@@ -1748,9 +1834,6 @@ class JSObject: public JSReceiver {
// be represented as a double and not a Smi.
bool ShouldConvertToFastDoubleElements(bool* has_smi_only_elements);
- // Tells whether the index'th element is present.
- bool HasElementWithReceiver(JSReceiver* receiver, uint32_t index);
-
// Computes the new capacity when expanding the elements of a JSObject.
static int NewElementsCapacity(int old_capacity) {
// (old_capacity + 50%) + 16
@@ -1775,9 +1858,7 @@ class JSObject: public JSReceiver {
DICTIONARY_ELEMENT
};
- LocalElementType HasLocalElement(uint32_t index);
-
- bool HasElementWithInterceptor(JSReceiver* receiver, uint32_t index);
+ LocalElementType GetLocalElementType(uint32_t index);
MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
Object* value,
@@ -1826,10 +1907,10 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* GetElementWithInterceptor(Object* receiver,
uint32_t index);
- enum SetFastElementsCapacityMode {
- kAllowSmiOnlyElements,
- kForceSmiOnlyElements,
- kDontAllowSmiOnlyElements
+ enum SetFastElementsCapacitySmiMode {
+ kAllowSmiElements,
+ kForceSmiElements,
+ kDontAllowSmiElements
};
// Replace the elements' backing store with fast elements of the given
@@ -1838,7 +1919,7 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* SetFastElementsCapacityAndLength(
int capacity,
int length,
- SetFastElementsCapacityMode set_capacity_mode);
+ SetFastElementsCapacitySmiMode smi_mode);
MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength(
int capacity,
int length);
@@ -1870,10 +1951,9 @@ class JSObject: public JSReceiver {
void LocalLookupRealNamedProperty(String* name, LookupResult* result);
void LookupRealNamedProperty(String* name, LookupResult* result);
void LookupRealNamedPropertyInPrototypes(String* name, LookupResult* result);
- void LookupCallbackSetterInPrototypes(String* name, LookupResult* result);
MUST_USE_RESULT MaybeObject* SetElementWithCallbackSetterInPrototypes(
uint32_t index, Object* value, bool* found, StrictModeFlag strict_mode);
- void LookupCallback(String* name, LookupResult* result);
+ void LookupCallbackProperty(String* name, LookupResult* result);
// Returns the number of properties on this object filtering out properties
// with the specified attributes (ignoring interceptors).
@@ -1901,7 +1981,8 @@ class JSObject: public JSReceiver {
// new_map.
MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(Map* new_map,
String* name,
- Object* value);
+ Object* value,
+ int field_index);
// Add a constant function property to a fast-case object.
// This leaves a CONSTANT_TRANSITION in the old map, and
@@ -1934,39 +2015,40 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind);
- // Converts a descriptor of any other type to a real field,
- // backed by the properties array. Descriptors of visible
- // types, such as CONSTANT_FUNCTION, keep their enumeration order.
- // Converts the descriptor on the original object's map to a
- // map transition, and the the new field is on the object's new map.
- MUST_USE_RESULT MaybeObject* ConvertDescriptorToFieldAndMapTransition(
+ // Replaces an existing transition with a transition to a map with a FIELD.
+ MUST_USE_RESULT MaybeObject* ConvertTransitionToMapTransition(
+ int transition_index,
String* name,
Object* new_value,
PropertyAttributes attributes);
- // Converts a descriptor of any other type to a real field,
- // backed by the properties array. Descriptors of visible
- // types, such as CONSTANT_FUNCTION, keep their enumeration order.
+ // Converts a descriptor of any other type to a real field, backed by the
+ // properties array.
MUST_USE_RESULT MaybeObject* ConvertDescriptorToField(
String* name,
Object* new_value,
PropertyAttributes attributes);
// Add a property to a fast-case object.
- MUST_USE_RESULT MaybeObject* AddFastProperty(String* name,
- Object* value,
- PropertyAttributes attributes);
+ MUST_USE_RESULT MaybeObject* AddFastProperty(
+ String* name,
+ Object* value,
+ PropertyAttributes attributes,
+ StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED);
// Add a property to a slow-case object.
MUST_USE_RESULT MaybeObject* AddSlowProperty(String* name,
Object* value,
PropertyAttributes attributes);
- // Add a property to an object.
- MUST_USE_RESULT MaybeObject* AddProperty(String* name,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ // Add a property to an object. May cause GC.
+ MUST_USE_RESULT MaybeObject* AddProperty(
+ String* name,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED,
+ ExtensibilityCheck extensibility_check = PERFORM_EXTENSIBILITY_CHECK);
// Convert the object to use the canonical dictionary
// representation. If the object is expected to have additional properties
@@ -2041,9 +2123,7 @@ class JSObject: public JSReceiver {
}
void JSObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSObject)
#ifdef OBJECT_PRINT
inline void PrintProperties() {
PrintProperties(stdout);
@@ -2054,6 +2134,10 @@ class JSObject: public JSReceiver {
PrintElements(stdout);
}
void PrintElements(FILE* out);
+ inline void PrintTransitions() {
+ PrintTransitions(stdout);
+ }
+ void PrintTransitions(FILE* out);
#endif
void PrintElementsTransition(
@@ -2086,7 +2170,7 @@ class JSObject: public JSReceiver {
// Maximal number of fast properties for the JSObject. Used to
// restrict the number of map transitions to avoid an explosion in
// the number of maps for objects used as dictionaries.
- inline int MaxFastProperties();
+ inline bool TooManyFastProperties(int properties, StoreFromKeyed store_mode);
// Maximal number of elements (numbered 0 .. kMaxElementCount - 1).
// Also maximal value of JSArray's length property.
@@ -2108,7 +2192,8 @@ class JSObject: public JSReceiver {
static const int kMaxUncheckedOldFastElementsLength = 500;
static const int kInitialMaxFastElementArray = 100000;
- static const int kMaxFastProperties = 12;
+ static const int kFastPropertiesSoftLimit = 12;
+ static const int kMaxFastProperties = 64;
static const int kMaxInstanceSize = 255 * kPointerSize;
// When extending the backing storage for property values, we increase
// its size by more than the 1 entry necessary, so sequentially adding fields
@@ -2127,6 +2212,15 @@ class JSObject: public JSReceiver {
static inline int SizeOf(Map* map, HeapObject* object);
};
+ // Enqueue change record for Object.observe. May cause GC.
+ static void EnqueueChangeRecord(Handle<JSObject> object,
+ const char* type,
+ Handle<String> name,
+ Handle<Object> old_value);
+
+ // Deliver change records to observers. May cause GC.
+ static void DeliverChangeRecords(Isolate* isolate);
+
private:
friend class DictionaryElementsAccessor;
@@ -2134,6 +2228,14 @@ class JSObject: public JSReceiver {
Object* structure,
uint32_t index,
Object* holder);
+ MUST_USE_RESULT PropertyAttributes GetElementAttributeWithInterceptor(
+ JSReceiver* receiver,
+ uint32_t index,
+ bool continue_search);
+ MUST_USE_RESULT PropertyAttributes GetElementAttributeWithoutInterceptor(
+ JSReceiver* receiver,
+ uint32_t index,
+ bool continue_search);
MUST_USE_RESULT MaybeObject* SetElementWithCallback(
Object* structure,
uint32_t index,
@@ -2155,17 +2257,16 @@ class JSObject: public JSReceiver {
bool check_prototype,
SetPropertyMode set_mode);
- // Searches the prototype chain for a callback setter and sets the property
- // with the setter if it finds one. The '*found' flag indicates whether
- // a setter was found or not.
- // This function can cause GC and can return a failure result with
- // '*found==true'.
- MUST_USE_RESULT MaybeObject* SetPropertyWithCallbackSetterInPrototypes(
+ // Searches the prototype chain for property 'name'. If it is found and
+ // has a setter, invoke it and set '*done' to true. If it is found and is
+ // read-only, reject and set '*done' to true. Otherwise, set '*done' to
+ // false. Can cause GC and can return a failure result with '*done==true'.
+ MUST_USE_RESULT MaybeObject* SetPropertyViaPrototypes(
String* name,
Object* value,
PropertyAttributes attributes,
- bool* found,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ bool* done);
MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name,
DeleteMode mode);
@@ -2207,18 +2308,23 @@ class JSObject: public JSReceiver {
Object* getter,
Object* setter,
PropertyAttributes attributes);
- void LookupInDescriptor(String* name, LookupResult* result);
-
- // Returns the hidden properties backing store object, currently
- // a StringDictionary, stored on this object.
- // If no hidden properties object has been put on this object,
- // return undefined, unless create_if_absent is true, in which case
- // a new dictionary is created, added to this object, and returned.
- MUST_USE_RESULT MaybeObject* GetHiddenPropertiesDictionary(
- bool create_if_absent);
- // Updates the existing hidden properties dictionary.
- MUST_USE_RESULT MaybeObject* SetHiddenPropertiesDictionary(
- StringDictionary* dictionary);
+
+
+ enum InitializeHiddenProperties {
+ CREATE_NEW_IF_ABSENT,
+ ONLY_RETURN_INLINE_VALUE
+ };
+
+ // If create_if_absent is true, return the hash table backing store
+ // for hidden properties. If there is no backing store, allocate one.
+ // If create_if_absent is false, return the hash table backing store
+ // or the inline stored identity hash, whatever is found.
+ MUST_USE_RESULT MaybeObject* GetHiddenPropertiesHashTable(
+ InitializeHiddenProperties init_option);
+ // Set the hidden property backing store to either a hash table or
+ // the inline-stored identity hash.
+ MUST_USE_RESULT MaybeObject* SetHiddenPropertiesHashTable(
+ Object* value);
DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
};
@@ -2242,6 +2348,8 @@ class FixedArrayBase: public HeapObject {
class FixedDoubleArray;
+class IncrementalMarking;
+
// FixedArray describes fixed-sized arrays with element type Object*.
class FixedArray: public FixedArrayBase {
@@ -2314,8 +2422,8 @@ class FixedArray: public FixedArrayBase {
}
void FixedArrayPrint(FILE* out);
#endif
+ DECLARE_VERIFIER(FixedArray)
#ifdef DEBUG
- void FixedArrayVerify();
// Checks if two FixedArrays have identical contents.
bool IsEqualTo(FixedArray* other);
#endif
@@ -2401,34 +2509,40 @@ class FixedDoubleArray: public FixedArrayBase {
}
void FixedDoubleArrayPrint(FILE* out);
#endif
-
-#ifdef DEBUG
- void FixedDoubleArrayVerify();
-#endif
+ DECLARE_VERIFIER(FixedDoubleArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FixedDoubleArray);
};
-class IncrementalMarking;
-
-
// DescriptorArrays are fixed arrays used to hold instance descriptors.
// The format of the these objects is:
-// TODO(1399): It should be possible to make room for bit_field3 in the map
-// without overloading the instance descriptors field in the map
-// (and storing it in the DescriptorArray when the map has one).
-// [0]: storage for bit_field3 for Map owning this object (Smi)
-// [1]: point to a fixed array with (value, detail) pairs.
-// [2]: next enumeration index (Smi), or pointer to small fixed array:
-// [0]: next enumeration index (Smi)
-// [1]: pointer to fixed array with enum cache
-// [3]: first key
-// [length() - 1]: last key
-//
+// [0]: Number of descriptors
+// [1]: Either Smi(0) if uninitialized, or a pointer to small fixed array:
+// [0]: pointer to fixed array with enum cache
+// [1]: either Smi(0) or pointer to fixed array with indices
+// [2]: first key
+// [2 + number of descriptors * kDescriptorSize]: start of slack
class DescriptorArray: public FixedArray {
public:
+ // WhitenessWitness is used to prove that a descriptor array is white
+ // (unmarked), so incremental write barriers can be skipped because the
+ // marking invariant cannot be broken and slots pointing into evacuation
+ // candidates will be discovered when the object is scanned. A witness is
+ // always stack-allocated right after creating an array. By allocating a
+ // witness, incremental marking is globally disabled. The witness is then
+ // passed along wherever needed to statically prove that the array is known to
+ // be white.
+ class WhitenessWitness {
+ public:
+ inline explicit WhitenessWitness(FixedArray* array);
+ inline ~WhitenessWitness();
+
+ private:
+ IncrementalMarking* marking_;
+ };
+
// Returns true for both shared empty_descriptor_array and for smis, which the
// map uses to encode additional bit fields when the descriptor array is not
// yet used.
@@ -2436,43 +2550,58 @@ class DescriptorArray: public FixedArray {
// Returns the number of descriptors in the array.
int number_of_descriptors() {
- ASSERT(length() > kFirstIndex || IsEmpty());
+ ASSERT(length() >= kFirstIndex || IsEmpty());
int len = length();
- return len <= kFirstIndex ? 0 : len - kFirstIndex;
+ return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
}
- int NextEnumerationIndex() {
- if (IsEmpty()) return PropertyDetails::kInitialIndex;
- Object* obj = get(kEnumerationIndexIndex);
- if (obj->IsSmi()) {
- return Smi::cast(obj)->value();
- } else {
- Object* index = FixedArray::cast(obj)->get(kEnumCacheBridgeEnumIndex);
- return Smi::cast(index)->value();
- }
+ int number_of_descriptors_storage() {
+ int len = length();
+ return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
}
- // Set next enumeration index and flush any enum cache.
- void SetNextEnumerationIndex(int value) {
- if (!IsEmpty()) {
- set(kEnumerationIndexIndex, Smi::FromInt(value));
- }
+ int NumberOfSlackDescriptors() {
+ return number_of_descriptors_storage() - number_of_descriptors();
}
+
+ inline void SetNumberOfDescriptors(int number_of_descriptors);
+ inline int number_of_entries() { return number_of_descriptors(); }
+
bool HasEnumCache() {
- return !IsEmpty() && !get(kEnumerationIndexIndex)->IsSmi();
+ return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
+ }
+
+ void CopyEnumCacheFrom(DescriptorArray* array) {
+ set(kEnumCacheIndex, array->get(kEnumCacheIndex));
+ }
+
+ FixedArray* GetEnumCache() {
+ ASSERT(HasEnumCache());
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
+ }
+
+ bool HasEnumIndicesCache() {
+ if (IsEmpty()) return false;
+ Object* object = get(kEnumCacheIndex);
+ if (object->IsSmi()) return false;
+ FixedArray* bridge = FixedArray::cast(object);
+ return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
+ }
+
+ FixedArray* GetEnumIndicesCache() {
+ ASSERT(HasEnumIndicesCache());
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}
- Object* GetEnumCache() {
+ Object** GetEnumCacheSlot() {
ASSERT(HasEnumCache());
- FixedArray* bridge = FixedArray::cast(get(kEnumerationIndexIndex));
- return bridge->get(kEnumCacheBridgeCacheIndex);
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kEnumCacheOffset);
}
- // TODO(1399): It should be possible to make room for bit_field3 in the map
- // without overloading the instance descriptors field in the map
- // (and storing it in the DescriptorArray when the map has one).
- inline int bit_field3_storage();
- inline void set_bit_field3_storage(int value);
+ void ClearEnumCache();
// Initialize or change the enum cache,
// using the supplied storage for the small "bridge".
@@ -2482,92 +2611,56 @@ class DescriptorArray: public FixedArray {
// Accessors for fetching instance descriptor at descriptor number.
inline String* GetKey(int descriptor_number);
+ inline Object** GetKeySlot(int descriptor_number);
inline Object* GetValue(int descriptor_number);
+ inline Object** GetValueSlot(int descriptor_number);
inline PropertyDetails GetDetails(int descriptor_number);
inline PropertyType GetType(int descriptor_number);
inline int GetFieldIndex(int descriptor_number);
inline JSFunction* GetConstantFunction(int descriptor_number);
inline Object* GetCallbacksObject(int descriptor_number);
inline AccessorDescriptor* GetCallbacks(int descriptor_number);
- inline bool IsProperty(int descriptor_number);
- inline bool IsTransitionOnly(int descriptor_number);
- inline bool IsNullDescriptor(int descriptor_number);
- class WhitenessWitness {
- public:
- inline explicit WhitenessWitness(DescriptorArray* array);
- inline ~WhitenessWitness();
-
- private:
- IncrementalMarking* marking_;
- };
+ inline String* GetSortedKey(int descriptor_number);
+ inline int GetSortedKeyIndex(int descriptor_number);
+ inline void SetSortedKey(int pointer, int descriptor_number);
// Accessor for complete descriptor.
inline void Get(int descriptor_number, Descriptor* desc);
inline void Set(int descriptor_number,
Descriptor* desc,
const WhitenessWitness&);
+ inline void Set(int descriptor_number, Descriptor* desc);
- // Transfer a complete descriptor from the src descriptor array to the dst
- // one, dropping map transitions in CALLBACKS.
- static void CopyFrom(Handle<DescriptorArray> dst,
- int dst_index,
- Handle<DescriptorArray> src,
- int src_index,
- const WhitenessWitness& witness);
+ // Append automatically sets the enumeration index. This should only be used
+ // to add descriptors in bulk at the end, followed by sorting the descriptor
+ // array.
+ inline void Append(Descriptor* desc, const WhitenessWitness&);
+ inline void Append(Descriptor* desc);
// Transfer a complete descriptor from the src descriptor array to this
- // descriptor array, dropping map transitions in CALLBACKS.
- MUST_USE_RESULT MaybeObject* CopyFrom(int dst_index,
- DescriptorArray* src,
- int src_index,
- const WhitenessWitness&);
-
- // Copy the descriptor array, insert a new descriptor and optionally
- // remove map transitions. If the descriptor is already present, it is
- // replaced. If a replaced descriptor is a real property (not a transition
- // or null), its enumeration index is kept as is.
- // If adding a real property, map transitions must be removed. If adding
- // a transition, they must not be removed. All null descriptors are removed.
- MUST_USE_RESULT MaybeObject* CopyInsert(Descriptor* descriptor,
- TransitionFlag transition_flag);
-
- // Return a copy of the array with all transitions and null descriptors
- // removed. Return a Failure object in case of an allocation failure.
- MUST_USE_RESULT MaybeObject* RemoveTransitions();
+ // descriptor array.
+ void CopyFrom(int dst_index,
+ DescriptorArray* src,
+ int src_index,
+ const WhitenessWitness&);
- // Sort the instance descriptors by the hash codes of their keys.
- // Does not check for duplicates.
- void SortUnchecked(const WhitenessWitness&);
+ MUST_USE_RESULT MaybeObject* CopyUpTo(int enumeration_index);
// Sort the instance descriptors by the hash codes of their keys.
- // Checks the result for duplicates.
- void Sort(const WhitenessWitness&);
+ void Sort();
// Search the instance descriptors for given name.
- inline int Search(String* name);
+ INLINE(int Search(String* name, int number_of_own_descriptors));
// As the above, but uses DescriptorLookupCache and updates it when
// necessary.
- inline int SearchWithCache(String* name);
-
- // Tells whether the name is present int the array.
- bool Contains(String* name) { return kNotFound != Search(name); }
-
- // Perform a binary search in the instance descriptors represented
- // by this fixed array. low and high are descriptor indices. If there
- // are three instance descriptors in this array it should be called
- // with low=0 and high=2.
- int BinarySearch(String* name, int low, int high);
-
- // Perform a linear search in the instance descriptors represented
- // by this fixed array. len is the number of descriptor indices that are
- // valid. Does not require the descriptors to be sorted.
- int LinearSearch(String* name, int len);
+ INLINE(int SearchWithCache(String* name, Map* map));
// Allocates a DescriptorArray, but returns the singleton
// empty descriptor array object if number_of_descriptors is 0.
- MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors);
+ MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors,
+ int slack = 0);
// Casting.
static inline DescriptorArray* cast(Object* obj);
@@ -2575,27 +2668,28 @@ class DescriptorArray: public FixedArray {
// Constant for denoting key was not found.
static const int kNotFound = -1;
- static const int kBitField3StorageIndex = 0;
- static const int kContentArrayIndex = 1;
- static const int kEnumerationIndexIndex = 2;
- static const int kFirstIndex = 3;
+ static const int kDescriptorLengthIndex = 0;
+ static const int kEnumCacheIndex = 1;
+ static const int kFirstIndex = 2;
// The length of the "bridge" to the enum cache.
- static const int kEnumCacheBridgeLength = 3;
- static const int kEnumCacheBridgeEnumIndex = 0;
- static const int kEnumCacheBridgeCacheIndex = 1;
- static const int kEnumCacheBridgeIndicesCacheIndex = 2;
+ static const int kEnumCacheBridgeLength = 2;
+ static const int kEnumCacheBridgeCacheIndex = 0;
+ static const int kEnumCacheBridgeIndicesCacheIndex = 1;
// Layout description.
- static const int kBitField3StorageOffset = FixedArray::kHeaderSize;
- static const int kContentArrayOffset = kBitField3StorageOffset + kPointerSize;
- static const int kEnumerationIndexOffset = kContentArrayOffset + kPointerSize;
- static const int kFirstOffset = kEnumerationIndexOffset + kPointerSize;
+ static const int kDescriptorLengthOffset = FixedArray::kHeaderSize;
+ static const int kEnumCacheOffset = kDescriptorLengthOffset + kPointerSize;
+ static const int kFirstOffset = kEnumCacheOffset + kPointerSize;
// Layout description for the bridge array.
- static const int kEnumCacheBridgeEnumOffset = FixedArray::kHeaderSize;
- static const int kEnumCacheBridgeCacheOffset =
- kEnumCacheBridgeEnumOffset + kPointerSize;
+ static const int kEnumCacheBridgeCacheOffset = FixedArray::kHeaderSize;
+
+ // Layout of descriptor.
+ static const int kDescriptorKey = 0;
+ static const int kDescriptorDetails = 1;
+ static const int kDescriptorValue = 2;
+ static const int kDescriptorSize = 3;
#ifdef OBJECT_PRINT
// Print all the descriptors.
@@ -2607,7 +2701,7 @@ class DescriptorArray: public FixedArray {
#ifdef DEBUG
// Is the descriptor array sorted and without duplicates?
- bool IsSortedNoDuplicates();
+ bool IsSortedNoDuplicates(int valid_descriptors = -1);
// Is the descriptor array consistent with the back pointers in targets?
bool IsConsistentWithBackPointers(Map* current_map);
@@ -2620,6 +2714,12 @@ class DescriptorArray: public FixedArray {
// fit in a page).
static const int kMaxNumberOfDescriptors = 1024 + 512;
+ // Returns the fixed array length required to hold number_of_descriptors
+ // descriptors.
+ static int LengthFor(int number_of_descriptors) {
+ return ToKeyIndex(number_of_descriptors);
+ }
+
private:
// An entry in a DescriptorArray, represented as an (array, index) pair.
class Entry {
@@ -2637,32 +2737,40 @@ class DescriptorArray: public FixedArray {
// Conversion from descriptor number to array indices.
static int ToKeyIndex(int descriptor_number) {
- return descriptor_number+kFirstIndex;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorKey;
}
static int ToDetailsIndex(int descriptor_number) {
- return (descriptor_number << 1) + 1;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorDetails;
}
static int ToValueIndex(int descriptor_number) {
- return descriptor_number << 1;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorValue;
}
- // Swap operation on FixedArray without using write barriers.
- static inline void NoIncrementalWriteBarrierSwap(
- FixedArray* array, int first, int second);
-
- // Swap descriptor first and second.
- inline void NoIncrementalWriteBarrierSwapDescriptors(
- int first, int second);
+ // Swap first and second descriptor.
+ inline void SwapSortedKeys(int first, int second);
- FixedArray* GetContentArray() {
- return FixedArray::cast(get(kContentArrayIndex));
- }
DISALLOW_IMPLICIT_CONSTRUCTORS(DescriptorArray);
};
+enum SearchMode { ALL_ENTRIES, VALID_ENTRIES };
+
+template<SearchMode search_mode, typename T>
+inline int LinearSearch(T* array, String* name, int len, int valid_entries);
+
+
+template<SearchMode search_mode, typename T>
+inline int Search(T* array, String* name, int valid_entries = 0);
+
+
// HashTable is a subclass of FixedArray that implements a hash table
// that uses open addressing and quadratic probing.
//
@@ -2715,6 +2823,11 @@ class BaseShape {
template<typename Shape, typename Key>
class HashTable: public FixedArray {
public:
+ enum MinimumCapacity {
+ USE_DEFAULT_MINIMUM_CAPACITY,
+ USE_CUSTOM_MINIMUM_CAPACITY
+ };
+
// Wrapper methods
inline uint32_t Hash(Key key) {
if (Shape::UsesSeed) {
@@ -2767,6 +2880,7 @@ class HashTable: public FixedArray {
// Returns a new HashTable object. Might return Failure.
MUST_USE_RESULT static MaybeObject* Allocate(
int at_least_space_for,
+ MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY,
PretenureFlag pretenure = NOT_TENURED);
// Computes the required capacity for a table holding the given
@@ -2856,11 +2970,12 @@ class HashTable: public FixedArray {
return (hash + GetProbeOffset(number)) & (size - 1);
}
- static uint32_t FirstProbe(uint32_t hash, uint32_t size) {
+ inline static uint32_t FirstProbe(uint32_t hash, uint32_t size) {
return hash & (size - 1);
}
- static uint32_t NextProbe(uint32_t last, uint32_t number, uint32_t size) {
+ inline static uint32_t NextProbe(
+ uint32_t last, uint32_t number, uint32_t size) {
return (last + number) & (size - 1);
}
@@ -2947,6 +3062,8 @@ class SymbolTable: public HashTable<SymbolTableShape, HashTableKey*> {
private:
MUST_USE_RESULT MaybeObject* LookupKey(HashTableKey* key, Object** s);
+ template <bool seq_ascii> friend class JsonParser;
+
DISALLOW_IMPLICIT_CONSTRUCTORS(SymbolTable);
};
@@ -3044,6 +3161,7 @@ class Dictionary: public HashTable<Shape, Key> {
// Accessors for next enumeration index.
void SetNextEnumerationIndex(int index) {
+ ASSERT(index != 0);
this->set(kNextEnumerationIndexIndex, Smi::FromInt(index));
}
@@ -3117,7 +3235,9 @@ class StringDictionary: public Dictionary<StringDictionaryShape, String*> {
}
// Copies enumerable keys to preallocated fixed array.
- void CopyEnumKeysTo(FixedArray* storage, FixedArray* sort_array);
+ FixedArray* CopyEnumKeysTo(FixedArray* storage);
+ static void DoGenerateNewEnumerationIndices(
+ Handle<StringDictionary> dictionary);
// For transforming properties of a JSObject.
MUST_USE_RESULT MaybeObject* TransformPropertiesToFastFor(
@@ -3127,8 +3247,6 @@ class StringDictionary: public Dictionary<StringDictionaryShape, String*> {
// Find entry for key, otherwise return kNotFound. Optimized version of
// HashTable::FindEntry.
int FindEntry(String* key);
-
- bool ContainsTransition(int entry);
};
@@ -3274,12 +3392,12 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> {
return reinterpret_cast<ObjectHashTable*>(obj);
}
- // Looks up the value associated with the given key. The undefined value is
+ // Looks up the value associated with the given key. The hole value is
// returned in case the key is not present.
Object* Lookup(Object* key);
// Adds (or overwrites) the value associated with the given key. Mapping a
- // key to the undefined value causes removal of the whole entry.
+ // key to the hole value causes removal of the whole entry.
MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value);
private:
@@ -3327,9 +3445,7 @@ class JSFunctionResultCache: public FixedArray {
// Casting
static inline JSFunctionResultCache* cast(Object* obj);
-#ifdef DEBUG
- void JSFunctionResultCacheVerify();
-#endif
+ DECLARE_VERIFIER(JSFunctionResultCache)
};
@@ -3436,7 +3552,7 @@ class ScopeInfo : public FixedArray {
// must be a symbol (canonicalized).
int FunctionContextSlotIndex(String* name, VariableMode* mode);
- static Handle<ScopeInfo> Create(Scope* scope);
+ static Handle<ScopeInfo> Create(Scope* scope, Zone* zone);
// Serializes empty scope info.
static ScopeInfo* Empty();
@@ -3482,7 +3598,7 @@ class ScopeInfo : public FixedArray {
FOR_EACH_NUMERIC_FIELD(DECL_INDEX)
#undef DECL_INDEX
#undef FOR_EACH_NUMERIC_FIELD
- kVariablePartIndex
+ kVariablePartIndex
};
// The layout of the variable part of a ScopeInfo is as follows:
@@ -3556,9 +3672,7 @@ class NormalizedMapCache: public FixedArray {
// Casting
static inline NormalizedMapCache* cast(Object* obj);
-#ifdef DEBUG
- void NormalizedMapCacheVerify();
-#endif
+ DECLARE_VERIFIER(NormalizedMapCache)
};
@@ -3607,9 +3721,7 @@ class ByteArray: public FixedArrayBase {
}
void ByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ByteArrayVerify();
-#endif
+ DECLARE_VERIFIER(ByteArray)
// Layout description.
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
@@ -3643,9 +3755,7 @@ class FreeSpace: public HeapObject {
}
void FreeSpacePrint(FILE* out);
#endif
-#ifdef DEBUG
- void FreeSpaceVerify();
-#endif
+ DECLARE_VERIFIER(FreeSpace)
// Layout description.
// Size is smi tagged when it is stored.
@@ -3725,9 +3835,7 @@ class ExternalPixelArray: public ExternalArray {
}
void ExternalPixelArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalPixelArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalPixelArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalPixelArray);
@@ -3754,9 +3862,7 @@ class ExternalByteArray: public ExternalArray {
}
void ExternalByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalByteArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalByteArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalByteArray);
@@ -3783,9 +3889,7 @@ class ExternalUnsignedByteArray: public ExternalArray {
}
void ExternalUnsignedByteArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedByteArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedByteArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedByteArray);
@@ -3812,9 +3916,7 @@ class ExternalShortArray: public ExternalArray {
}
void ExternalShortArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalShortArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalShortArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalShortArray);
@@ -3841,9 +3943,7 @@ class ExternalUnsignedShortArray: public ExternalArray {
}
void ExternalUnsignedShortArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedShortArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedShortArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedShortArray);
@@ -3870,9 +3970,7 @@ class ExternalIntArray: public ExternalArray {
}
void ExternalIntArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalIntArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalIntArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalIntArray);
@@ -3899,9 +3997,7 @@ class ExternalUnsignedIntArray: public ExternalArray {
}
void ExternalUnsignedIntArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalUnsignedIntArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalUnsignedIntArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalUnsignedIntArray);
@@ -3928,9 +4024,7 @@ class ExternalFloatArray: public ExternalArray {
}
void ExternalFloatArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ExternalFloatArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalFloatArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalFloatArray);
@@ -3957,9 +4051,7 @@ class ExternalDoubleArray: public ExternalArray {
}
void ExternalDoubleArrayPrint(FILE* out);
#endif // OBJECT_PRINT
-#ifdef DEBUG
- void ExternalDoubleArrayVerify();
-#endif // DEBUG
+ DECLARE_VERIFIER(ExternalDoubleArray)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalDoubleArray);
@@ -3984,7 +4076,7 @@ class DeoptimizationInputData: public FixedArray {
static const int kFirstDeoptEntryIndex = 5;
// Offsets of deopt entry elements relative to the start of the entry.
- static const int kAstIdOffset = 0;
+ static const int kAstIdRawOffset = 0;
static const int kTranslationIndexOffset = 1;
static const int kArgumentsStackHeightOffset = 2;
static const int kPcOffset = 3;
@@ -4016,13 +4108,21 @@ class DeoptimizationInputData: public FixedArray {
set(IndexForEntry(i) + k##name##Offset, value); \
}
- DEFINE_ENTRY_ACCESSORS(AstId, Smi)
+ DEFINE_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_ENTRY_ACCESSORS(Pc, Smi)
#undef DEFINE_ENTRY_ACCESSORS
+ BailoutId AstId(int i) {
+ return BailoutId(AstIdRaw(i)->value());
+ }
+
+ void SetAstId(int i, BailoutId value) {
+ SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
+ }
+
int DeoptCount() {
return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}
@@ -4057,8 +4157,15 @@ class DeoptimizationInputData: public FixedArray {
class DeoptimizationOutputData: public FixedArray {
public:
int DeoptPoints() { return length() / 2; }
- Smi* AstId(int index) { return Smi::cast(get(index * 2)); }
- void SetAstId(int index, Smi* id) { set(index * 2, id); }
+
+ BailoutId AstId(int index) {
+ return BailoutId(Smi::cast(get(index * 2))->value());
+ }
+
+ void SetAstId(int index, BailoutId id) {
+ set(index * 2, Smi::FromInt(id.ToInt()));
+ }
+
Smi* PcAndState(int index) { return Smi::cast(get(1 + index * 2)); }
void SetPcAndState(int index, Smi* offset) { set(1 + index * 2, offset); }
@@ -4093,8 +4200,8 @@ class TypeFeedbackCells: public FixedArray {
static int LengthOfFixedArray(int cell_count) { return cell_count * 2; }
// Accessors for AST ids associated with cache values.
- inline Smi* AstId(int index);
- inline void SetAstId(int index, Smi* id);
+ inline TypeFeedbackId AstId(int index);
+ inline void SetAstId(int index, TypeFeedbackId id);
// Accessors for global property cells holding the cache values.
inline JSGlobalPropertyCell* Cell(int index);
@@ -4134,30 +4241,49 @@ class Code: public HeapObject {
FLAGS_MAX_VALUE = kMaxInt
};
+#define CODE_KIND_LIST(V) \
+ V(FUNCTION) \
+ V(OPTIMIZED_FUNCTION) \
+ V(STUB) \
+ V(BUILTIN) \
+ V(LOAD_IC) \
+ V(KEYED_LOAD_IC) \
+ V(CALL_IC) \
+ V(KEYED_CALL_IC) \
+ V(STORE_IC) \
+ V(KEYED_STORE_IC) \
+ V(UNARY_OP_IC) \
+ V(BINARY_OP_IC) \
+ V(COMPARE_IC) \
+ V(TO_BOOLEAN_IC)
+
enum Kind {
- FUNCTION,
- OPTIMIZED_FUNCTION,
- STUB,
- BUILTIN,
- LOAD_IC,
- KEYED_LOAD_IC,
- CALL_IC,
- KEYED_CALL_IC,
- STORE_IC,
- KEYED_STORE_IC,
- UNARY_OP_IC,
- BINARY_OP_IC,
- COMPARE_IC,
- TO_BOOLEAN_IC,
- // No more than 16 kinds. The value currently encoded in four bits in
- // Flags.
+#define DEFINE_CODE_KIND_ENUM(name) name,
+ CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
+#undef DEFINE_CODE_KIND_ENUM
// Pseudo-kinds.
+ LAST_CODE_KIND = TO_BOOLEAN_IC,
REGEXP = BUILTIN,
FIRST_IC_KIND = LOAD_IC,
LAST_IC_KIND = TO_BOOLEAN_IC
};
+ // No more than 16 kinds. The value is currently encoded in four bits in
+ // Flags.
+ STATIC_ASSERT(LAST_CODE_KIND < 16);
+
+ // Types of stubs.
+ enum StubType {
+ NORMAL,
+ FIELD,
+ CONSTANT_FUNCTION,
+ CALLBACKS,
+ INTERCEPTOR,
+ MAP_TRANSITION,
+ NONEXISTENT
+ };
+
enum {
NUMBER_OF_KINDS = LAST_IC_KIND + 1
};
@@ -4170,7 +4296,7 @@ class Code: public HeapObject {
// Printing
static const char* Kind2String(Kind kind);
static const char* ICState2String(InlineCacheState state);
- static const char* PropertyType2String(PropertyType type);
+ static const char* StubType2String(StubType type);
static void PrintExtraICState(FILE* out, Kind kind, ExtraICState extra);
inline void Disassemble(const char* name) {
Disassemble(name, stdout);
@@ -4220,7 +4346,7 @@ class Code: public HeapObject {
inline Kind kind();
inline InlineCacheState ic_state(); // Only valid for IC stubs.
inline ExtraICState extra_ic_state(); // Only valid for IC stubs.
- inline PropertyType type(); // Only valid for monomorphic IC stubs.
+ inline StubType type(); // Only valid for monomorphic IC stubs.
inline int arguments_count(); // Only valid for call IC stubs.
// Testers for IC stub kinds.
@@ -4324,6 +4450,8 @@ class Code: public HeapObject {
inline bool has_function_cache();
inline void set_has_function_cache(bool flag);
+ bool allowed_in_shared_map_code_cache();
+
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
@@ -4361,19 +4489,19 @@ class Code: public HeapObject {
Kind kind,
InlineCacheState ic_state = UNINITIALIZED,
ExtraICState extra_ic_state = kNoExtraICState,
- PropertyType type = NORMAL,
+ StubType type = NORMAL,
int argc = -1,
InlineCacheHolderFlag holder = OWN_MAP);
static inline Flags ComputeMonomorphicFlags(
Kind kind,
- PropertyType type,
+ StubType type,
ExtraICState extra_ic_state = kNoExtraICState,
InlineCacheHolderFlag holder = OWN_MAP,
int argc = -1);
static inline InlineCacheState ExtractICStateFromFlags(Flags flags);
- static inline PropertyType ExtractTypeFromFlags(Flags flags);
+ static inline StubType ExtractTypeFromFlags(Flags flags);
static inline Kind ExtractKindFromFlags(Flags flags);
static inline InlineCacheHolderFlag ExtractCacheHolderFromFlags(Flags flags);
static inline ExtraICState ExtractExtraICStateFromFlags(Flags flags);
@@ -4446,12 +4574,28 @@ class Code: public HeapObject {
}
void CodePrint(FILE* out);
#endif
-#ifdef DEBUG
- void CodeVerify();
-#endif
+ DECLARE_VERIFIER(Code)
+
void ClearInlineCaches();
void ClearTypeFeedbackCells(Heap* heap);
+#define DECLARE_CODE_AGE_ENUM(X) k##X##CodeAge,
+ enum Age {
+ kNoAge = 0,
+ CODE_AGE_LIST(DECLARE_CODE_AGE_ENUM)
+ kAfterLastCodeAge,
+ kLastCodeAge = kAfterLastCodeAge - 1,
+ kCodeAgeCount = kAfterLastCodeAge - 1
+ };
+#undef DECLARE_CODE_AGE_ENUM
+
+ // Code aging
+ static void MakeCodeAgeSequenceYoung(byte* sequence);
+ void MakeYoung();
+ void MakeOlder(MarkingParity);
+ static bool IsYoungSequence(byte* sequence);
+ bool IsOld();
+
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
@@ -4468,28 +4612,20 @@ class Code: public HeapObject {
static const int kICAgeOffset =
kGCMetadataOffset + kPointerSize;
static const int kFlagsOffset = kICAgeOffset + kIntSize;
- static const int kKindSpecificFlagsOffset = kFlagsOffset + kIntSize;
- static const int kKindSpecificFlagsSize = 2 * kIntSize;
+ static const int kKindSpecificFlags1Offset = kFlagsOffset + kIntSize;
+ static const int kKindSpecificFlags2Offset =
+ kKindSpecificFlags1Offset + kIntSize;
- static const int kHeaderPaddingStart = kKindSpecificFlagsOffset +
- kKindSpecificFlagsSize;
+ static const int kHeaderPaddingStart = kKindSpecificFlags2Offset + kIntSize;
// Add padding to align the instruction start following right after
// the Code object header.
static const int kHeaderSize =
(kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask;
- // Byte offsets within kKindSpecificFlagsOffset.
- static const int kStubMajorKeyOffset = kKindSpecificFlagsOffset;
- static const int kOptimizableOffset = kKindSpecificFlagsOffset;
- static const int kStackSlotsOffset = kKindSpecificFlagsOffset;
- static const int kCheckTypeOffset = kKindSpecificFlagsOffset;
-
- static const int kUnaryOpTypeOffset = kStubMajorKeyOffset + 1;
- static const int kBinaryOpTypeOffset = kStubMajorKeyOffset + 1;
- static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
- static const int kToBooleanTypeOffset = kStubMajorKeyOffset + 1;
- static const int kHasFunctionCacheOffset = kStubMajorKeyOffset + 1;
+ // Byte offsets within kKindSpecificFlags1Offset.
+ static const int kOptimizableOffset = kKindSpecificFlags1Offset;
+ static const int kCheckTypeOffset = kKindSpecificFlags1Offset;
static const int kFullCodeFlags = kOptimizableOffset + 1;
class FullCodeFlagsHasDeoptimizationSupportField:
@@ -4497,26 +4633,90 @@ class Code: public HeapObject {
class FullCodeFlagsHasDebugBreakSlotsField: public BitField<bool, 1, 1> {};
class FullCodeFlagsIsCompiledOptimizable: public BitField<bool, 2, 1> {};
- static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
-
- static const int kCompareOperationOffset = kCompareStateOffset + 1;
-
static const int kAllowOSRAtLoopNestingLevelOffset = kFullCodeFlags + 1;
static const int kProfilerTicksOffset = kAllowOSRAtLoopNestingLevelOffset + 1;
- static const int kSafepointTableOffsetOffset = kStackSlotsOffset + kIntSize;
- static const int kStackCheckTableOffsetOffset = kStackSlotsOffset + kIntSize;
-
// Flags layout. BitField<type, shift, size>.
class ICStateField: public BitField<InlineCacheState, 0, 3> {};
- class TypeField: public BitField<PropertyType, 3, 4> {};
- class CacheHolderField: public BitField<InlineCacheHolderFlag, 7, 1> {};
- class KindField: public BitField<Kind, 8, 4> {};
- class ExtraICStateField: public BitField<ExtraICState, 12, 2> {};
- class IsPregeneratedField: public BitField<bool, 14, 1> {};
+ class TypeField: public BitField<StubType, 3, 3> {};
+ class CacheHolderField: public BitField<InlineCacheHolderFlag, 6, 1> {};
+ class KindField: public BitField<Kind, 7, 4> {};
+ class ExtraICStateField: public BitField<ExtraICState, 11, 2> {};
+ class IsPregeneratedField: public BitField<bool, 13, 1> {};
+
+ // KindSpecificFlags1 layout (STUB and OPTIMIZED_FUNCTION)
+ static const int kStackSlotsFirstBit = 0;
+ static const int kStackSlotsBitCount = 24;
+ static const int kUnaryOpTypeFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kUnaryOpTypeBitCount = 3;
+ static const int kBinaryOpTypeFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kBinaryOpTypeBitCount = 3;
+ static const int kBinaryOpResultTypeFirstBit =
+ kBinaryOpTypeFirstBit + kBinaryOpTypeBitCount;
+ static const int kBinaryOpResultTypeBitCount = 3;
+ static const int kCompareStateFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kCompareStateBitCount = 3;
+ static const int kCompareOperationFirstBit =
+ kCompareStateFirstBit + kCompareStateBitCount;
+ static const int kCompareOperationBitCount = 4;
+ static const int kToBooleanStateFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kToBooleanStateBitCount = 8;
+ static const int kHasFunctionCacheFirstBit =
+ kStackSlotsFirstBit + kStackSlotsBitCount;
+ static const int kHasFunctionCacheBitCount = 1;
+
+ STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
+ STATIC_ASSERT(kUnaryOpTypeFirstBit + kUnaryOpTypeBitCount <= 32);
+ STATIC_ASSERT(kBinaryOpTypeFirstBit + kBinaryOpTypeBitCount <= 32);
+ STATIC_ASSERT(kBinaryOpResultTypeFirstBit +
+ kBinaryOpResultTypeBitCount <= 32);
+ STATIC_ASSERT(kCompareStateFirstBit + kCompareStateBitCount <= 32);
+ STATIC_ASSERT(kCompareOperationFirstBit + kCompareOperationBitCount <= 32);
+ STATIC_ASSERT(kToBooleanStateFirstBit + kToBooleanStateBitCount <= 32);
+ STATIC_ASSERT(kHasFunctionCacheFirstBit + kHasFunctionCacheBitCount <= 32);
+
+ class StackSlotsField: public BitField<int,
+ kStackSlotsFirstBit, kStackSlotsBitCount> {}; // NOLINT
+ class UnaryOpTypeField: public BitField<int,
+ kUnaryOpTypeFirstBit, kUnaryOpTypeBitCount> {}; // NOLINT
+ class BinaryOpTypeField: public BitField<int,
+ kBinaryOpTypeFirstBit, kBinaryOpTypeBitCount> {}; // NOLINT
+ class BinaryOpResultTypeField: public BitField<int,
+ kBinaryOpResultTypeFirstBit, kBinaryOpResultTypeBitCount> {}; // NOLINT
+ class CompareStateField: public BitField<int,
+ kCompareStateFirstBit, kCompareStateBitCount> {}; // NOLINT
+ class CompareOperationField: public BitField<int,
+ kCompareOperationFirstBit, kCompareOperationBitCount> {}; // NOLINT
+ class ToBooleanStateField: public BitField<int,
+ kToBooleanStateFirstBit, kToBooleanStateBitCount> {}; // NOLINT
+ class HasFunctionCacheField: public BitField<bool,
+ kHasFunctionCacheFirstBit, kHasFunctionCacheBitCount> {}; // NOLINT
+
+ // KindSpecificFlags2 layout (STUB and OPTIMIZED_FUNCTION)
+ static const int kStubMajorKeyFirstBit = 0;
+ static const int kSafepointTableOffsetFirstBit =
+ kStubMajorKeyFirstBit + kStubMajorKeyBits;
+ static const int kSafepointTableOffsetBitCount = 26;
+
+ STATIC_ASSERT(kStubMajorKeyFirstBit + kStubMajorKeyBits <= 32);
+ STATIC_ASSERT(kSafepointTableOffsetFirstBit +
+ kSafepointTableOffsetBitCount <= 32);
+
+ class SafepointTableOffsetField: public BitField<int,
+ kSafepointTableOffsetFirstBit,
+ kSafepointTableOffsetBitCount> {}; // NOLINT
+ class StubMajorKeyField: public BitField<int,
+ kStubMajorKeyFirstBit, kStubMajorKeyBits> {}; // NOLINT
+
+ // KindSpecificFlags2 layout (FUNCTION)
+ class StackCheckTableOffsetField: public BitField<int, 0, 31> {};
// Signed field cannot be encoded using the BitField class.
- static const int kArgumentsCountShift = 15;
+ static const int kArgumentsCountShift = 14;
static const int kArgumentsCountMask = ~((1 << kArgumentsCountShift) - 1);
// This constant should be encodable in an ARM instruction.
@@ -4524,6 +4724,21 @@ class Code: public HeapObject {
TypeField::kMask | CacheHolderField::kMask;
private:
+ friend class RelocIterator;
+
+ // Code aging
+ byte* FindCodeAgeSequence();
+ static void GetCodeAgeAndParity(Code* code, Age* age,
+ MarkingParity* parity);
+ static void GetCodeAgeAndParity(byte* sequence, Age* age,
+ MarkingParity* parity);
+ static Code* GetCodeAgeStub(Age age, MarkingParity parity);
+
+ // Code aging -- platform-specific
+ byte* FindPlatformCodeAgeSequence();
+ static void PatchPlatformCodeAge(byte* sequence, Age age,
+ MarkingParity parity);
+
DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
};
@@ -4566,12 +4781,20 @@ class Map: public HeapObject {
inline void set_bit_field2(byte value);
// Bit field 3.
- // TODO(1399): It should be possible to make room for bit_field3 in the map
- // without overloading the instance descriptors field (and storing it in the
- // DescriptorArray when the map has one).
inline int bit_field3();
inline void set_bit_field3(int value);
+ class EnumLengthBits: public BitField<int, 0, 11> {};
+ class NumberOfOwnDescriptorsBits: public BitField<int, 11, 11> {};
+ class IsShared: public BitField<bool, 22, 1> {};
+ class FunctionWithPrototype: public BitField<bool, 23, 1> {};
+ class DictionaryMap: public BitField<bool, 24, 1> {};
+ class OwnsDescriptors: public BitField<bool, 25, 1> {};
+ class IsObserved: public BitField<bool, 26, 1> {};
+ class NamedInterceptorIsFallback: public BitField<bool, 27, 1> {};
+ class HasInstanceCallHandler: public BitField<bool, 28, 1> {};
+ class AttachedToSharedFunctionInfo: public BitField<bool, 29, 1> {};
+
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
// property is set to a value that is not a JSObject, the prototype
@@ -4630,11 +4853,11 @@ class Map: public HeapObject {
// Tells whether the instance has a call-as-function handler.
inline void set_has_instance_call_handler() {
- set_bit_field3(bit_field3() | (1 << kHasInstanceCallHandler));
+ set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}
inline bool has_instance_call_handler() {
- return ((1 << kHasInstanceCallHandler) & bit_field3()) != 0;
+ return HasInstanceCallHandler::decode(bit_field3());
}
inline void set_is_extensible(bool value);
@@ -4654,17 +4877,21 @@ class Map: public HeapObject {
}
// Tells whether the instance has fast elements that are only Smis.
- inline bool has_fast_smi_only_elements() {
- return elements_kind() == FAST_SMI_ONLY_ELEMENTS;
+ inline bool has_fast_smi_elements() {
+ return IsFastSmiElementsKind(elements_kind());
}
// Tells whether the instance has fast elements.
- inline bool has_fast_elements() {
- return elements_kind() == FAST_ELEMENTS;
+ inline bool has_fast_object_elements() {
+ return IsFastObjectElementsKind(elements_kind());
+ }
+
+ inline bool has_fast_smi_or_object_elements() {
+ return IsFastSmiOrObjectElementsKind(elements_kind());
}
inline bool has_fast_double_elements() {
- return elements_kind() == FAST_DOUBLE_ELEMENTS;
+ return IsFastDoubleElementsKind(elements_kind());
}
inline bool has_non_strict_arguments_elements() {
@@ -4672,13 +4899,11 @@ class Map: public HeapObject {
}
inline bool has_external_array_elements() {
- ElementsKind kind(elements_kind());
- return kind >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
- kind <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND;
+ return IsExternalArrayElementsKind(elements_kind());
}
inline bool has_dictionary_elements() {
- return elements_kind() == DICTIONARY_ELEMENTS;
+ return IsDictionaryElementsKind(elements_kind());
}
inline bool has_slow_elements_kind() {
@@ -4689,6 +4914,20 @@ class Map: public HeapObject {
static bool IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind);
+ inline bool HasTransitionArray();
+ inline bool HasElementsTransition();
+ inline Map* elements_transition_map();
+ MUST_USE_RESULT inline MaybeObject* set_elements_transition_map(
+ Map* transitioned_map);
+ inline void SetTransition(int transition_index, Map* target);
+ inline Map* GetTransition(int transition_index);
+ MUST_USE_RESULT inline MaybeObject* AddTransition(String* key,
+ Map* target,
+ SimpleTransitionFlag flag);
+ DECL_ACCESSORS(transitions, TransitionArray)
+ inline void ClearTransitions(Heap* heap,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
// Tells whether the map is attached to SharedFunctionInfo
// (for inobject slack tracking).
inline void set_attached_to_shared_function_info(bool value);
@@ -4699,9 +4938,15 @@ class Map: public HeapObject {
// behavior. If true, the map should never be modified, instead a clone
// should be created and modified.
inline void set_is_shared(bool value);
-
inline bool is_shared();
+ // Tells whether the map is used for JSObjects in dictionary mode (ie
+ // normalized objects, ie objects for which HasFastProperties returns false).
+ // A map can never be used for both dictionary mode and fast mode JSObjects.
+ // False by default and for HeapObjects that are not JSObjects.
+ inline void set_dictionary_map(bool value);
+ inline bool is_dictionary_map();
+
// Tells whether the instance needs security checks when accessing its
// properties.
inline void set_is_access_check_needed(bool access_check_needed);
@@ -4720,7 +4965,7 @@ class Map: public HeapObject {
// comparisons involving this object
inline void set_use_user_object_comparison(bool value);
inline bool use_user_object_comparison();
-
+
// [prototype]: implicit prototype object.
DECL_ACCESSORS(prototype, Object)
@@ -4729,16 +4974,9 @@ class Map: public HeapObject {
inline JSFunction* unchecked_constructor();
- // Should only be called by the code that initializes map to set initial valid
- // value of the instance descriptor member.
- inline void init_instance_descriptors();
-
// [instance descriptors]: describes the object.
DECL_ACCESSORS(instance_descriptors, DescriptorArray)
-
- // Sets the instance descriptor array for the map to be an empty descriptor
- // array.
- inline void clear_instance_descriptors();
+ inline void InitializeDescriptors(DescriptorArray* descriptors);
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
@@ -4750,6 +4988,7 @@ class Map: public HeapObject {
inline Object* GetBackPointer();
inline void SetBackPointer(Object* value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+ inline void init_back_pointer(Object* undefined);
// [prototype transitions]: cache of prototype transitions.
// Prototype transition is a transition that happens
@@ -4759,27 +4998,29 @@ class Map: public HeapObject {
// 1: back pointer that overlaps with prototype transitions field.
// 2 + 2 * i: prototype
// 3 + 2 * i: target map
- DECL_ACCESSORS(prototype_transitions, FixedArray)
+ inline FixedArray* GetPrototypeTransitions();
+ MUST_USE_RESULT inline MaybeObject* SetPrototypeTransitions(
+ FixedArray* prototype_transitions);
+ inline bool HasPrototypeTransitions();
- inline void init_prototype_transitions(Object* undefined);
- inline HeapObject* unchecked_prototype_transitions();
+ inline HeapObject* UncheckedPrototypeTransitions();
+ inline TransitionArray* unchecked_transition_array();
- static const int kProtoTransitionHeaderSize = 2;
+ static const int kProtoTransitionHeaderSize = 1;
static const int kProtoTransitionNumberOfEntriesOffset = 0;
- static const int kProtoTransitionBackPointerOffset = 1;
static const int kProtoTransitionElementsPerEntry = 2;
static const int kProtoTransitionPrototypeOffset = 0;
static const int kProtoTransitionMapOffset = 1;
inline int NumberOfProtoTransitions() {
- FixedArray* cache = prototype_transitions();
+ FixedArray* cache = GetPrototypeTransitions();
if (cache->length() == 0) return 0;
return
Smi::cast(cache->get(kProtoTransitionNumberOfEntriesOffset))->value();
}
inline void SetNumberOfProtoTransitions(int value) {
- FixedArray* cache = prototype_transitions();
+ FixedArray* cache = GetPrototypeTransitions();
ASSERT(cache->length() != 0);
cache->set_unchecked(kProtoTransitionNumberOfEntriesOffset,
Smi::FromInt(value));
@@ -4788,18 +5029,86 @@ class Map: public HeapObject {
// Lookup in the map's instance descriptors and fill out the result
// with the given holder if the name is found. The holder may be
// NULL when this function is used from the compiler.
- void LookupInDescriptors(JSObject* holder,
- String* name,
- LookupResult* result);
+ inline void LookupDescriptor(JSObject* holder,
+ String* name,
+ LookupResult* result);
+
+ inline void LookupTransition(JSObject* holder,
+ String* name,
+ LookupResult* result);
+
+ // The size of transition arrays are limited so they do not end up in large
+ // object space. Otherwise ClearNonLiveTransitions would leak memory while
+ // applying in-place right trimming.
+ inline bool CanHaveMoreTransitions();
+
+ int LastAdded() {
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ ASSERT(number_of_own_descriptors > 0);
+ return number_of_own_descriptors - 1;
+ }
+
+ int NumberOfOwnDescriptors() {
+ return NumberOfOwnDescriptorsBits::decode(bit_field3());
+ }
+
+ void SetNumberOfOwnDescriptors(int number) {
+ ASSERT(number <= instance_descriptors()->number_of_descriptors());
+ set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
+ }
+
+ inline JSGlobalPropertyCell* RetrieveDescriptorsPointer();
+
+ int EnumLength() {
+ return EnumLengthBits::decode(bit_field3());
+ }
+
+ void SetEnumLength(int length) {
+ if (length != kInvalidEnumCache) {
+ ASSERT(length >= 0);
+ ASSERT(length == 0 || instance_descriptors()->HasEnumCache());
+ ASSERT(length <= NumberOfOwnDescriptors());
+ }
+ set_bit_field3(EnumLengthBits::update(bit_field3(), length));
+ }
+
+ inline bool owns_descriptors();
+ inline void set_owns_descriptors(bool is_shared);
+ inline bool is_observed();
+ inline void set_is_observed(bool is_observed);
+
+ MUST_USE_RESULT MaybeObject* RawCopy(int instance_size);
+ MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors();
MUST_USE_RESULT MaybeObject* CopyDropDescriptors();
+ MUST_USE_RESULT MaybeObject* CopyReplaceDescriptors(
+ DescriptorArray* descriptors,
+ String* name,
+ TransitionFlag flag,
+ int descriptor_index);
+ MUST_USE_RESULT MaybeObject* ShareDescriptor(DescriptorArray* descriptors,
+ Descriptor* descriptor);
+ MUST_USE_RESULT MaybeObject* CopyAddDescriptor(Descriptor* descriptor,
+ TransitionFlag flag);
+ MUST_USE_RESULT MaybeObject* CopyInsertDescriptor(Descriptor* descriptor,
+ TransitionFlag flag);
+ MUST_USE_RESULT MaybeObject* CopyReplaceDescriptor(
+ DescriptorArray* descriptors,
+ Descriptor* descriptor,
+ int index,
+ TransitionFlag flag);
+ MUST_USE_RESULT MaybeObject* CopyAsElementsKind(ElementsKind kind,
+ TransitionFlag flag);
MUST_USE_RESULT MaybeObject* CopyNormalized(PropertyNormalizationMode mode,
NormalizedMapSharingMode sharing);
+ inline void AppendDescriptor(Descriptor* desc,
+ const DescriptorArray::WhitenessWitness&);
+
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
- MUST_USE_RESULT MaybeObject* CopyDropTransitions();
+ MUST_USE_RESULT MaybeObject* Copy();
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
@@ -4809,7 +5118,8 @@ class Map: public HeapObject {
// Returns the number of properties described in instance_descriptors
// filtering out properties with the specified attributes.
- int NumberOfDescribedProperties(PropertyAttributes filter = NONE);
+ int NumberOfDescribedProperties(DescriptorFlag which = OWN_DESCRIPTORS,
+ PropertyAttributes filter = NONE);
// Casting.
static inline Map* cast(Object* obj);
@@ -4828,6 +5138,13 @@ class Map: public HeapObject {
Handle<Code> code);
MUST_USE_RESULT MaybeObject* UpdateCodeCache(String* name, Code* code);
+ // Extend the descriptor array of the map with the list of descriptors.
+ // In case of duplicates, the latest descriptor is used.
+ static void AppendCallbackDescriptors(Handle<Map> map,
+ Handle<Object> descriptors);
+
+ static void EnsureDescriptorSlack(Handle<Map> map, int slack);
+
// Returns the found code or undefined if absent.
Object* FindInCodeCache(String* name, Code::Flags flags);
@@ -4852,23 +5169,11 @@ class Map: public HeapObject {
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
- // Returns the contents of this map's descriptor array for the given string.
- // May return NULL. |safe_to_add_transition| is set to false and NULL
- // is returned if adding transitions is not allowed.
- Object* GetDescriptorContents(String* sentinel_name,
- bool* safe_to_add_transitions);
-
// Returns the map that this map transitions to if its elements_kind
// is changed to |elements_kind|, or NULL if no such map is cached yet.
// |safe_to_add_transitions| is set to false if adding transitions is not
// allowed.
- Map* LookupElementsTransitionMap(ElementsKind elements_kind,
- bool* safe_to_add_transition);
-
- // Adds an entry to this map's descriptor array for a transition to
- // |transitioned_map| when its elements_kind is changed to |elements_kind|.
- MUST_USE_RESULT MaybeObject* AddElementsTransition(
- ElementsKind elements_kind, Map* transitioned_map);
+ Map* LookupElementsTransitionMap(ElementsKind elements_kind);
// Returns the transitioned map for this map with the most generic
// elements_kind that's found in |candidates|, or null handle if no match is
@@ -4876,14 +5181,14 @@ class Map: public HeapObject {
Handle<Map> FindTransitionedMap(MapHandleList* candidates);
Map* FindTransitionedMap(MapList* candidates);
- // Zaps the contents of backing data structures in debug mode. Note that the
+ // Zaps the contents of backing data structures. Note that the
// heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
// holding weak references when incremental marking is used, because it also
// iterates over objects that are otherwise unreachable.
-#ifdef DEBUG
- void ZapInstanceDescriptors();
+ // In general we only want to call these functions in release mode when
+ // heap verification is turned on.
void ZapPrototypeTransitions();
-#endif
+ void ZapTransitions();
// Dispatched behavior.
#ifdef OBJECT_PRINT
@@ -4892,8 +5197,9 @@ class Map: public HeapObject {
}
void MapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void MapVerify();
+ DECLARE_VERIFIER(Map)
+
+#ifdef VERIFY_HEAP
void SharedMapVerify();
#endif
@@ -4904,44 +5210,47 @@ class Map: public HeapObject {
void TraverseTransitionTree(TraverseCallback callback, void* data);
+ // When you set the prototype of an object using the __proto__ accessor you
+ // need a new map for the object (the prototype is stored in the map). In
+ // order not to multiply maps unnecessarily we store these as transitions in
+ // the original map. That way we can transition to the same map if the same
+ // prototype is set, rather than creating a new map every time. The
+ // transitions are in the form of a map where the keys are prototype objects
+ // and the values are the maps the are transitioned to.
static const int kMaxCachedPrototypeTransitions = 256;
- Object* GetPrototypeTransition(Object* prototype);
+ Map* GetPrototypeTransition(Object* prototype);
MUST_USE_RESULT MaybeObject* PutPrototypeTransition(Object* prototype,
Map* map);
static const int kMaxPreAllocatedPropertyFields = 255;
+ // Constant for denoting that the enum cache is not yet initialized.
+ static const int kInvalidEnumCache = EnumLengthBits::kMax;
+
// Layout description.
static const int kInstanceSizesOffset = HeapObject::kHeaderSize;
static const int kInstanceAttributesOffset = kInstanceSizesOffset + kIntSize;
static const int kPrototypeOffset = kInstanceAttributesOffset + kIntSize;
static const int kConstructorOffset = kPrototypeOffset + kPointerSize;
- // Storage for instance descriptors is overloaded to also contain additional
- // map flags when unused (bit_field3). When the map has instance descriptors,
- // the flags are transferred to the instance descriptor array and accessed
- // through an extra indirection.
- // TODO(1399): It should be possible to make room for bit_field3 in the map
- // without overloading the instance descriptors field, but the map is
- // currently perfectly aligned to 32 bytes and extending it at all would
- // double its size. After the increment GC work lands, this size restriction
- // could be loosened and bit_field3 moved directly back in the map.
- static const int kInstanceDescriptorsOrBitField3Offset =
+ // Storage for the transition array is overloaded to directly contain a back
+ // pointer if unused. When the map has transitions, the back pointer is
+ // transferred to the transition array and accessed through an extra
+ // indirection.
+ static const int kTransitionsOrBackPointerOffset =
kConstructorOffset + kPointerSize;
+ static const int kDescriptorsOffset =
+ kTransitionsOrBackPointerOffset + kPointerSize;
static const int kCodeCacheOffset =
- kInstanceDescriptorsOrBitField3Offset + kPointerSize;
- static const int kPrototypeTransitionsOrBackPointerOffset =
- kCodeCacheOffset + kPointerSize;
- static const int kPadStart =
- kPrototypeTransitionsOrBackPointerOffset + kPointerSize;
- static const int kSize = MAP_POINTER_ALIGN(kPadStart);
+ kDescriptorsOffset + kPointerSize;
+ static const int kBitField3Offset = kCodeCacheOffset + kPointerSize;
+ static const int kSize = kBitField3Offset + kPointerSize;
// Layout of pointer fields. Heap iteration code relies on them
// being continuously allocated.
static const int kPointerFieldsBeginOffset = Map::kPrototypeOffset;
- static const int kPointerFieldsEndOffset =
- kPrototypeTransitionsOrBackPointerOffset + kPointerSize;
+ static const int kPointerFieldsEndOffset = kBitField3Offset + kPointerSize;
// Byte offsets within kInstanceSizesOffset.
static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
@@ -4974,40 +5283,33 @@ class Map: public HeapObject {
// Bit positions for bit field 2
static const int kIsExtensible = 0;
- static const int kFunctionWithPrototype = 1;
- static const int kStringWrapperSafeForDefaultValueOf = 2;
- static const int kUseUserObjectComparison = 3;
+ static const int kStringWrapperSafeForDefaultValueOf = 1;
+ static const int kUseUserObjectComparison = 2;
// No bits can be used after kElementsKindFirstBit, they are all reserved for
// storing ElementKind.
- static const int kElementsKindShift = 4;
- static const int kElementsKindBitCount = 4;
+ static const int kElementsKindShift = 3;
+ static const int kElementsKindBitCount = 5;
// Derived values from bit field 2
static const int kElementsKindMask = (-1 << kElementsKindShift) &
((1 << (kElementsKindShift + kElementsKindBitCount)) - 1);
static const int8_t kMaximumBitField2FastElementValue = static_cast<int8_t>(
(FAST_ELEMENTS + 1) << Map::kElementsKindShift) - 1;
- static const int8_t kMaximumBitField2FastSmiOnlyElementValue =
- static_cast<int8_t>((FAST_SMI_ONLY_ELEMENTS + 1) <<
+ static const int8_t kMaximumBitField2FastSmiElementValue =
+ static_cast<int8_t>((FAST_SMI_ELEMENTS + 1) <<
+ Map::kElementsKindShift) - 1;
+ static const int8_t kMaximumBitField2FastHoleyElementValue =
+ static_cast<int8_t>((FAST_HOLEY_ELEMENTS + 1) <<
+ Map::kElementsKindShift) - 1;
+ static const int8_t kMaximumBitField2FastHoleySmiElementValue =
+ static_cast<int8_t>((FAST_HOLEY_SMI_ELEMENTS + 1) <<
Map::kElementsKindShift) - 1;
-
- // Bit positions for bit field 3
- static const int kIsShared = 0;
- static const int kNamedInterceptorIsFallback = 1;
- static const int kHasInstanceCallHandler = 2;
- static const int kAttachedToSharedFunctionInfo = 3;
-
- // Layout of the default cache. It holds alternating name and code objects.
- static const int kCodeCacheEntrySize = 2;
- static const int kCodeCacheEntryNameOffset = 0;
- static const int kCodeCacheEntryCodeOffset = 1;
typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
kPointerFieldsEndOffset,
kSize> BodyDescriptor;
private:
- String* elements_transition_sentinel_name();
DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
};
@@ -5101,9 +5403,7 @@ class Script: public Struct {
}
void ScriptPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ScriptVerify();
-#endif
+ DECLARE_VERIFIER(Script)
static const int kSourceOffset = HeapObject::kHeaderSize;
static const int kNameOffset = kSourceOffset + kPointerSize;
@@ -5188,6 +5488,29 @@ class SharedFunctionInfo: public HeapObject {
// [code]: Function code.
DECL_ACCESSORS(code, Code)
+ // [optimized_code_map]: Map from native context to optimized code
+ // and a shared literals array or Smi 0 if none.
+ DECL_ACCESSORS(optimized_code_map, Object)
+
+ // Returns index i of the entry with the specified context. At position
+ // i - 1 is the context, position i the code, and i + 1 the literals array.
+ // Returns -1 when no matching entry is found.
+ int SearchOptimizedCodeMap(Context* native_context);
+
+ // Installs optimized code from the code map on the given closure. The
+ // index has to be consistent with a search result as defined above.
+ void InstallFromOptimizedCodeMap(JSFunction* function, int index);
+
+ // Clear optimized code map.
+ void ClearOptimizedCodeMap();
+
+ // Add a new entry to the optimized code map.
+ static void AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,
+ Handle<Context> native_context,
+ Handle<Code> code,
+ Handle<FixedArray> literals);
+ static const int kEntryLength = 3;
+
// [scope_info]: Scope info.
DECL_ACCESSORS(scope_info, ScopeInfo)
@@ -5295,6 +5618,10 @@ class SharedFunctionInfo: public HeapObject {
// IsInobjectSlackTrackingInProgress is false after this call.
void CompleteInobjectSlackTracking();
+ // Invoked before pointers in SharedFunctionInfo are being marked.
+ // Also clears the optimized code map.
+ inline void BeforeVisitingPointers();
+
// Clears the initial_map before the GC marking phase to ensure the reference
// is weak. IsInobjectSlackTrackingInProgress is false after this call.
void DetachInitialMap();
@@ -5380,8 +5707,8 @@ class SharedFunctionInfo: public HeapObject {
// A counter used to determine when to stress the deoptimizer with a
// deopt.
- inline int deopt_counter();
- inline void set_deopt_counter(int counter);
+ inline int stress_deopt_counter();
+ inline void set_stress_deopt_counter(int counter);
inline int profiler_ticks();
@@ -5407,6 +5734,12 @@ class SharedFunctionInfo: public HeapObject {
// when doing GC if we expect that the function will no longer be used.
DECL_BOOLEAN_ACCESSORS(allows_lazy_compilation)
+ // Indicates if this function can be lazy compiled without a context.
+ // This is used to determine if we can force compilation without reaching
+ // the function through program execution but through other means (e.g. heap
+ // iteration by the debugger).
+ DECL_BOOLEAN_ACCESSORS(allows_lazy_compilation_without_context)
+
// Indicates how many full GCs this function has survived with assigned
// code object. Used to determine when it is relatively safe to flush
// this code object and replace it with lazy compilation stub.
@@ -5474,6 +5807,9 @@ class SharedFunctionInfo: public HeapObject {
// Indicates that the function cannot be inlined.
DECL_BOOLEAN_ACCESSORS(dont_inline)
+ // Indicates that code for this function cannot be cached.
+ DECL_BOOLEAN_ACCESSORS(dont_cache)
+
// Indicates whether or not the code in the shared function support
// deoptimization.
inline bool has_deoptimization_support();
@@ -5483,12 +5819,12 @@ class SharedFunctionInfo: public HeapObject {
// Disable (further) attempted optimization of all functions sharing this
// shared function info.
- void DisableOptimization();
+ void DisableOptimization(const char* reason);
// Lookup the bailout ID and ASSERT that it exists in the non-optimized
// code, returns whether it asserted (i.e., always true if assertions are
// disabled).
- bool VerifyBailoutId(int id);
+ bool VerifyBailoutId(BailoutId id);
// Check whether a inlined constructor can be generated with the given
// prototype.
@@ -5512,9 +5848,26 @@ class SharedFunctionInfo: public HeapObject {
bool HasSourceCode();
Handle<Object> GetSourceCode();
+ // Number of times the function was optimized.
inline int opt_count();
inline void set_opt_count(int opt_count);
+ // Number of times the function was deoptimized.
+ inline void set_deopt_count(int value);
+ inline int deopt_count();
+ inline void increment_deopt_count();
+
+ // Number of time we tried to re-enable optimization after it
+ // was disabled due to high number of deoptimizations.
+ inline void set_opt_reenable_tries(int value);
+ inline int opt_reenable_tries();
+
+ inline void TryReenableOptimization();
+
+ // Stores deopt_count, opt_reenable_tries and ic_age as bit-fields.
+ inline void set_counters(int value);
+ inline int counters();
+
// Source size of this function.
int SourceSize();
@@ -5533,21 +5886,16 @@ class SharedFunctionInfo: public HeapObject {
}
void SharedFunctionInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void SharedFunctionInfoVerify();
-#endif
+ DECLARE_VERIFIER(SharedFunctionInfo)
void ResetForNewContext(int new_ic_age);
- // Helpers to compile the shared code. Returns true on success, false on
- // failure (e.g., stack overflow during compilation).
- static bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
- ClearExceptionFlag flag);
+ // Helper to compile the shared code. Returns true on success, false on
+ // failure (e.g., stack overflow during compilation). This is only used by
+ // the debugger, it is not possible to compile without a context otherwise.
static bool CompileLazy(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag);
- void SharedFunctionInfoIterateBody(ObjectVisitor* v);
-
// Casting.
static inline SharedFunctionInfo* cast(Object* obj);
@@ -5558,7 +5906,8 @@ class SharedFunctionInfo: public HeapObject {
// Pointer fields.
static const int kNameOffset = HeapObject::kHeaderSize;
static const int kCodeOffset = kNameOffset + kPointerSize;
- static const int kScopeInfoOffset = kCodeOffset + kPointerSize;
+ static const int kOptimizedCodeMapOffset = kCodeOffset + kPointerSize;
+ static const int kScopeInfoOffset = kOptimizedCodeMapOffset + kPointerSize;
static const int kConstructStubOffset = kScopeInfoOffset + kPointerSize;
static const int kInstanceClassNameOffset =
kConstructStubOffset + kPointerSize;
@@ -5571,13 +5920,14 @@ class SharedFunctionInfo: public HeapObject {
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
- // ic_age is a Smi field. It could be grouped with another Smi field into a
- // PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
- static const int kICAgeOffset = kThisPropertyAssignmentsOffset + kPointerSize;
+ // ast_node_count is a Smi field. It could be grouped with another Smi field
+ // into a PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
+ static const int kAstNodeCountOffset =
+ kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kICAgeOffset + kPointerSize;
+ kAstNodeCountOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -5595,12 +5945,11 @@ class SharedFunctionInfo: public HeapObject {
kCompilerHintsOffset + kPointerSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kPointerSize;
- static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
- static const int kDeoptCounterOffset = kAstNodeCountOffset + kPointerSize;
-
+ static const int kCountersOffset = kOptCountOffset + kPointerSize;
+ static const int kStressDeoptCounterOffset = kCountersOffset + kPointerSize;
// Total size.
- static const int kSize = kDeoptCounterOffset + kPointerSize;
+ static const int kSize = kStressDeoptCounterOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
@@ -5612,7 +5961,7 @@ class SharedFunctionInfo: public HeapObject {
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kICAgeOffset + kPointerSize;
+ kAstNodeCountOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -5636,11 +5985,11 @@ class SharedFunctionInfo: public HeapObject {
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kIntSize;
- static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
- static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
+ static const int kCountersOffset = kOptCountOffset + kIntSize;
+ static const int kStressDeoptCounterOffset = kCountersOffset + kIntSize;
// Total size.
- static const int kSize = kDeoptCounterOffset + kIntSize;
+ static const int kSize = kStressDeoptCounterOffset + kIntSize;
#endif
@@ -5676,6 +6025,7 @@ class SharedFunctionInfo: public HeapObject {
enum CompilerHints {
kHasOnlySimpleThisPropertyAssignments,
kAllowLazyCompilation,
+ kAllowLazyCompilationWithoutContext,
kLiveObjectsMayExist,
kCodeAgeShift,
kOptimizationDisabled = kCodeAgeShift + kCodeAgeSize,
@@ -5691,9 +6041,14 @@ class SharedFunctionInfo: public HeapObject {
kIsFunction,
kDontOptimize,
kDontInline,
+ kDontCache,
kCompilerHintsCount // Pseudo entry
};
+ class DeoptCountBits: public BitField<int, 0, 4> {};
+ class OptReenableTriesBits: public BitField<int, 4, 18> {};
+ class ICAgeBits: public BitField<int, 22, 8> {};
+
private:
#if V8_HOST_ARCH_32_BIT
// On 32 bit platforms, compiler hints is a smi.
@@ -5753,6 +6108,9 @@ class JSModule: public JSObject {
// [context]: the context holding the module's locals, or undefined if none.
DECL_ACCESSORS(context, Object)
+ // [scope_info]: Scope info.
+ DECL_ACCESSORS(scope_info, ScopeInfo)
+
// Casting.
static inline JSModule* cast(Object* obj);
@@ -5763,13 +6121,12 @@ class JSModule: public JSObject {
}
void JSModulePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSModuleVerify();
-#endif
+ DECLARE_VERIFIER(JSModule)
// Layout description.
static const int kContextOffset = JSObject::kHeaderSize;
- static const int kSize = kContextOffset + kPointerSize;
+ static const int kScopeInfoOffset = kContextOffset + kPointerSize;
+ static const int kSize = kScopeInfoOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSModule);
@@ -5818,18 +6175,26 @@ class JSFunction: public JSObject {
// Mark this function for lazy recompilation. The function will be
// recompiled the next time it is executed.
void MarkForLazyRecompilation();
+ void MarkForParallelRecompilation();
// Helpers to compile this function. Returns true on success, false on
// failure (e.g., stack overflow during compilation).
+ static bool EnsureCompiled(Handle<JSFunction> function,
+ ClearExceptionFlag flag);
static bool CompileLazy(Handle<JSFunction> function,
ClearExceptionFlag flag);
static bool CompileOptimized(Handle<JSFunction> function,
- int osr_ast_id,
+ BailoutId osr_ast_id,
ClearExceptionFlag flag);
// Tells whether or not the function is already marked for lazy
// recompilation.
inline bool IsMarkedForLazyRecompilation();
+ inline bool IsMarkedForParallelRecompilation();
+
+ // Tells whether or not the function is on the parallel
+ // recompilation queue.
+ inline bool IsInRecompileQueue();
// Check whether or not this function is inlineable.
bool IsInlineable();
@@ -5859,8 +6224,6 @@ class JSFunction: public JSObject {
// The initial map for an object created by this constructor.
inline Map* initial_map();
inline void set_initial_map(Map* value);
- MUST_USE_RESULT inline MaybeObject* set_initial_map_and_cache_transitions(
- Map* value);
inline bool has_initial_map();
// Get and set the prototype property on a JSFunction. If the
@@ -5876,7 +6239,7 @@ class JSFunction: public JSObject {
// After prototype is removed, it will not be created when accessed, and
// [[Construct]] from this function will not be allowed.
- Object* RemovePrototype();
+ void RemovePrototype();
inline bool should_have_prototype();
// Accessor for this function's initial map's [[class]]
@@ -5888,7 +6251,7 @@ class JSFunction: public JSObject {
// Instances created afterwards will have a map whose [[class]] is
// set to 'value', but there is no guarantees on instances created
// before.
- Object* SetInstanceClassName(String* name);
+ void SetInstanceClassName(String* name);
// Returns if this function has been compiled to native code yet.
inline bool is_compiled();
@@ -5917,15 +6280,13 @@ class JSFunction: public JSObject {
}
void JSFunctionPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSFunctionVerify();
-#endif
+ DECLARE_VERIFIER(JSFunction)
// Returns the number of allocated literals.
inline int NumberOfLiterals();
- // Retrieve the global context from a function's literal array.
- static Context* GlobalContextFromLiterals(FixedArray* literals);
+ // Retrieve the native context from a function's literal array.
+ static Context* NativeContextFromLiterals(FixedArray* literals);
// Layout descriptors. The last property (from kNonWeakFieldsEndOffset to
// kSize) is weak and has special handling during garbage collection.
@@ -5942,7 +6303,7 @@ class JSFunction: public JSObject {
// Layout of the literals array.
static const int kLiteralsPrefixSize = 1;
- static const int kLiteralGlobalContextIndex = 0;
+ static const int kLiteralNativeContextIndex = 0;
// Layout of the bound-function binding array.
static const int kBoundFunctionIndex = 0;
@@ -5964,9 +6325,9 @@ class JSFunction: public JSObject {
class JSGlobalProxy : public JSObject {
public:
- // [context]: the owner global context of this global proxy object.
+ // [native_context]: the owner native context of this global proxy object.
// It is null value if this object is not used by any context.
- DECL_ACCESSORS(context, Object)
+ DECL_ACCESSORS(native_context, Object)
// Casting.
static inline JSGlobalProxy* cast(Object* obj);
@@ -5978,13 +6339,11 @@ class JSGlobalProxy : public JSObject {
}
void JSGlobalProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSGlobalProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSGlobalProxy)
// Layout description.
- static const int kContextOffset = JSObject::kHeaderSize;
- static const int kSize = kContextOffset + kPointerSize;
+ static const int kNativeContextOffset = JSObject::kHeaderSize;
+ static const int kSize = kNativeContextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalProxy);
@@ -6001,7 +6360,10 @@ class GlobalObject: public JSObject {
// [builtins]: the object holding the runtime routines written in JS.
DECL_ACCESSORS(builtins, JSBuiltinsObject)
- // [global context]: the global context corresponding to this global object.
+ // [native context]: the natives corresponding to this global object.
+ DECL_ACCESSORS(native_context, Context)
+
+ // [global context]: the most recent (i.e. innermost) global context.
DECL_ACCESSORS(global_context, Context)
// [global receiver]: the global receiver object of the context
@@ -6032,7 +6394,8 @@ class GlobalObject: public JSObject {
// Layout description.
static const int kBuiltinsOffset = JSObject::kHeaderSize;
- static const int kGlobalContextOffset = kBuiltinsOffset + kPointerSize;
+ static const int kNativeContextOffset = kBuiltinsOffset + kPointerSize;
+ static const int kGlobalContextOffset = kNativeContextOffset + kPointerSize;
static const int kGlobalReceiverOffset = kGlobalContextOffset + kPointerSize;
static const int kHeaderSize = kGlobalReceiverOffset + kPointerSize;
@@ -6054,9 +6417,7 @@ class JSGlobalObject: public GlobalObject {
}
void JSGlobalObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSGlobalObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSGlobalObject)
// Layout description.
static const int kSize = GlobalObject::kHeaderSize;
@@ -6088,9 +6449,7 @@ class JSBuiltinsObject: public GlobalObject {
}
void JSBuiltinsObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSBuiltinsObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSBuiltinsObject)
// Layout description. The size of the builtins object includes
// room for two pointers per runtime routine written in javascript
@@ -6131,9 +6490,7 @@ class JSValue: public JSObject {
}
void JSValuePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSValueVerify();
-#endif
+ DECLARE_VERIFIER(JSValue)
// Layout description.
static const int kValueOffset = JSObject::kHeaderSize;
@@ -6187,9 +6544,8 @@ class JSDate: public JSObject {
}
void JSDatePrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSDateVerify();
-#endif
+ DECLARE_VERIFIER(JSDate)
+
// The order is important. It must be kept in sync with date macros
// in macros.py.
enum FieldIndex {
@@ -6285,9 +6641,7 @@ class JSMessageObject: public JSObject {
}
void JSMessageObjectPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSMessageObjectVerify();
-#endif
+ DECLARE_VERIFIER(JSMessageObject)
// Layout description.
static const int kTypeOffset = JSObject::kHeaderSize;
@@ -6376,9 +6730,7 @@ class JSRegExp: public JSObject {
static inline JSRegExp* cast(Object* obj);
// Dispatched behavior.
-#ifdef DEBUG
- void JSRegExpVerify();
-#endif
+ DECLARE_VERIFIER(JSRegExp)
static const int kDataOffset = JSObject::kHeaderSize;
static const int kSize = kDataOffset + kPointerSize;
@@ -6478,13 +6830,15 @@ class CompilationCacheTable: public HashTable<CompilationCacheShape,
HashTableKey*> {
public:
// Find cached value for a string key, otherwise return null.
- Object* Lookup(String* src);
+ Object* Lookup(String* src, Context* context);
Object* LookupEval(String* src,
Context* context,
LanguageMode language_mode,
int scope_position);
Object* LookupRegExp(String* source, JSRegExp::Flags flags);
- MUST_USE_RESULT MaybeObject* Put(String* src, Object* value);
+ MUST_USE_RESULT MaybeObject* Put(String* src,
+ Context* context,
+ Object* value);
MUST_USE_RESULT MaybeObject* PutEval(String* src,
Context* context,
SharedFunctionInfo* value,
@@ -6532,9 +6886,7 @@ class CodeCache: public Struct {
}
void CodeCachePrint(FILE* out);
#endif
-#ifdef DEBUG
- void CodeCacheVerify();
-#endif
+ DECLARE_VERIFIER(CodeCache)
static const int kDefaultCacheOffset = HeapObject::kHeaderSize;
static const int kNormalTypeCacheOffset =
@@ -6623,9 +6975,7 @@ class PolymorphicCodeCache: public Struct {
}
void PolymorphicCodeCachePrint(FILE* out);
#endif
-#ifdef DEBUG
- void PolymorphicCodeCacheVerify();
-#endif
+ DECLARE_VERIFIER(PolymorphicCodeCache)
static const int kCacheOffset = HeapObject::kHeaderSize;
static const int kSize = kCacheOffset + kPointerSize;
@@ -6658,7 +7008,15 @@ class TypeFeedbackInfo: public Struct {
inline void set_ic_total_count(int count);
inline int ic_with_type_info_count();
- inline void set_ic_with_type_info_count(int count);
+ inline void change_ic_with_type_info_count(int count);
+
+ inline void initialize_storage();
+
+ inline void change_own_type_change_checksum();
+ inline int own_type_change_checksum();
+
+ inline void set_inlined_type_change_checksum(int checksum);
+ inline bool matches_inlined_type_change_checksum(int checksum);
DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells)
@@ -6670,18 +7028,27 @@ class TypeFeedbackInfo: public Struct {
}
void TypeFeedbackInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void TypeFeedbackInfoVerify();
-#endif
+ DECLARE_VERIFIER(TypeFeedbackInfo)
- static const int kIcTotalCountOffset = HeapObject::kHeaderSize;
- static const int kIcWithTypeinfoCountOffset =
- kIcTotalCountOffset + kPointerSize;
- static const int kTypeFeedbackCellsOffset =
- kIcWithTypeinfoCountOffset + kPointerSize;
+ static const int kStorage1Offset = HeapObject::kHeaderSize;
+ static const int kStorage2Offset = kStorage1Offset + kPointerSize;
+ static const int kTypeFeedbackCellsOffset = kStorage2Offset + kPointerSize;
static const int kSize = kTypeFeedbackCellsOffset + kPointerSize;
private:
+ static const int kTypeChangeChecksumBits = 7;
+
+ class ICTotalCountField: public BitField<int, 0,
+ kSmiValueSize - kTypeChangeChecksumBits> {}; // NOLINT
+ class OwnTypeChangeChecksum: public BitField<int,
+ kSmiValueSize - kTypeChangeChecksumBits,
+ kTypeChangeChecksumBits> {}; // NOLINT
+ class ICsWithTypeInfoCountField: public BitField<int, 0,
+ kSmiValueSize - kTypeChangeChecksumBits> {}; // NOLINT
+ class InlinedTypeChangeChecksum: public BitField<int,
+ kSmiValueSize - kTypeChangeChecksumBits,
+ kTypeChangeChecksumBits> {}; // NOLINT
+
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
};
@@ -6707,9 +7074,7 @@ class AliasedArgumentsEntry: public Struct {
}
void AliasedArgumentsEntryPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AliasedArgumentsEntryVerify();
-#endif
+ DECLARE_VERIFIER(AliasedArgumentsEntry)
static const int kAliasedContextSlot = HeapObject::kHeaderSize;
static const int kSize = kAliasedContextSlot + kPointerSize;
@@ -6752,10 +7117,6 @@ class StringHasher {
// index.
bool is_array_index() { return is_array_index_; }
- bool is_valid() { return is_valid_; }
-
- void invalidate() { is_valid_ = false; }
-
// Calculated hash value for a string consisting of 1 to
// String::kMaxArrayIndexSize digits with no leading zeros (except "0").
// value is represented decimal value.
@@ -6774,13 +7135,33 @@ class StringHasher {
inline uint32_t GetHash();
+ // Reusable parts of the hashing algorithm.
+ INLINE(static uint32_t AddCharacterCore(uint32_t running_hash, uint32_t c));
+ INLINE(static uint32_t GetHashCore(uint32_t running_hash));
+
int length_;
uint32_t raw_running_hash_;
uint32_t array_index_;
bool is_array_index_;
bool is_first_char_;
- bool is_valid_;
friend class TwoCharHashTableKey;
+
+ template <bool seq_ascii> friend class JsonParser;
+};
+
+
+class IncrementalAsciiStringHasher {
+ public:
+ explicit inline IncrementalAsciiStringHasher(uint32_t seed, char first_char);
+ inline void AddCharacter(uc32 c);
+ inline uint32_t GetHash();
+
+ private:
+ int length_;
+ uint32_t raw_running_hash_;
+ uint32_t array_index_;
+ bool is_array_index_;
+ char first_char_;
};
@@ -6974,9 +7355,6 @@ class String: public HeapObject {
bool IsAsciiEqualTo(Vector<const char> str);
bool IsTwoByteEqualTo(Vector<const uc16> str);
- bool SlowEqualsExternal(uc16 *string, int length);
- bool SlowEqualsExternal(char *string, int length);
-
// Return a UTF8 representation of the string. The string is null
// terminated but may optionally contain nulls. Length is returned
// in length_output if length_output is not a null pointer The string
@@ -7043,9 +7421,8 @@ class String: public HeapObject {
char* ToAsciiArray();
#endif
-#ifdef DEBUG
- void StringVerify();
-#endif
+ DECLARE_VERIFIER(String)
+
inline bool IsFlat();
// Layout description.
@@ -7114,7 +7491,7 @@ class String: public HeapObject {
kIsNotArrayIndexMask | kHashNotComputedMask;
// Value of hash field containing computed hash equal to zero.
- static const int kZeroHash = kIsNotArrayIndexMask;
+ static const int kEmptyStringHash = kIsNotArrayIndexMask;
// Maximal string length.
static const int kMaxLength = (1 << (32 - 2)) - 1;
@@ -7149,32 +7526,47 @@ class String: public HeapObject {
int from,
int to);
- static inline bool IsAscii(const char* chars, int length) {
+ // The return value may point to the first aligned word containing the
+ // first non-ascii character, rather than directly to the non-ascii character.
+ // If the return value is >= the passed length, the entire string was ASCII.
+ static inline int NonAsciiStart(const char* chars, int length) {
+ const char* start = chars;
const char* limit = chars + length;
#ifdef V8_HOST_CAN_READ_UNALIGNED
ASSERT(kMaxAsciiCharCode == 0x7F);
const uintptr_t non_ascii_mask = kUintptrAllBitsSet / 0xFF * 0x80;
- while (chars <= limit - sizeof(uintptr_t)) {
+ while (chars + sizeof(uintptr_t) <= limit) {
if (*reinterpret_cast<const uintptr_t*>(chars) & non_ascii_mask) {
- return false;
+ return static_cast<int>(chars - start);
}
chars += sizeof(uintptr_t);
}
#endif
while (chars < limit) {
- if (static_cast<uint8_t>(*chars) > kMaxAsciiCharCodeU) return false;
+ if (static_cast<uint8_t>(*chars) > kMaxAsciiCharCodeU) {
+ return static_cast<int>(chars - start);
+ }
++chars;
}
- return true;
+ return static_cast<int>(chars - start);
}
- static inline bool IsAscii(const uc16* chars, int length) {
+ static inline bool IsAscii(const char* chars, int length) {
+ return NonAsciiStart(chars, length) >= length;
+ }
+
+ static inline int NonAsciiStart(const uc16* chars, int length) {
const uc16* limit = chars + length;
+ const uc16* start = chars;
while (chars < limit) {
- if (*chars > kMaxAsciiCharCodeU) return false;
+ if (*chars > kMaxAsciiCharCodeU) return static_cast<int>(chars - start);
++chars;
}
- return true;
+ return static_cast<int>(chars - start);
+ }
+
+ static inline bool IsAscii(const uc16* chars, int length) {
+ return NonAsciiStart(chars, length) >= length;
}
protected:
@@ -7287,6 +7679,8 @@ class SeqAsciiString: public SeqString {
unsigned* offset,
unsigned chars);
+ DECLARE_VERIFIER(SeqAsciiString)
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqAsciiString);
};
@@ -7390,9 +7784,7 @@ class ConsString: public String {
typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
BodyDescriptor;
-#ifdef DEBUG
- void ConsStringVerify();
-#endif
+ DECLARE_VERIFIER(ConsString)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
@@ -7414,7 +7806,8 @@ class ConsString: public String {
class SlicedString: public String {
public:
inline String* parent();
- inline void set_parent(String* parent);
+ inline void set_parent(String* parent,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline int offset();
inline void set_offset(int offset);
@@ -7443,9 +7836,7 @@ class SlicedString: public String {
kOffsetOffset + kPointerSize, kSize>
BodyDescriptor;
-#ifdef DEBUG
- void SlicedStringVerify();
-#endif
+ DECLARE_VERIFIER(SlicedString)
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SlicedString);
@@ -7672,9 +8063,7 @@ class Oddball: public HeapObject {
static inline Oddball* cast(Object* obj);
// Dispatched behavior.
-#ifdef DEBUG
- void OddballVerify();
-#endif
+ DECLARE_VERIFIER(Oddball)
// Initialize the fields.
MUST_USE_RESULT MaybeObject* Initialize(const char* to_string,
@@ -7717,9 +8106,16 @@ class JSGlobalPropertyCell: public HeapObject {
// Casting.
static inline JSGlobalPropertyCell* cast(Object* obj);
-#ifdef DEBUG
- void JSGlobalPropertyCellVerify();
-#endif
+ static inline JSGlobalPropertyCell* FromValueAddress(Address value) {
+ return cast(FromAddress(value - kValueOffset));
+ }
+
+ inline Address ValueAddress() {
+ return address() + kValueOffset;
+ }
+
+ DECLARE_VERIFIER(JSGlobalPropertyCell)
+
#ifdef OBJECT_PRINT
inline void JSGlobalPropertyCellPrint() {
JSGlobalPropertyCellPrint(stdout);
@@ -7763,23 +8159,28 @@ class JSProxy: public JSReceiver {
uint32_t index);
MUST_USE_RESULT MaybeObject* SetPropertyWithHandler(
+ JSReceiver* receiver,
String* name,
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode);
MUST_USE_RESULT MaybeObject* SetElementWithHandler(
+ JSReceiver* receiver,
uint32_t index,
Object* value,
StrictModeFlag strict_mode);
- // If the handler defines an accessor property, invoke its setter
- // (or throw if only a getter exists) and set *found to true. Otherwise false.
- MUST_USE_RESULT MaybeObject* SetPropertyWithHandlerIfDefiningSetter(
+ // If the handler defines an accessor property with a setter, invoke it.
+ // If it defines an accessor property without a setter, or a data property
+ // that is read-only, throw. In all these cases set '*done' to true,
+ // otherwise set it to false.
+ MUST_USE_RESULT MaybeObject* SetPropertyViaPrototypesWithHandler(
+ JSReceiver* receiver,
String* name,
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool* found);
+ bool* done);
MUST_USE_RESULT MaybeObject* DeletePropertyWithHandler(
String* name,
@@ -7817,9 +8218,7 @@ class JSProxy: public JSReceiver {
}
void JSProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSProxy)
// Layout description. We add padding so that a proxy has the same
// size as a virgin JSObject. This is essential for becoming a JSObject
@@ -7860,9 +8259,7 @@ class JSFunctionProxy: public JSProxy {
}
void JSFunctionProxyPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSFunctionProxyVerify();
-#endif
+ DECLARE_VERIFIER(JSFunctionProxy)
// Layout description.
static const int kCallTrapOffset = JSProxy::kPaddingOffset;
@@ -7897,9 +8294,7 @@ class JSSet: public JSObject {
}
void JSSetPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSSetVerify();
-#endif
+ DECLARE_VERIFIER(JSSet)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kSize = kTableOffset + kPointerSize;
@@ -7924,9 +8319,7 @@ class JSMap: public JSObject {
}
void JSMapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSMapVerify();
-#endif
+ DECLARE_VERIFIER(JSMap)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kSize = kTableOffset + kPointerSize;
@@ -7954,9 +8347,7 @@ class JSWeakMap: public JSObject {
}
void JSWeakMapPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSWeakMapVerify();
-#endif
+ DECLARE_VERIFIER(JSWeakMap)
static const int kTableOffset = JSObject::kHeaderSize;
static const int kNextOffset = kTableOffset + kPointerSize;
@@ -7991,9 +8382,7 @@ class Foreign: public HeapObject {
}
void ForeignPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ForeignVerify();
-#endif
+ DECLARE_VERIFIER(Foreign)
// Layout description.
@@ -8050,9 +8439,7 @@ class JSArray: public JSObject {
}
void JSArrayPrint(FILE* out);
#endif
-#ifdef DEBUG
- void JSArrayVerify();
-#endif
+ DECLARE_VERIFIER(JSArray)
// Number of element slots to pre-allocate for an empty array.
static const int kPreallocatedArrayElements = 4;
@@ -8106,6 +8493,7 @@ class AccessorInfo: public Struct {
DECL_ACCESSORS(data, Object)
DECL_ACCESSORS(name, Object)
DECL_ACCESSORS(flag, Smi)
+ DECL_ACCESSORS(expected_receiver_type, Object)
inline bool all_can_read();
inline void set_all_can_read(bool value);
@@ -8119,6 +8507,9 @@ class AccessorInfo: public Struct {
inline PropertyAttributes property_attributes();
inline void set_property_attributes(PropertyAttributes attributes);
+ // Checks whether the given receiver is compatible with this accessor.
+ inline bool IsCompatibleReceiver(Object* receiver);
+
static inline AccessorInfo* cast(Object* obj);
#ifdef OBJECT_PRINT
@@ -8127,16 +8518,15 @@ class AccessorInfo: public Struct {
}
void AccessorInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AccessorInfoVerify();
-#endif
+ DECLARE_VERIFIER(AccessorInfo)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
static const int kDataOffset = kSetterOffset + kPointerSize;
static const int kNameOffset = kDataOffset + kPointerSize;
static const int kFlagOffset = kNameOffset + kPointerSize;
- static const int kSize = kFlagOffset + kPointerSize;
+ static const int kExpectedReceiverTypeOffset = kFlagOffset + kPointerSize;
+ static const int kSize = kExpectedReceiverTypeOffset + kPointerSize;
private:
// Bit positions in flag.
@@ -8162,7 +8552,19 @@ class AccessorPair: public Struct {
static inline AccessorPair* cast(Object* obj);
- MUST_USE_RESULT MaybeObject* CopyWithoutTransitions();
+ MUST_USE_RESULT MaybeObject* Copy();
+
+ Object* get(AccessorComponent component) {
+ return component == ACCESSOR_GETTER ? getter() : setter();
+ }
+
+ void set(AccessorComponent component, Object* value) {
+ if (component == ACCESSOR_GETTER) {
+ set_getter(value);
+ } else {
+ set_setter(value);
+ }
+ }
// Note: Returns undefined instead in case of a hole.
Object* GetComponent(AccessorComponent component);
@@ -8180,9 +8582,7 @@ class AccessorPair: public Struct {
#ifdef OBJECT_PRINT
void AccessorPairPrint(FILE* out = stdout);
#endif
-#ifdef DEBUG
- void AccessorPairVerify();
-#endif
+ DECLARE_VERIFIER(AccessorPair)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
@@ -8216,9 +8616,7 @@ class AccessCheckInfo: public Struct {
}
void AccessCheckInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void AccessCheckInfoVerify();
-#endif
+ DECLARE_VERIFIER(AccessCheckInfo)
static const int kNamedCallbackOffset = HeapObject::kHeaderSize;
static const int kIndexedCallbackOffset = kNamedCallbackOffset + kPointerSize;
@@ -8248,9 +8646,7 @@ class InterceptorInfo: public Struct {
}
void InterceptorInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void InterceptorInfoVerify();
-#endif
+ DECLARE_VERIFIER(InterceptorInfo)
static const int kGetterOffset = HeapObject::kHeaderSize;
static const int kSetterOffset = kGetterOffset + kPointerSize;
@@ -8279,9 +8675,7 @@ class CallHandlerInfo: public Struct {
}
void CallHandlerInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void CallHandlerInfoVerify();
-#endif
+ DECLARE_VERIFIER(CallHandlerInfo)
static const int kCallbackOffset = HeapObject::kHeaderSize;
static const int kDataOffset = kCallbackOffset + kPointerSize;
@@ -8297,9 +8691,7 @@ class TemplateInfo: public Struct {
DECL_ACCESSORS(tag, Object)
DECL_ACCESSORS(property_list, Object)
-#ifdef DEBUG
- void TemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(TemplateInfo)
static const int kTagOffset = HeapObject::kHeaderSize;
static const int kPropertyListOffset = kTagOffset + kPointerSize;
@@ -8342,9 +8734,7 @@ class FunctionTemplateInfo: public TemplateInfo {
}
void FunctionTemplateInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void FunctionTemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(FunctionTemplateInfo)
static const int kSerialNumberOffset = TemplateInfo::kHeaderSize;
static const int kCallCodeOffset = kSerialNumberOffset + kPointerSize;
@@ -8393,9 +8783,7 @@ class ObjectTemplateInfo: public TemplateInfo {
}
void ObjectTemplateInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void ObjectTemplateInfoVerify();
-#endif
+ DECLARE_VERIFIER(ObjectTemplateInfo)
static const int kConstructorOffset = TemplateInfo::kHeaderSize;
static const int kInternalFieldCountOffset =
@@ -8419,9 +8807,7 @@ class SignatureInfo: public Struct {
}
void SignatureInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void SignatureInfoVerify();
-#endif
+ DECLARE_VERIFIER(SignatureInfo)
static const int kReceiverOffset = Struct::kHeaderSize;
static const int kArgsOffset = kReceiverOffset + kPointerSize;
@@ -8444,9 +8830,7 @@ class TypeSwitchInfo: public Struct {
}
void TypeSwitchInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void TypeSwitchInfoVerify();
-#endif
+ DECLARE_VERIFIER(TypeSwitchInfo)
static const int kTypesOffset = Struct::kHeaderSize;
static const int kSize = kTypesOffset + kPointerSize;
@@ -8496,9 +8880,7 @@ class DebugInfo: public Struct {
}
void DebugInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void DebugInfoVerify();
-#endif
+ DECLARE_VERIFIER(DebugInfo)
static const int kSharedFunctionInfoIndex = Struct::kHeaderSize;
static const int kOriginalCodeIndex = kSharedFunctionInfoIndex + kPointerSize;
@@ -8554,9 +8936,7 @@ class BreakPointInfo: public Struct {
}
void BreakPointInfoPrint(FILE* out);
#endif
-#ifdef DEBUG
- void BreakPointInfoVerify();
-#endif
+ DECLARE_VERIFIER(BreakPointInfo)
static const int kCodePositionIndex = Struct::kHeaderSize;
static const int kSourcePositionIndex = kCodePositionIndex + kPointerSize;
@@ -8574,6 +8954,7 @@ class BreakPointInfo: public Struct {
#undef DECL_BOOLEAN_ACCESSORS
#undef DECL_ACCESSORS
+#undef DECLARE_VERIFIER
#define VISITOR_SYNCHRONIZATION_TAGS_LIST(V) \
V(kSymbolTable, "symbol_table", "(Symbols)") \
@@ -8638,14 +9019,16 @@ class ObjectVisitor BASE_EMBEDDED {
// Visits a debug call target in the instruction stream.
virtual void VisitDebugTarget(RelocInfo* rinfo);
+ // Visits the byte sequence in a function's prologue that contains information
+ // about the code's age.
+ virtual void VisitCodeAgeSequence(RelocInfo* rinfo);
+
// Handy shorthand for visiting a single pointer.
virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
// Visit pointer embedded into a code object.
virtual void VisitEmbeddedPointer(RelocInfo* rinfo);
- virtual void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {}
-
// Visits a contiguous arrays of external references (references to the C++
// heap) in the half-open range [start, end). Any or all of the values
// may be modified on return.
diff --git a/src/3rdparty/v8/src/optimizing-compiler-thread.cc b/src/3rdparty/v8/src/optimizing-compiler-thread.cc
new file mode 100644
index 0000000..83ff104
--- /dev/null
+++ b/src/3rdparty/v8/src/optimizing-compiler-thread.cc
@@ -0,0 +1,132 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// For Windows CE, Windows headers need to be included first as they define ASSERT
+#ifdef _WIN32_WCE
+# include "win32-headers.h"
+#endif
+
+#include "optimizing-compiler-thread.h"
+
+#include "v8.h"
+
+#include "hydrogen.h"
+#include "isolate.h"
+#include "v8threads.h"
+
+namespace v8 {
+namespace internal {
+
+
+void OptimizingCompilerThread::Run() {
+#ifdef DEBUG
+ thread_id_ = ThreadId::Current().ToInteger();
+#endif
+ Isolate::SetIsolateThreadLocals(isolate_, NULL);
+
+ int64_t epoch = 0;
+ if (FLAG_trace_parallel_recompilation) epoch = OS::Ticks();
+
+ while (true) {
+ input_queue_semaphore_->Wait();
+ if (Acquire_Load(&stop_thread_)) {
+ stop_semaphore_->Signal();
+ if (FLAG_trace_parallel_recompilation) {
+ time_spent_total_ = OS::Ticks() - epoch;
+ }
+ return;
+ }
+
+ int64_t compiling_start = 0;
+ if (FLAG_trace_parallel_recompilation) compiling_start = OS::Ticks();
+
+ Heap::RelocationLock relocation_lock(isolate_->heap());
+ OptimizingCompiler* optimizing_compiler = NULL;
+ input_queue_.Dequeue(&optimizing_compiler);
+ Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1));
+
+ ASSERT(!optimizing_compiler->info()->closure()->IsOptimized());
+
+ OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph();
+ ASSERT(status != OptimizingCompiler::FAILED);
+ // Prevent an unused-variable error in release mode.
+ USE(status);
+
+ output_queue_.Enqueue(optimizing_compiler);
+ isolate_->stack_guard()->RequestCodeReadyEvent();
+
+ if (FLAG_trace_parallel_recompilation) {
+ time_spent_compiling_ += OS::Ticks() - compiling_start;
+ }
+ }
+}
+
+
+void OptimizingCompilerThread::Stop() {
+ Release_Store(&stop_thread_, static_cast<AtomicWord>(true));
+ input_queue_semaphore_->Signal();
+ stop_semaphore_->Wait();
+
+ if (FLAG_trace_parallel_recompilation) {
+ double compile_time = static_cast<double>(time_spent_compiling_);
+ double total_time = static_cast<double>(time_spent_total_);
+ double percentage = (compile_time * 100) / total_time;
+ PrintF(" ** Compiler thread did %.2f%% useful work\n", percentage);
+ }
+}
+
+
+void OptimizingCompilerThread::InstallOptimizedFunctions() {
+ HandleScope handle_scope(isolate_);
+ int functions_installed = 0;
+ while (!output_queue_.IsEmpty()) {
+ OptimizingCompiler* compiler = NULL;
+ output_queue_.Dequeue(&compiler);
+ Compiler::InstallOptimizedCode(compiler);
+ functions_installed++;
+ }
+ if (FLAG_trace_parallel_recompilation && functions_installed != 0) {
+ PrintF(" ** Installed %d function(s).\n", functions_installed);
+ }
+}
+
+
+void OptimizingCompilerThread::QueueForOptimization(
+ OptimizingCompiler* optimizing_compiler) {
+ input_queue_.Enqueue(optimizing_compiler);
+ input_queue_semaphore_->Signal();
+}
+
+#ifdef DEBUG
+bool OptimizingCompilerThread::IsOptimizerThread() {
+ if (!FLAG_parallel_recompilation) return false;
+ return ThreadId::Current().ToInteger() == thread_id_;
+}
+#endif
+
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/optimizing-compiler-thread.h b/src/3rdparty/v8/src/optimizing-compiler-thread.h
new file mode 100644
index 0000000..d562726
--- /dev/null
+++ b/src/3rdparty/v8/src/optimizing-compiler-thread.h
@@ -0,0 +1,101 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_OPTIMIZING_COMPILER_THREAD_H_
+#define V8_OPTIMIZING_COMPILER_THREAD_H_
+
+#include "atomicops.h"
+#include "platform.h"
+#include "flags.h"
+#include "unbound-queue.h"
+
+namespace v8 {
+namespace internal {
+
+class HGraphBuilder;
+class OptimizingCompiler;
+
+class OptimizingCompilerThread : public Thread {
+ public:
+ explicit OptimizingCompilerThread(Isolate *isolate) :
+ Thread("OptimizingCompilerThread"),
+ isolate_(isolate),
+ stop_semaphore_(OS::CreateSemaphore(0)),
+ input_queue_semaphore_(OS::CreateSemaphore(0)),
+ time_spent_compiling_(0),
+ time_spent_total_(0) {
+ NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(false));
+ NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0));
+ }
+
+ void Run();
+ void Stop();
+ void QueueForOptimization(OptimizingCompiler* optimizing_compiler);
+ void InstallOptimizedFunctions();
+
+ inline bool IsQueueAvailable() {
+ // We don't need a barrier since we have a data dependency right
+ // after.
+ Atomic32 current_length = NoBarrier_Load(&queue_length_);
+
+ // This can be queried only from the execution thread.
+ ASSERT(!IsOptimizerThread());
+ // Since only the execution thread increments queue_length_ and
+ // only one thread can run inside an Isolate at one time, a direct
+ // doesn't introduce a race -- queue_length_ may decreased in
+ // meantime, but not increased.
+ return (current_length < FLAG_parallel_recompilation_queue_length);
+ }
+
+#ifdef DEBUG
+ bool IsOptimizerThread();
+#endif
+
+ ~OptimizingCompilerThread() {
+ delete input_queue_semaphore_;
+ delete stop_semaphore_;
+ }
+
+ private:
+ Isolate* isolate_;
+ Semaphore* stop_semaphore_;
+ Semaphore* input_queue_semaphore_;
+ UnboundQueue<OptimizingCompiler*> input_queue_;
+ UnboundQueue<OptimizingCompiler*> output_queue_;
+ volatile AtomicWord stop_thread_;
+ volatile Atomic32 queue_length_;
+ int64_t time_spent_compiling_;
+ int64_t time_spent_total_;
+
+#ifdef DEBUG
+ int thread_id_;
+#endif
+};
+
+} } // namespace v8::internal
+
+#endif // V8_OPTIMIZING_COMPILER_THREAD_H_
diff --git a/src/3rdparty/v8/src/parser.cc b/src/3rdparty/v8/src/parser.cc
index b26a9f9..da4685f 100644
--- a/src/3rdparty/v8/src/parser.cc
+++ b/src/3rdparty/v8/src/parser.cc
@@ -86,8 +86,8 @@ class PositionStack {
};
-RegExpBuilder::RegExpBuilder()
- : zone_(Isolate::Current()->zone()),
+RegExpBuilder::RegExpBuilder(Zone* zone)
+ : zone_(zone),
pending_empty_(false),
characters_(NULL),
terms_(),
@@ -103,7 +103,7 @@ void RegExpBuilder::FlushCharacters() {
if (characters_ != NULL) {
RegExpTree* atom = new(zone()) RegExpAtom(characters_->ToConstVector());
characters_ = NULL;
- text_.Add(atom);
+ text_.Add(atom, zone());
LAST(ADD_ATOM);
}
}
@@ -115,12 +115,12 @@ void RegExpBuilder::FlushText() {
if (num_text == 0) {
return;
} else if (num_text == 1) {
- terms_.Add(text_.last());
+ terms_.Add(text_.last(), zone());
} else {
- RegExpText* text = new(zone()) RegExpText();
+ RegExpText* text = new(zone()) RegExpText(zone());
for (int i = 0; i < num_text; i++)
- text_.Get(i)->AppendToText(text);
- terms_.Add(text);
+ text_.Get(i)->AppendToText(text, zone());
+ terms_.Add(text, zone());
}
text_.Clear();
}
@@ -129,9 +129,9 @@ void RegExpBuilder::FlushText() {
void RegExpBuilder::AddCharacter(uc16 c) {
pending_empty_ = false;
if (characters_ == NULL) {
- characters_ = new(zone()) ZoneList<uc16>(4);
+ characters_ = new(zone()) ZoneList<uc16>(4, zone());
}
- characters_->Add(c);
+ characters_->Add(c, zone());
LAST(ADD_CHAR);
}
@@ -148,10 +148,10 @@ void RegExpBuilder::AddAtom(RegExpTree* term) {
}
if (term->IsTextElement()) {
FlushCharacters();
- text_.Add(term);
+ text_.Add(term, zone());
} else {
FlushText();
- terms_.Add(term);
+ terms_.Add(term, zone());
}
LAST(ADD_ATOM);
}
@@ -159,7 +159,7 @@ void RegExpBuilder::AddAtom(RegExpTree* term) {
void RegExpBuilder::AddAssertion(RegExpTree* assert) {
FlushText();
- terms_.Add(assert);
+ terms_.Add(assert, zone());
LAST(ADD_ASSERT);
}
@@ -178,9 +178,9 @@ void RegExpBuilder::FlushTerms() {
} else if (num_terms == 1) {
alternative = terms_.last();
} else {
- alternative = new(zone()) RegExpAlternative(terms_.GetList());
+ alternative = new(zone()) RegExpAlternative(terms_.GetList(zone()));
}
- alternatives_.Add(alternative);
+ alternatives_.Add(alternative, zone());
terms_.Clear();
LAST(ADD_NONE);
}
@@ -195,7 +195,7 @@ RegExpTree* RegExpBuilder::ToRegExp() {
if (num_alternatives == 1) {
return alternatives_.last();
}
- return new(zone()) RegExpDisjunction(alternatives_.GetList());
+ return new(zone()) RegExpDisjunction(alternatives_.GetList(zone()));
}
@@ -214,7 +214,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
int num_chars = char_vector.length();
if (num_chars > 1) {
Vector<const uc16> prefix = char_vector.SubVector(0, num_chars - 1);
- text_.Add(new(zone()) RegExpAtom(prefix));
+ text_.Add(new(zone()) RegExpAtom(prefix), zone());
char_vector = char_vector.SubVector(num_chars - 1, num_chars);
}
characters_ = NULL;
@@ -233,7 +233,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
if (min == 0) {
return;
}
- terms_.Add(atom);
+ terms_.Add(atom, zone());
return;
}
} else {
@@ -241,7 +241,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
UNREACHABLE();
return;
}
- terms_.Add(new(zone()) RegExpQuantifier(min, max, type, atom));
+ terms_.Add(new(zone()) RegExpQuantifier(min, max, type, atom), zone());
LAST(ADD_TERM);
}
@@ -270,7 +270,7 @@ Handle<String> Parser::LookupCachedSymbol(int symbol_id) {
if (symbol_cache_.length() <= symbol_id) {
// Increase length to index + 1.
symbol_cache_.AddBlock(Handle<String>::null(),
- symbol_id + 1 - symbol_cache_.length());
+ symbol_id + 1 - symbol_cache_.length(), zone());
}
Handle<String> result = symbol_cache_.at(symbol_id);
if (result.is_null()) {
@@ -408,7 +408,7 @@ unsigned* ScriptDataImpl::ReadAddress(int position) {
Scope* Parser::NewScope(Scope* parent, ScopeType type) {
- Scope* result = new(zone()) Scope(parent, type);
+ Scope* result = new(zone()) Scope(parent, type, zone());
result->Initialize();
return result;
}
@@ -493,10 +493,10 @@ Parser::FunctionState::FunctionState(Parser* parser,
outer_function_state_(parser->current_function_state_),
outer_scope_(parser->top_scope_),
saved_ast_node_id_(isolate->ast_node_id()),
- factory_(isolate) {
+ factory_(isolate, parser->zone()) {
parser->top_scope_ = scope;
parser->current_function_state_ = this;
- isolate->set_ast_node_id(AstNode::kDeclarationsId + 1);
+ isolate->set_ast_node_id(BailoutId::FirstUsable().ToInt());
}
@@ -532,13 +532,13 @@ Parser::FunctionState::~FunctionState() {
// ----------------------------------------------------------------------------
// Implementation of Parser
-Parser::Parser(Handle<Script> script,
+Parser::Parser(CompilationInfo* info,
int parser_flags,
v8::Extension* extension,
ScriptDataImpl* pre_data)
- : isolate_(script->GetIsolate()),
- symbol_cache_(pre_data ? pre_data->symbol_count() : 0),
- script_(script),
+ : isolate_(info->isolate()),
+ symbol_cache_(pre_data ? pre_data->symbol_count() : 0, info->zone()),
+ script_(info->script()),
scanner_(isolate_->unicode_cache()),
reusable_preparser_(NULL),
top_scope_(NULL),
@@ -551,7 +551,10 @@ Parser::Parser(Handle<Script> script,
allow_lazy_((parser_flags & kAllowLazy) != 0),
allow_modules_((parser_flags & kAllowModules) != 0),
stack_overflow_(false),
- parenthesized_function_(false) {
+ parenthesized_function_(false),
+ zone_(info->zone()),
+ info_(info) {
+ ASSERT(!script_.is_null());
isolate_->set_ast_node_id(0);
if ((parser_flags & kLanguageModeMask) == EXTENDED_MODE) {
scanner().SetHarmonyScoping(true);
@@ -562,16 +565,17 @@ Parser::Parser(Handle<Script> script,
}
-FunctionLiteral* Parser::ParseProgram(CompilationInfo* info) {
- ZoneScope zone_scope(isolate(), DONT_DELETE_ON_EXIT);
-
+FunctionLiteral* Parser::ParseProgram() {
+ ZoneScope zone_scope(zone(), DONT_DELETE_ON_EXIT);
HistogramTimerScope timer(isolate()->counters()->parse());
Handle<String> source(String::cast(script_->source()));
isolate()->counters()->total_parse_size()->Increment(source->length());
- fni_ = new(zone()) FuncNameInferrer(isolate());
+ int64_t start = FLAG_trace_parse ? OS::Ticks() : 0;
+ fni_ = new(zone()) FuncNameInferrer(isolate(), zone());
// Initialize parser state.
source->TryFlatten();
+ FunctionLiteral* result;
if (source->IsExternalTwoByteString()) {
// Notice that the stream is destroyed at the end of the branch block.
// The last line of the blocks can't be moved outside, even though they're
@@ -579,12 +583,27 @@ FunctionLiteral* Parser::ParseProgram(CompilationInfo* info) {
ExternalTwoByteStringUtf16CharacterStream stream(
Handle<ExternalTwoByteString>::cast(source), 0, source->length());
scanner_.Initialize(&stream);
- return DoParseProgram(info, source, &zone_scope);
+ result = DoParseProgram(info(), source, &zone_scope);
} else {
GenericStringUtf16CharacterStream stream(source, 0, source->length());
scanner_.Initialize(&stream);
- return DoParseProgram(info, source, &zone_scope);
+ result = DoParseProgram(info(), source, &zone_scope);
+ }
+
+ if (FLAG_trace_parse && result != NULL) {
+ double ms = static_cast<double>(OS::Ticks() - start) / 1000;
+ if (info()->is_eval()) {
+ PrintF("[parsing eval");
+ } else if (info()->script()->name()->IsString()) {
+ String* name = String::cast(info()->script()->name());
+ SmartArrayPointer<char> name_chars = name->ToCString();
+ PrintF("[parsing script: %s", *name_chars);
+ } else {
+ PrintF("[parsing script");
+ }
+ PrintF(" - took %0.3f ms]\n", ms);
}
+ return result;
}
@@ -596,34 +615,37 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
if (pre_data_ != NULL) pre_data_->Initialize();
// Compute the parsing mode.
- mode_ = (FLAG_lazy && allow_lazy_) ? PARSE_LAZILY : PARSE_EAGERLY;
- if (allow_natives_syntax_ || extension_ != NULL) mode_ = PARSE_EAGERLY;
+ Mode mode = (FLAG_lazy && allow_lazy_) ? PARSE_LAZILY : PARSE_EAGERLY;
+ if (allow_natives_syntax_ || extension_ != NULL) mode = PARSE_EAGERLY;
+ ParsingModeScope parsing_mode(this, mode);
Handle<String> no_name = isolate()->factory()->empty_symbol();
FunctionLiteral* result = NULL;
{ Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
info->SetGlobalScope(scope);
+ if (!info->context().is_null()) {
+ scope = Scope::DeserializeScopeChain(*info->context(), scope, zone());
+ }
if (info->is_eval()) {
- Handle<SharedFunctionInfo> shared = info->shared_info();
- if (!info->is_global() && (shared.is_null() || shared->is_function())) {
- scope = Scope::DeserializeScopeChain(*info->calling_context(), scope);
- }
if (!scope->is_global_scope() || info->language_mode() != CLASSIC_MODE) {
scope = NewScope(scope, EVAL_SCOPE);
}
+ } else if (info->is_global()) {
+ scope = NewScope(scope, GLOBAL_SCOPE);
}
scope->set_start_position(0);
scope->set_end_position(source->length());
- FunctionState function_state(this, scope, isolate());
+
+ FunctionState function_state(this, scope, isolate()); // Enters 'scope'.
top_scope_->SetLanguageMode(info->language_mode());
if (info->is_qml_mode()) {
scope->EnableQmlModeFlag();
}
- ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16);
+ ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16, zone());
bool ok = true;
int beg_loc = scanner().location().beg_pos;
- ParseSourceElements(body, Token::EOS, info->is_eval(), &ok);
+ ParseSourceElements(body, Token::EOS, info->is_eval(), true, &ok);
if (ok && !top_scope_->is_classic_mode()) {
CheckOctalLiteral(beg_loc, scanner().location().end_pos, &ok);
}
@@ -645,7 +667,8 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
0,
FunctionLiteral::kNoDuplicateParameters,
FunctionLiteral::ANONYMOUS_EXPRESSION,
- FunctionLiteral::kGlobalOrEval);
+ FunctionLiteral::kGlobalOrEval,
+ FunctionLiteral::kNotParenthesized);
result->set_ast_properties(factory()->visitor()->ast_properties());
} else if (stack_overflow_) {
isolate()->StackOverflow();
@@ -662,45 +685,51 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
}
-FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
- ZoneScope zone_scope(isolate(), DONT_DELETE_ON_EXIT);
+FunctionLiteral* Parser::ParseLazy() {
+ ZoneScope zone_scope(zone(), DONT_DELETE_ON_EXIT);
HistogramTimerScope timer(isolate()->counters()->parse_lazy());
Handle<String> source(String::cast(script_->source()));
isolate()->counters()->total_parse_size()->Increment(source->length());
+ int64_t start = FLAG_trace_parse ? OS::Ticks() : 0;
+ Handle<SharedFunctionInfo> shared_info = info()->shared_info();
- Handle<SharedFunctionInfo> shared_info = info->shared_info();
// Initialize parser state.
source->TryFlatten();
+ FunctionLiteral* result;
if (source->IsExternalTwoByteString()) {
ExternalTwoByteStringUtf16CharacterStream stream(
Handle<ExternalTwoByteString>::cast(source),
shared_info->start_position(),
shared_info->end_position());
- FunctionLiteral* result = ParseLazy(info, &stream, &zone_scope);
- return result;
+ result = ParseLazy(&stream, &zone_scope);
} else {
GenericStringUtf16CharacterStream stream(source,
shared_info->start_position(),
shared_info->end_position());
- FunctionLiteral* result = ParseLazy(info, &stream, &zone_scope);
- return result;
+ result = ParseLazy(&stream, &zone_scope);
}
+
+ if (FLAG_trace_parse && result != NULL) {
+ double ms = static_cast<double>(OS::Ticks() - start) / 1000;
+ SmartArrayPointer<char> name_chars = result->debug_name()->ToCString();
+ PrintF("[parsing function: %s - took %0.3f ms]\n", *name_chars, ms);
+ }
+ return result;
}
-FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
- Utf16CharacterStream* source,
+FunctionLiteral* Parser::ParseLazy(Utf16CharacterStream* source,
ZoneScope* zone_scope) {
- Handle<SharedFunctionInfo> shared_info = info->shared_info();
+ Handle<SharedFunctionInfo> shared_info = info()->shared_info();
scanner_.Initialize(source);
ASSERT(top_scope_ == NULL);
ASSERT(target_stack_ == NULL);
Handle<String> name(String::cast(shared_info->name()));
- fni_ = new(zone()) FuncNameInferrer(isolate());
+ fni_ = new(zone()) FuncNameInferrer(isolate(), zone());
fni_->PushEnclosingName(name);
- mode_ = PARSE_EAGERLY;
+ ParsingModeScope parsing_mode(this, PARSE_EAGERLY);
// Place holder for the result.
FunctionLiteral* result = NULL;
@@ -708,15 +737,16 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
{
// Parse the function literal.
Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
- info->SetGlobalScope(scope);
- if (!info->closure().is_null()) {
- scope = Scope::DeserializeScopeChain(info->closure()->context(), scope);
+ info()->SetGlobalScope(scope);
+ if (!info()->closure().is_null()) {
+ scope = Scope::DeserializeScopeChain(info()->closure()->context(), scope,
+ zone());
}
FunctionState function_state(this, scope, isolate());
- ASSERT(scope->language_mode() != STRICT_MODE || !info->is_classic_mode());
+ ASSERT(scope->language_mode() != STRICT_MODE || !info()->is_classic_mode());
ASSERT(scope->language_mode() != EXTENDED_MODE ||
- info->is_extended_mode());
- ASSERT(info->language_mode() == shared_info->language_mode());
+ info()->is_extended_mode());
+ ASSERT(info()->language_mode() == shared_info->language_mode());
scope->SetLanguageMode(shared_info->language_mode());
if (shared_info->qml_mode()) {
top_scope_->EnableQmlModeFlag();
@@ -808,152 +838,25 @@ void Parser::ReportMessageAt(Scanner::Location source_location,
}
-// Base class containing common code for the different finder classes used by
-// the parser.
-class ParserFinder {
- protected:
- ParserFinder() {}
- static Assignment* AsAssignment(Statement* stat) {
- if (stat == NULL) return NULL;
- ExpressionStatement* exp_stat = stat->AsExpressionStatement();
- if (exp_stat == NULL) return NULL;
- return exp_stat->expression()->AsAssignment();
- }
-};
-
-
-// An InitializationBlockFinder finds and marks sequences of statements of the
-// form expr.a = ...; expr.b = ...; etc.
-class InitializationBlockFinder : public ParserFinder {
- public:
- // We find and mark the initialization blocks in top level
- // non-looping code only. This is because the optimization prevents
- // reuse of the map transitions, so it should be used only for code
- // that will only be run once.
- InitializationBlockFinder(Scope* top_scope, Target* target)
- : enabled_(top_scope->DeclarationScope()->is_global_scope() &&
- !IsLoopTarget(target)),
- first_in_block_(NULL),
- last_in_block_(NULL),
- block_size_(0) {}
-
- ~InitializationBlockFinder() {
- if (!enabled_) return;
- if (InBlock()) EndBlock();
- }
-
- void Update(Statement* stat) {
- if (!enabled_) return;
- Assignment* assignment = AsAssignment(stat);
- if (InBlock()) {
- if (BlockContinues(assignment)) {
- UpdateBlock(assignment);
- } else {
- EndBlock();
- }
- }
- if (!InBlock() && (assignment != NULL) &&
- (assignment->op() == Token::ASSIGN)) {
- StartBlock(assignment);
- }
- }
-
- private:
- // The minimum number of contiguous assignment that will
- // be treated as an initialization block. Benchmarks show that
- // the overhead exceeds the savings below this limit.
- static const int kMinInitializationBlock = 3;
-
- static bool IsLoopTarget(Target* target) {
- while (target != NULL) {
- if (target->node()->AsIterationStatement() != NULL) return true;
- target = target->previous();
- }
- return false;
- }
-
- // Returns true if the expressions appear to denote the same object.
- // In the context of initialization blocks, we only consider expressions
- // of the form 'expr.x' or expr["x"].
- static bool SameObject(Expression* e1, Expression* e2) {
- VariableProxy* v1 = e1->AsVariableProxy();
- VariableProxy* v2 = e2->AsVariableProxy();
- if (v1 != NULL && v2 != NULL) {
- return v1->name()->Equals(*v2->name());
- }
- Property* p1 = e1->AsProperty();
- Property* p2 = e2->AsProperty();
- if ((p1 == NULL) || (p2 == NULL)) return false;
- Literal* key1 = p1->key()->AsLiteral();
- Literal* key2 = p2->key()->AsLiteral();
- if ((key1 == NULL) || (key2 == NULL)) return false;
- if (!key1->handle()->IsString() || !key2->handle()->IsString()) {
- return false;
- }
- String* name1 = String::cast(*key1->handle());
- String* name2 = String::cast(*key2->handle());
- if (!name1->Equals(name2)) return false;
- return SameObject(p1->obj(), p2->obj());
- }
-
- // Returns true if the expressions appear to denote different properties
- // of the same object.
- static bool PropertyOfSameObject(Expression* e1, Expression* e2) {
- Property* p1 = e1->AsProperty();
- Property* p2 = e2->AsProperty();
- if ((p1 == NULL) || (p2 == NULL)) return false;
- return SameObject(p1->obj(), p2->obj());
- }
-
- bool BlockContinues(Assignment* assignment) {
- if ((assignment == NULL) || (first_in_block_ == NULL)) return false;
- if (assignment->op() != Token::ASSIGN) return false;
- return PropertyOfSameObject(first_in_block_->target(),
- assignment->target());
- }
-
- void StartBlock(Assignment* assignment) {
- first_in_block_ = assignment;
- last_in_block_ = assignment;
- block_size_ = 1;
- }
-
- void UpdateBlock(Assignment* assignment) {
- last_in_block_ = assignment;
- ++block_size_;
- }
-
- void EndBlock() {
- if (block_size_ >= kMinInitializationBlock) {
- first_in_block_->mark_block_start();
- last_in_block_->mark_block_end();
- }
- last_in_block_ = first_in_block_ = NULL;
- block_size_ = 0;
- }
-
- bool InBlock() { return first_in_block_ != NULL; }
-
- const bool enabled_;
- Assignment* first_in_block_;
- Assignment* last_in_block_;
- int block_size_;
-
- DISALLOW_COPY_AND_ASSIGN(InitializationBlockFinder);
-};
-
-
// A ThisNamedPropertyAssignmentFinder finds and marks statements of the form
// this.x = ...;, where x is a named property. It also determines whether a
// function contains only assignments of this type.
-class ThisNamedPropertyAssignmentFinder : public ParserFinder {
+class ThisNamedPropertyAssignmentFinder {
public:
- explicit ThisNamedPropertyAssignmentFinder(Isolate* isolate)
+ ThisNamedPropertyAssignmentFinder(Isolate* isolate, Zone* zone)
: isolate_(isolate),
only_simple_this_property_assignments_(true),
- names_(0),
- assigned_arguments_(0),
- assigned_constants_(0) {
+ names_(0, zone),
+ assigned_arguments_(0, zone),
+ assigned_constants_(0, zone),
+ zone_(zone) {
+ }
+
+ static Assignment* AsAssignment(Statement* stat) {
+ if (stat == NULL) return NULL;
+ ExpressionStatement* exp_stat = stat->AsExpressionStatement();
+ if (exp_stat == NULL) return NULL;
+ return exp_stat->expression()->AsAssignment();
}
void Update(Scope* scope, Statement* stat) {
@@ -1062,9 +965,9 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
return;
}
}
- names_.Add(name);
- assigned_arguments_.Add(index);
- assigned_constants_.Add(isolate_->factory()->undefined_value());
+ names_.Add(name, zone());
+ assigned_arguments_.Add(index, zone());
+ assigned_constants_.Add(isolate_->factory()->undefined_value(), zone());
}
void AssignmentFromConstant(Handle<String> name, Handle<Object> value) {
@@ -1076,9 +979,9 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
return;
}
}
- names_.Add(name);
- assigned_arguments_.Add(-1);
- assigned_constants_.Add(value);
+ names_.Add(name, zone());
+ assigned_arguments_.Add(-1, zone());
+ assigned_constants_.Add(value, zone());
}
void AssignmentFromSomethingElse() {
@@ -1090,23 +993,27 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
if (names_.capacity() == 0) {
ASSERT(assigned_arguments_.capacity() == 0);
ASSERT(assigned_constants_.capacity() == 0);
- names_.Initialize(4);
- assigned_arguments_.Initialize(4);
- assigned_constants_.Initialize(4);
+ names_.Initialize(4, zone());
+ assigned_arguments_.Initialize(4, zone());
+ assigned_constants_.Initialize(4, zone());
}
}
+ Zone* zone() const { return zone_; }
+
Isolate* isolate_;
bool only_simple_this_property_assignments_;
ZoneStringList names_;
ZoneList<int> assigned_arguments_;
ZoneObjectList assigned_constants_;
+ Zone* zone_;
};
void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
int end_token,
bool is_eval,
+ bool is_global,
bool* ok) {
// SourceElements ::
// (ModuleElement)* <end_token>
@@ -1118,8 +1025,8 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
TargetScope scope(&this->target_stack_);
ASSERT(processor != NULL);
- InitializationBlockFinder block_finder(top_scope_, target_stack_);
- ThisNamedPropertyAssignmentFinder this_property_assignment_finder(isolate());
+ ThisNamedPropertyAssignmentFinder this_property_assignment_finder(isolate(),
+ zone());
bool directive_prologue = true; // Parsing directive prologue.
while (peek() != end_token) {
@@ -1128,7 +1035,12 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
}
Scanner::Location token_loc = scanner().peek_location();
- Statement* stat = ParseModuleElement(NULL, CHECK_OK);
+ Statement* stat;
+ if (is_global && !is_eval) {
+ stat = ParseModuleElement(NULL, CHECK_OK);
+ } else {
+ stat = ParseBlockElement(NULL, CHECK_OK);
+ }
if (stat == NULL || stat->IsEmpty()) {
directive_prologue = false; // End of directive prologue.
continue;
@@ -1172,12 +1084,11 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
}
}
- block_finder.Update(stat);
// Find and mark all assignments to named properties in this (this.x =)
if (top_scope_->is_function_scope()) {
this_property_assignment_finder.Update(top_scope_, stat);
}
- processor->Add(stat);
+ processor->Add(stat, zone());
}
// Propagate the collected information on this property assignments.
@@ -1243,12 +1154,10 @@ Statement* Parser::ParseModuleElement(ZoneStringList* labels,
}
-Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
+Statement* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
// ModuleDeclaration:
// 'module' Identifier Module
- // Create new block with one expected declaration.
- Block* block = factory()->NewBlock(NULL, 1, true);
Handle<String> name = ParseIdentifier(CHECK_OK);
#ifdef DEBUG
@@ -1272,10 +1181,11 @@ Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
}
#endif
- // TODO(rossberg): Add initialization statement to block.
-
- if (names) names->Add(name);
- return block;
+ if (names) names->Add(name, zone());
+ if (module->body() == NULL)
+ return factory()->NewEmptyStatement();
+ else
+ return module->body();
}
@@ -1323,16 +1233,14 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
{
BlockState block_state(this, scope);
- TargetCollector collector;
+ TargetCollector collector(zone());
Target target(&this->target_stack_, &collector);
Target target_body(&this->target_stack_, body);
- InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
Statement* stat = ParseModuleElement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- body->AddStatement(stat);
- block_finder.Update(stat);
+ body->AddStatement(stat, zone());
}
}
}
@@ -1341,16 +1249,23 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
scope->set_end_position(scanner().location().end_pos);
body->set_scope(scope);
- // Instance objects have to be created ahead of time (before code generation
- // linking them) because of potentially cyclic references between them.
- // We create them here, to avoid another pass over the AST.
+ // Check that all exports are bound.
Interface* interface = scope->interface();
+ for (Interface::Iterator it = interface->iterator();
+ !it.done(); it.Advance()) {
+ if (scope->LocalLookup(it.name()) == NULL) {
+ Handle<String> name(it.name());
+ ReportMessage("module_export_undefined",
+ Vector<Handle<String> >(&name, 1));
+ *ok = false;
+ return NULL;
+ }
+ }
+
interface->MakeModule(ok);
- ASSERT(ok);
- interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
- ASSERT(ok);
+ ASSERT(*ok);
interface->Freeze(ok);
- ASSERT(ok);
+ ASSERT(*ok);
return factory()->NewModuleLiteral(body, interface);
}
@@ -1368,7 +1283,7 @@ Module* Parser::ParseModulePath(bool* ok) {
PrintF("# Path .%s ", name->ToAsciiArray());
#endif
Module* member = factory()->NewModulePath(result, name);
- result->interface()->Add(name, member->interface(), ok);
+ result->interface()->Add(name, member->interface(), zone(), ok);
if (!*ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1399,7 +1314,8 @@ Module* Parser::ParseModuleVariable(bool* ok) {
PrintF("# Module variable %s ", name->ToAsciiArray());
#endif
VariableProxy* proxy = top_scope_->NewUnresolved(
- factory(), name, scanner().location().beg_pos, Interface::NewModule());
+ factory(), name, Interface::NewModule(zone()),
+ scanner().location().beg_pos);
return factory()->NewModuleVariable(proxy);
}
@@ -1420,10 +1336,12 @@ Module* Parser::ParseModuleUrl(bool* ok) {
Module* result = factory()->NewModuleUrl(symbol);
Interface* interface = result->interface();
- interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
- ASSERT(ok);
interface->Freeze(ok);
- ASSERT(ok);
+ ASSERT(*ok);
+ // Create dummy scope to avoid errors as long as the feature isn't finished.
+ Scope* scope = NewScope(top_scope_, MODULE_SCOPE);
+ interface->Unify(scope->interface(), zone(), ok);
+ ASSERT(*ok);
return result;
}
@@ -1448,14 +1366,14 @@ Block* Parser::ParseImportDeclaration(bool* ok) {
// TODO(ES6): implement destructuring ImportSpecifiers
Expect(Token::IMPORT, CHECK_OK);
- ZoneStringList names(1);
+ ZoneStringList names(1, zone());
Handle<String> name = ParseIdentifierName(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
while (peek() == Token::COMMA) {
Consume(Token::COMMA);
name = ParseIdentifierName(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
}
ExpectContextualKeyword("from", CHECK_OK);
@@ -1470,8 +1388,8 @@ Block* Parser::ParseImportDeclaration(bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Import %s ", names[i]->ToAsciiArray());
#endif
- Interface* interface = Interface::NewUnknown();
- module->interface()->Add(names[i], interface, ok);
+ Interface* interface = Interface::NewUnknown(zone());
+ module->interface()->Add(names[i], interface, zone(), ok);
if (!*ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1487,7 +1405,6 @@ Block* Parser::ParseImportDeclaration(bool* ok) {
Declaration* declaration =
factory()->NewImportDeclaration(proxy, module, top_scope_);
Declare(declaration, true, CHECK_OK);
- // TODO(rossberg): Add initialization statement to block.
}
return block;
@@ -1506,17 +1423,17 @@ Statement* Parser::ParseExportDeclaration(bool* ok) {
Expect(Token::EXPORT, CHECK_OK);
Statement* result = NULL;
- ZoneStringList names(1);
+ ZoneStringList names(1, zone());
switch (peek()) {
case Token::IDENTIFIER: {
Handle<String> name = ParseIdentifier(CHECK_OK);
// Handle 'module' as a context-sensitive keyword.
if (!name->IsEqualTo(CStrVector("module"))) {
- names.Add(name);
+ names.Add(name, zone());
while (peek() == Token::COMMA) {
Consume(Token::COMMA);
name = ParseIdentifier(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
}
ExpectSemicolon(CHECK_OK);
result = factory()->NewEmptyStatement();
@@ -1549,8 +1466,10 @@ Statement* Parser::ParseExportDeclaration(bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Export %s ", names[i]->ToAsciiArray());
#endif
- Interface* inner = Interface::NewUnknown();
- interface->Add(names[i], inner, CHECK_OK);
+ Interface* inner = Interface::NewUnknown(zone());
+ interface->Add(names[i], inner, zone(), CHECK_OK);
+ if (!*ok)
+ return NULL;
VariableProxy* proxy = NewUnresolved(names[i], LET, inner);
USE(proxy);
// TODO(rossberg): Rethink whether we actually need to store export
@@ -1683,7 +1602,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
if (statement) {
statement->set_statement_pos(statement_pos);
}
- if (result) result->AddStatement(statement);
+ if (result) result->AddStatement(statement, zone());
return result;
}
@@ -1726,7 +1645,7 @@ VariableProxy* Parser::NewUnresolved(
// Let/const variables in harmony mode are always added to the immediately
// enclosing scope.
return DeclarationScope(mode)->NewUnresolved(
- factory(), name, scanner().location().beg_pos, interface);
+ factory(), name, interface, scanner().location().beg_pos);
}
@@ -1737,7 +1656,7 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
Scope* declaration_scope = DeclarationScope(mode);
Variable* var = NULL;
- // If a function scope exists, then we can statically declare this
+ // If a suitable scope exists, then we can statically declare this
// variable and also set its mode. In any case, a Declaration node
// will be added to the scope so that the declaration can be added
// to the corresponding activation frame at runtime if necessary.
@@ -1745,56 +1664,58 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
// to the calling function context.
// Similarly, strict mode eval scope does not leak variable declarations to
// the caller's scope so we declare all locals, too.
- // Also for block scoped let/const bindings the variable can be
- // statically declared.
if (declaration_scope->is_function_scope() ||
declaration_scope->is_strict_or_extended_eval_scope() ||
declaration_scope->is_block_scope() ||
declaration_scope->is_module_scope() ||
- declaration->AsModuleDeclaration() != NULL) {
- // Declare the variable in the function scope.
- var = declaration_scope->LocalLookup(name);
+ declaration_scope->is_global_scope()) {
+ // Declare the variable in the declaration scope.
+ // For the global scope, we have to check for collisions with earlier
+ // (i.e., enclosing) global scopes, to maintain the illusion of a single
+ // global scope.
+ var = declaration_scope->is_global_scope()
+ ? declaration_scope->Lookup(name)
+ : declaration_scope->LocalLookup(name);
if (var == NULL) {
// Declare the name.
var = declaration_scope->DeclareLocal(
name, mode, declaration->initialization(), proxy->interface());
- } else {
+ } else if ((mode != VAR || var->mode() != VAR) &&
+ (!declaration_scope->is_global_scope() ||
+ IsLexicalVariableMode(mode) ||
+ IsLexicalVariableMode(var->mode()))) {
// The name was declared in this scope before; check for conflicting
// re-declarations. We have a conflict if either of the declarations is
- // not a var. There is similar code in runtime.cc in the Declare
+ // not a var (in the global scope, we also have to ignore legacy const for
+ // compatibility). There is similar code in runtime.cc in the Declare
// functions. The function CheckNonConflictingScope checks for conflicting
// var and let bindings from different scopes whereas this is a check for
// conflicting declarations within the same scope. This check also covers
+ // the special case
//
// function () { let x; { var x; } }
//
// because the var declaration is hoisted to the function scope where 'x'
// is already bound.
- if ((mode != VAR) || (var->mode() != VAR)) {
- // We only have vars, consts and lets in declarations.
- ASSERT(var->mode() == VAR ||
- var->mode() == CONST ||
- var->mode() == CONST_HARMONY ||
- var->mode() == LET);
- if (is_extended_mode()) {
- // In harmony mode we treat re-declarations as early errors. See
- // ES5 16 for a definition of early errors.
- SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS);
- const char* elms[2] = { "Variable", *c_string };
- Vector<const char*> args(elms, 2);
- ReportMessage("redeclaration", args);
- *ok = false;
- return;
- }
- const char* type = (var->mode() == VAR)
- ? "var" : var->is_const_mode() ? "const" : "let";
- Handle<String> type_string =
- isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED);
- Expression* expression =
- NewThrowTypeError(isolate()->factory()->redeclaration_symbol(),
- type_string, name);
- declaration_scope->SetIllegalRedeclaration(expression);
+ ASSERT(IsDeclaredVariableMode(var->mode()));
+ if (is_extended_mode()) {
+ // In harmony mode we treat re-declarations as early errors. See
+ // ES5 16 for a definition of early errors.
+ SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS);
+ const char* elms[2] = { "Variable", *c_string };
+ Vector<const char*> args(elms, 2);
+ ReportMessage("redeclaration", args);
+ *ok = false;
+ return;
}
+ const char* type =
+ (var->mode() == VAR) ? "var" : var->is_const_mode() ? "const" : "let";
+ Handle<String> type_string =
+ isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED);
+ Expression* expression =
+ NewThrowTypeError(isolate()->factory()->redeclaration_symbol(),
+ type_string, name);
+ declaration_scope->SetIllegalRedeclaration(expression);
}
}
@@ -1816,8 +1737,7 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
// Runtime::DeclareContextSlot() calls.
declaration_scope->AddDeclaration(declaration);
- if ((mode == CONST || mode == CONST_HARMONY) &&
- declaration_scope->is_global_scope()) {
+ if (mode == CONST && declaration_scope->is_global_scope()) {
// For global const variables we bind the proxy to a variable.
ASSERT(resolve); // should be set by all callers
Variable::Kind kind = Variable::NORMAL;
@@ -1868,6 +1788,25 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
// both access to the static and the dynamic context chain; the
// runtime needs to provide both.
if (resolve && var != NULL) {
+ if (declaration_scope->is_qml_mode()) {
+ Handle<GlobalObject> global = isolate_->global_object();
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
+ // Get the context before the debugger was entered.
+ SaveContext *save = isolate_->save_context();
+ while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
+ save = save->prev();
+
+ global = Handle<GlobalObject>(save->context()->global_object());
+ }
+#endif
+
+ if (!global->HasProperty(*(proxy->name()))) {
+ var->set_is_qml_global(true);
+ }
+ }
+
proxy->BindTo(var);
if (FLAG_harmony_modules) {
@@ -1876,7 +1815,7 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Declare %s\n", var->name()->ToAsciiArray());
#endif
- proxy->interface()->Unify(var->interface(), &ok);
+ proxy->interface()->Unify(var->interface(), zone(), &ok);
if (!ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1942,7 +1881,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// TODO(1240846): It's weird that native function declarations are
// introduced dynamically when we meet their declarations, whereas
// other functions are set up when entering the surrounding scope.
- VariableProxy* proxy = NewUnresolved(name, VAR);
+ VariableProxy* proxy = NewUnresolved(name, VAR, Interface::NewValue());
Declaration* declaration =
factory()->NewVariableDeclaration(proxy, VAR, top_scope_);
Declare(declaration, true, CHECK_OK);
@@ -1968,14 +1907,17 @@ Statement* Parser::ParseFunctionDeclaration(ZoneStringList* names, bool* ok) {
FunctionLiteral::DECLARATION,
CHECK_OK);
// Even if we're not at the top-level of the global or a function
- // scope, we treat is as such and introduce the function with it's
+ // scope, we treat it as such and introduce the function with its
// initial value upon entering the corresponding scope.
- VariableMode mode = is_extended_mode() ? LET : VAR;
- VariableProxy* proxy = NewUnresolved(name, mode);
+ // In extended mode, a function behaves as a lexical binding, except in the
+ // global scope.
+ VariableMode mode =
+ is_extended_mode() && !top_scope_->is_global_scope() ? LET : VAR;
+ VariableProxy* proxy = NewUnresolved(name, mode, Interface::NewValue());
Declaration* declaration =
factory()->NewFunctionDeclaration(proxy, mode, fun, top_scope_);
Declare(declaration, true, CHECK_OK);
- if (names) names->Add(name);
+ if (names) names->Add(name, zone());
return factory()->NewEmptyStatement();
}
@@ -1993,12 +1935,10 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
Block* result = factory()->NewBlock(labels, 16, false);
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
- InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
Statement* stat = ParseStatement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- result->AddStatement(stat);
- block_finder.Update(stat);
+ result->AddStatement(stat, zone());
}
}
Expect(Token::RBRACE, CHECK_OK);
@@ -2020,16 +1960,14 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
Expect(Token::LBRACE, CHECK_OK);
block_scope->set_start_position(scanner().location().beg_pos);
{ BlockState block_state(this, block_scope);
- TargetCollector collector;
+ TargetCollector collector(zone());
Target target(&this->target_stack_, &collector);
Target target_body(&this->target_stack_, body);
- InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
Statement* stat = ParseBlockElement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- body->AddStatement(stat);
- block_finder.Update(stat);
+ body->AddStatement(stat, zone());
}
}
}
@@ -2203,7 +2141,9 @@ Block* Parser::ParseVariableDeclarations(
// For let/const declarations in harmony mode, we can also immediately
// pre-resolve the proxy because it resides in the same scope as the
// declaration.
- VariableProxy* proxy = NewUnresolved(name, mode);
+ Interface* interface =
+ is_const ? Interface::NewConst() : Interface::NewValue();
+ VariableProxy* proxy = NewUnresolved(name, mode, interface);
Declaration* declaration =
factory()->NewVariableDeclaration(proxy, mode, top_scope_);
Declare(declaration, mode != VAR, CHECK_OK);
@@ -2214,7 +2154,7 @@ Block* Parser::ParseVariableDeclarations(
*ok = false;
return NULL;
}
- if (names) names->Add(name);
+ if (names) names->Add(name, zone());
// Parse initialization expression if present and/or needed. A
// declaration of the form:
@@ -2291,21 +2231,24 @@ Block* Parser::ParseVariableDeclarations(
// declaration statement has been executed. This is important in
// browsers where the global object (window) has lots of
// properties defined in prototype objects.
- if (initialization_scope->is_global_scope()) {
+ if (initialization_scope->is_global_scope() &&
+ !IsLexicalVariableMode(mode)) {
// Compute the arguments for the runtime call.
- ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
+ ZoneList<Expression*>* arguments =
+ new(zone()) ZoneList<Expression*>(3, zone());
// We have at least 1 parameter.
- arguments->Add(factory()->NewLiteral(name));
+ arguments->Add(factory()->NewLiteral(name), zone());
CallRuntime* initialize;
if (is_const) {
- arguments->Add(value);
+ arguments->Add(value, zone());
value = NULL; // zap the value to avoid the unnecessary assignment
int qml_mode = 0;
- if (top_scope_->is_qml_mode() && !Isolate::Current()->global()->HasProperty(*name))
+ if (top_scope_->is_qml_mode()
+ && !Isolate::Current()->global_object()->HasProperty(*name))
qml_mode = 1;
- arguments->Add(factory()->NewNumberLiteral(qml_mode));
+ arguments->Add(factory()->NewNumberLiteral(qml_mode), zone());
// Construct the call to Runtime_InitializeConstGlobal
// and add it to the initialization statement block.
@@ -2319,19 +2262,20 @@ Block* Parser::ParseVariableDeclarations(
// Add strict mode.
// We may want to pass singleton to avoid Literal allocations.
LanguageMode language_mode = initialization_scope->language_mode();
- arguments->Add(factory()->NewNumberLiteral(language_mode));
+ arguments->Add(factory()->NewNumberLiteral(language_mode), zone());
int qml_mode = 0;
- if (top_scope_->is_qml_mode() && !Isolate::Current()->global()->HasProperty(*name))
+ if (top_scope_->is_qml_mode()
+ && !Isolate::Current()->global_object()->HasProperty(*name))
qml_mode = 1;
- arguments->Add(factory()->NewNumberLiteral(qml_mode));
+ arguments->Add(factory()->NewNumberLiteral(qml_mode), zone());
// Be careful not to assign a value to the global variable if
// we're in a with. The initialization value should not
// necessarily be stored in the global object in that case,
// which is why we need to generate a separate assignment node.
if (value != NULL && !inside_with()) {
- arguments->Add(value);
+ arguments->Add(value, zone());
value = NULL; // zap the value to avoid the unnecessary assignment
}
@@ -2345,7 +2289,8 @@ Block* Parser::ParseVariableDeclarations(
arguments);
}
- block->AddStatement(factory()->NewExpressionStatement(initialize));
+ block->AddStatement(factory()->NewExpressionStatement(initialize),
+ zone());
} else if (needs_init) {
// Constant initializations always assign to the declared constant which
// is always at the function scope level. This is only relevant for
@@ -2359,7 +2304,8 @@ Block* Parser::ParseVariableDeclarations(
ASSERT(value != NULL);
Assignment* assignment =
factory()->NewAssignment(init_op, proxy, value, position);
- block->AddStatement(factory()->NewExpressionStatement(assignment));
+ block->AddStatement(factory()->NewExpressionStatement(assignment),
+ zone());
value = NULL;
}
@@ -2371,10 +2317,11 @@ Block* Parser::ParseVariableDeclarations(
// if they are inside a 'with' statement - they may change a 'with' object
// property).
VariableProxy* proxy =
- initialization_scope->NewUnresolved(factory(), name);
+ initialization_scope->NewUnresolved(factory(), name, interface);
Assignment* assignment =
factory()->NewAssignment(init_op, proxy, value, position);
- block->AddStatement(factory()->NewExpressionStatement(assignment));
+ block->AddStatement(factory()->NewExpressionStatement(assignment),
+ zone());
}
if (fni_ != NULL) fni_->Leave();
@@ -2428,8 +2375,10 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels,
*ok = false;
return NULL;
}
- if (labels == NULL) labels = new(zone()) ZoneStringList(4);
- labels->Add(label);
+ if (labels == NULL) {
+ labels = new(zone()) ZoneStringList(4, zone());
+ }
+ labels->Add(label, zone());
// Remove the "ghost" variable that turned out to be a label
// from the top scope. This way, we don't try to resolve it
// during the scope processing.
@@ -2641,12 +2590,13 @@ CaseClause* Parser::ParseCaseClause(bool* default_seen_ptr, bool* ok) {
}
Expect(Token::COLON, CHECK_OK);
int pos = scanner().location().beg_pos;
- ZoneList<Statement*>* statements = new(zone()) ZoneList<Statement*>(5);
+ ZoneList<Statement*>* statements =
+ new(zone()) ZoneList<Statement*>(5, zone());
while (peek() != Token::CASE &&
peek() != Token::DEFAULT &&
peek() != Token::RBRACE) {
Statement* stat = ParseStatement(NULL, CHECK_OK);
- statements->Add(stat);
+ statements->Add(stat, zone());
}
return new(zone()) CaseClause(isolate(), label, statements, pos);
@@ -2667,11 +2617,11 @@ SwitchStatement* Parser::ParseSwitchStatement(ZoneStringList* labels,
Expect(Token::RPAREN, CHECK_OK);
bool default_seen = false;
- ZoneList<CaseClause*>* cases = new(zone()) ZoneList<CaseClause*>(4);
+ ZoneList<CaseClause*>* cases = new(zone()) ZoneList<CaseClause*>(4, zone());
Expect(Token::LBRACE, CHECK_OK);
while (peek() != Token::RBRACE) {
CaseClause* clause = ParseCaseClause(&default_seen, CHECK_OK);
- cases->Add(clause);
+ cases->Add(clause, zone());
}
Expect(Token::RBRACE, CHECK_OK);
@@ -2712,7 +2662,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
Expect(Token::TRY, CHECK_OK);
- TargetCollector try_collector;
+ TargetCollector try_collector(zone());
Block* try_block;
{ Target target(&this->target_stack_, &try_collector);
@@ -2730,7 +2680,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
// then we will need to collect escaping targets from the catch
// block. Since we don't know yet if there will be a finally block, we
// always collect the targets.
- TargetCollector catch_collector;
+ TargetCollector catch_collector(zone());
Scope* catch_scope = NULL;
Variable* catch_variable = NULL;
Block* catch_block = NULL;
@@ -2785,7 +2735,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
index, try_block, catch_scope, catch_variable, catch_block);
statement->set_escaping_targets(try_collector.targets());
try_block = factory()->NewBlock(NULL, 1, false);
- try_block->AddStatement(statement);
+ try_block->AddStatement(statement, zone());
catch_block = NULL; // Clear to indicate it's been handled.
}
@@ -2801,7 +2751,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
int index = current_function_state_->NextHandlerIndex();
result = factory()->NewTryFinallyStatement(index, try_block, finally_block);
// Combine the jump targets of the try block and the possible catch block.
- try_collector.targets()->AddAll(*catch_collector.targets());
+ try_collector.targets()->AddAll(*catch_collector.targets(), zone());
}
result->set_escaping_targets(try_collector.targets());
@@ -2875,12 +2825,14 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
for_scope->set_start_position(scanner().location().beg_pos);
if (peek() != Token::SEMICOLON) {
if (peek() == Token::VAR || peek() == Token::CONST) {
+ bool is_const = peek() == Token::CONST;
Handle<String> name;
Block* variable_statement =
ParseVariableDeclarations(kForStatement, NULL, NULL, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
- VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
+ Interface* interface =
+ is_const ? Interface::NewConst() : Interface::NewValue();
ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
@@ -2888,11 +2840,13 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Expression* enumerable = ParseExpression(true, CHECK_OK);
Expect(Token::RPAREN, CHECK_OK);
+ VariableProxy* each =
+ top_scope_->NewUnresolved(factory(), name, interface);
Statement* body = ParseStatement(NULL, CHECK_OK);
loop->Initialize(each, enumerable, body);
Block* result = factory()->NewBlock(NULL, 2, false);
- result->AddStatement(variable_statement);
- result->AddStatement(loop);
+ result->AddStatement(variable_statement, zone());
+ result->AddStatement(loop, zone());
top_scope_ = saved_scope;
for_scope->set_end_position(scanner().location().end_pos);
for_scope = for_scope->FinalizeBlockScope();
@@ -2925,25 +2879,33 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// TODO(keuchel): Move the temporary variable to the block scope, after
// implementing stack allocated block scoped variables.
- Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name);
+ Factory* heap_factory = isolate()->factory();
+ Handle<String> tempstr =
+ heap_factory->NewConsString(heap_factory->dot_for_symbol(), name);
+ Handle<String> tempname = heap_factory->LookupSymbol(tempstr);
+ Variable* temp = top_scope_->DeclarationScope()->NewTemporary(tempname);
VariableProxy* temp_proxy = factory()->NewVariableProxy(temp);
- VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
+ // The expression does not see the loop variable.
Expect(Token::IN, CHECK_OK);
+ top_scope_ = saved_scope;
Expression* enumerable = ParseExpression(true, CHECK_OK);
+ top_scope_ = for_scope;
Expect(Token::RPAREN, CHECK_OK);
+ VariableProxy* each =
+ top_scope_->NewUnresolved(factory(), name, Interface::NewValue());
Statement* body = ParseStatement(NULL, CHECK_OK);
Block* body_block = factory()->NewBlock(NULL, 3, false);
Assignment* assignment = factory()->NewAssignment(
Token::ASSIGN, each, temp_proxy, RelocInfo::kNoPosition);
Statement* assignment_statement =
factory()->NewExpressionStatement(assignment);
- body_block->AddStatement(variable_statement);
- body_block->AddStatement(assignment_statement);
- body_block->AddStatement(body);
+ body_block->AddStatement(variable_statement, zone());
+ body_block->AddStatement(assignment_statement, zone());
+ body_block->AddStatement(body, zone());
loop->Initialize(temp_proxy, enumerable, body_block);
top_scope_ = saved_scope;
for_scope->set_end_position(scanner().location().end_pos);
@@ -3026,8 +2988,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// }
ASSERT(init != NULL);
Block* result = factory()->NewBlock(NULL, 2, false);
- result->AddStatement(init);
- result->AddStatement(loop);
+ result->AddStatement(init, zone());
+ result->AddStatement(loop, zone());
result->set_scope(for_scope);
if (loop) loop->Initialize(NULL, cond, next, body);
return result;
@@ -3409,6 +3371,12 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
// should not point to the closing brace otherwise it will intersect
// with positions recorded for function literal and confuse debugger.
pos = scanner().peek_location().beg_pos;
+ // Also the trailing parenthesis are a hint that the function will
+ // be called immediately. If we happen to have parsed a preceding
+ // function literal eagerly, we can also compile it eagerly.
+ if (result->IsFunctionLiteral() && mode() == PARSE_EAGERLY) {
+ result->AsFunctionLiteral()->set_parenthesized();
+ }
}
ZoneList<Expression*>* args = ParseArguments(CHECK_OK);
@@ -3470,7 +3438,7 @@ Expression* Parser::ParseNewPrefix(PositionStack* stack, bool* ok) {
if (!stack->is_empty()) {
int last = stack->pop();
result = factory()->NewCallNew(
- result, new(zone()) ZoneList<Expression*>(0), last);
+ result, new(zone()) ZoneList<Expression*>(0, zone()), last);
}
return result;
}
@@ -3660,9 +3628,9 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Variable %s ", name->ToAsciiArray());
#endif
- Interface* interface = Interface::NewUnknown();
+ Interface* interface = Interface::NewUnknown(zone());
result = top_scope_->NewUnresolved(
- factory(), name, scanner().location().beg_pos, interface);
+ factory(), name, interface, scanner().location().beg_pos);
break;
}
@@ -3760,7 +3728,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// ArrayLiteral ::
// '[' Expression? (',' Expression?)* ']'
- ZoneList<Expression*>* values = new(zone()) ZoneList<Expression*>(4);
+ ZoneList<Expression*>* values = new(zone()) ZoneList<Expression*>(4, zone());
Expect(Token::LBRACK, CHECK_OK);
while (peek() != Token::RBRACK) {
Expression* elem;
@@ -3769,7 +3737,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
} else {
elem = ParseAssignmentExpression(true, CHECK_OK);
}
- values->Add(elem);
+ values->Add(elem, zone());
if (peek() != Token::RBRACK) {
Expect(Token::COMMA, CHECK_OK);
}
@@ -3783,10 +3751,12 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Handle<FixedArray> object_literals =
isolate()->factory()->NewFixedArray(values->length(), TENURED);
Handle<FixedDoubleArray> double_literals;
- ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
+ ElementsKind elements_kind = FAST_SMI_ELEMENTS;
bool has_only_undefined_values = true;
+ bool has_hole_values = false;
// Fill in the literals.
+ Heap* heap = isolate()->heap();
bool is_simple = true;
int depth = 1;
for (int i = 0, n = values->length(); i < n; i++) {
@@ -3795,12 +3765,18 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
depth = m_literal->depth() + 1;
}
Handle<Object> boilerplate_value = GetBoilerplateValue(values->at(i));
- if (boilerplate_value->IsUndefined()) {
+ if (boilerplate_value->IsTheHole()) {
+ has_hole_values = true;
object_literals->set_the_hole(i);
if (elements_kind == FAST_DOUBLE_ELEMENTS) {
double_literals->set_the_hole(i);
}
+ } else if (boilerplate_value->IsUndefined()) {
is_simple = false;
+ object_literals->set(i, Smi::FromInt(0));
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ double_literals->set(i, 0);
+ }
} else {
// Examine each literal element, and adjust the ElementsKind if the
// literal element is not of a type that can be stored in the current
@@ -3810,7 +3786,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// ultimately end up in FAST_ELEMENTS.
has_only_undefined_values = false;
object_literals->set(i, *boilerplate_value);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (elements_kind == FAST_SMI_ELEMENTS) {
// Smi only elements. Notice if a transition to FAST_DOUBLE_ELEMENTS or
// FAST_ELEMENTS is required.
if (!boilerplate_value->IsSmi()) {
@@ -3858,7 +3834,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// elements array to a copy-on-write array.
if (is_simple && depth == 1 && values->length() > 0 &&
elements_kind != FAST_DOUBLE_ELEMENTS) {
- object_literals->set_map(isolate()->heap()->fixed_cow_array_map());
+ object_literals->set_map(heap->fixed_cow_array_map());
}
Handle<FixedArrayBase> element_values = elements_kind == FAST_DOUBLE_ELEMENTS
@@ -3870,6 +3846,10 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Handle<FixedArray> literals =
isolate()->factory()->NewFixedArray(2, TENURED);
+ if (has_hole_values || !FLAG_packed_arrays) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ }
+
literals->set(0, Smi::FromInt(elements_kind));
literals->set(1, *element_values);
@@ -4126,7 +4106,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
// )*[','] '}'
ZoneList<ObjectLiteral::Property*>* properties =
- new(zone()) ZoneList<ObjectLiteral::Property*>(4);
+ new(zone()) ZoneList<ObjectLiteral::Property*>(4, zone());
int number_of_boilerplate_properties = 0;
bool has_function = false;
@@ -4163,7 +4143,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
}
// Validate the property.
checker.CheckProperty(property, loc, CHECK_OK);
- properties->Add(property);
+ properties->Add(property, zone());
if (peek() != Token::RBRACE) Expect(Token::COMMA, CHECK_OK);
if (fni_ != NULL) {
@@ -4231,7 +4211,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
if (IsBoilerplateProperty(property)) number_of_boilerplate_properties++;
// Validate the property
checker.CheckProperty(property, loc, CHECK_OK);
- properties->Add(property);
+ properties->Add(property, zone());
// TODO(1240767): Consider allowing trailing comma.
if (peek() != Token::RBRACE) Expect(Token::COMMA, CHECK_OK);
@@ -4290,12 +4270,12 @@ ZoneList<Expression*>* Parser::ParseArguments(bool* ok) {
// Arguments ::
// '(' (AssignmentExpression)*[','] ')'
- ZoneList<Expression*>* result = new(zone()) ZoneList<Expression*>(4);
+ ZoneList<Expression*>* result = new(zone()) ZoneList<Expression*>(4, zone());
Expect(Token::LPAREN, CHECK_OK);
bool done = (peek() == Token::RPAREN);
while (!done) {
Expression* argument = ParseAssignmentExpression(true, CHECK_OK);
- result->Add(argument);
+ result->Add(argument, zone());
if (result->length() > kMaxNumFunctionParameters) {
ReportMessageAt(scanner().location(), "too_many_arguments",
Vector<const char*>::empty());
@@ -4341,6 +4321,7 @@ class SingletonLogger : public ParserRecorder {
int end,
const char* message,
const char* argument_opt) {
+ if (has_error_) return;
has_error_ = true;
start_ = start;
end_ = end;
@@ -4435,6 +4416,9 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
Handle<FixedArray> this_property_assignments;
FunctionLiteral::ParameterFlag duplicate_parameters =
FunctionLiteral::kNoDuplicateParameters;
+ FunctionLiteral::IsParenthesizedFlag parenthesized = parenthesized_function_
+ ? FunctionLiteral::kIsParenthesized
+ : FunctionLiteral::kNotParenthesized;
AstProperties ast_properties;
// Parse function body.
{ FunctionState function_state(this, scope, isolate());
@@ -4495,7 +4479,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
VariableMode fvar_mode = is_extended_mode() ? CONST_HARMONY : CONST;
fvar = new(zone()) Variable(top_scope_,
function_name, fvar_mode, true /* is valid LHS */,
- Variable::NORMAL, kCreatedInitialized);
+ Variable::NORMAL, kCreatedInitialized, Interface::NewConst());
VariableProxy* proxy = factory()->NewVariableProxy(fvar);
VariableDeclaration* fvar_declaration =
factory()->NewVariableDeclaration(proxy, fvar_mode, top_scope_);
@@ -4506,7 +4490,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
// The heuristics are:
// - It must not have been prohibited by the caller to Parse (some callers
// need a full AST).
- // - The outer scope must be trivial (only global variables in scope).
+ // - The outer scope must allow lazy compilation of inner functions.
// - The function mustn't be a function expression with an open parenthesis
// before; we consider that a hint that the function will be called
// immediately, and it would be a waste of time to make it lazily
@@ -4514,8 +4498,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
// These are all things we can know at this point, without looking at the
// function itself.
bool is_lazily_compiled = (mode() == PARSE_LAZILY &&
- top_scope_->outer_scope()->is_global_scope() &&
- top_scope_->HasTrivialOuterContext() &&
+ top_scope_->AllowsLazyCompilation() &&
!parenthesized_function_);
parenthesized_function_ = false; // The bit was set for this function only.
@@ -4584,18 +4567,20 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
}
if (!is_lazily_compiled) {
- body = new(zone()) ZoneList<Statement*>(8);
+ ParsingModeScope parsing_mode(this, PARSE_EAGERLY);
+ body = new(zone()) ZoneList<Statement*>(8, zone());
if (fvar != NULL) {
- VariableProxy* fproxy =
- top_scope_->NewUnresolved(factory(), function_name);
+ VariableProxy* fproxy = top_scope_->NewUnresolved(
+ factory(), function_name, Interface::NewConst());
fproxy->BindTo(fvar);
body->Add(factory()->NewExpressionStatement(
factory()->NewAssignment(fvar_init_op,
fproxy,
factory()->NewThisFunction(),
- RelocInfo::kNoPosition)));
+ RelocInfo::kNoPosition)),
+ zone());
}
- ParseSourceElements(body, Token::RBRACE, false, CHECK_OK);
+ ParseSourceElements(body, Token::RBRACE, false, false, CHECK_OK);
materialized_literal_count = function_state.materialized_literal_count();
expected_property_count = function_state.expected_property_count();
@@ -4673,7 +4658,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
num_parameters,
duplicate_parameters,
type,
- FunctionLiteral::kIsFunction);
+ FunctionLiteral::kIsFunction,
+ parenthesized);
function_literal->set_function_token_position(function_token_position);
function_literal->set_ast_properties(&ast_properties);
@@ -4745,6 +4731,13 @@ Expression* Parser::ParseV8Intrinsic(bool* ok) {
return NULL;
}
+ // Check that the function is defined if it's an inline runtime call.
+ if (function == NULL && name->Get(0) == '_') {
+ ReportMessage("not_defined", Vector<Handle<String> >(&name, 1));
+ *ok = false;
+ return NULL;
+ }
+
// We have a valid intrinsics call or a call to a builtin.
return factory()->NewCallRuntime(name, function, args);
}
@@ -4992,7 +4985,7 @@ void Parser::RegisterTargetUse(Label* target, Target* stop) {
// the break target to any TargetCollectors passed on the stack.
for (Target* t = target_stack_; t != stop; t = t->previous()) {
TargetCollector* collector = t->node()->AsTargetCollector();
- if (collector != NULL) collector->AddTarget(target);
+ if (collector != NULL) collector->AddTarget(target, zone());
}
}
@@ -5039,9 +5032,9 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements(
elements, FAST_ELEMENTS, TENURED);
- ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2);
- args->Add(factory()->NewLiteral(type));
- args->Add(factory()->NewLiteral(array));
+ ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2, zone());
+ args->Add(factory()->NewLiteral(type), zone());
+ args->Add(factory()->NewLiteral(array), zone());
CallRuntime* call_constructor =
factory()->NewCallRuntime(constructor, NULL, args);
return factory()->NewThrow(call_constructor, scanner().location().beg_pos);
@@ -5053,8 +5046,10 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
RegExpParser::RegExpParser(FlatStringReader* in,
Handle<String>* error,
- bool multiline)
+ bool multiline,
+ Zone* zone)
: isolate_(Isolate::Current()),
+ zone_(zone),
error_(error),
captures_(NULL),
in_(in),
@@ -5085,7 +5080,7 @@ void RegExpParser::Advance() {
StackLimitCheck check(isolate());
if (check.HasOverflowed()) {
ReportError(CStrVector(Isolate::kStackOverflowMessage));
- } else if (isolate()->zone()->excess_allocation()) {
+ } else if (zone()->excess_allocation()) {
ReportError(CStrVector("Regular expression too large"));
} else {
current_ = in()->Get(next_pos_);
@@ -5150,7 +5145,7 @@ RegExpTree* RegExpParser::ParsePattern() {
// Atom Quantifier
RegExpTree* RegExpParser::ParseDisjunction() {
// Used to store current state while parsing subexpressions.
- RegExpParserState initial_state(NULL, INITIAL, 0);
+ RegExpParserState initial_state(NULL, INITIAL, 0, zone());
RegExpParserState* stored_state = &initial_state;
// Cache the builder in a local variable for quick access.
RegExpBuilder* builder = initial_state.builder();
@@ -5235,8 +5230,8 @@ RegExpTree* RegExpParser::ParseDisjunction() {
Advance();
// everything except \x0a, \x0d, \u2028 and \u2029
ZoneList<CharacterRange>* ranges =
- new(zone()) ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape('.', ranges);
+ new(zone()) ZoneList<CharacterRange>(2, zone());
+ CharacterRange::AddClassEscape('.', ranges, zone());
RegExpTree* atom = new(zone()) RegExpCharacterClass(ranges, false);
builder->AddAtom(atom);
break;
@@ -5262,17 +5257,16 @@ RegExpTree* RegExpParser::ParseDisjunction() {
Advance(2);
} else {
if (captures_ == NULL) {
- captures_ = new(zone()) ZoneList<RegExpCapture*>(2);
+ captures_ = new(zone()) ZoneList<RegExpCapture*>(2, zone());
}
if (captures_started() >= kMaxCaptures) {
ReportError(CStrVector("Too many captures") CHECK_FAILED);
}
- captures_->Add(NULL);
+ captures_->Add(NULL, zone());
}
// Store current state and begin new disjunction parsing.
- stored_state = new(zone()) RegExpParserState(stored_state,
- type,
- captures_started());
+ stored_state = new(zone()) RegExpParserState(stored_state, type,
+ captures_started(), zone());
builder = stored_state->builder();
continue;
}
@@ -5306,8 +5300,8 @@ RegExpTree* RegExpParser::ParseDisjunction() {
uc32 c = Next();
Advance(2);
ZoneList<CharacterRange>* ranges =
- new(zone()) ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(c, ranges);
+ new(zone()) ZoneList<CharacterRange>(2, zone());
+ CharacterRange::AddClassEscape(c, ranges, zone());
RegExpTree* atom = new(zone()) RegExpCharacterClass(ranges, false);
builder->AddAtom(atom);
break;
@@ -5782,11 +5776,12 @@ static const uc16 kNoCharClass = 0;
// escape (i.e., 's' means whitespace, from '\s').
static inline void AddRangeOrEscape(ZoneList<CharacterRange>* ranges,
uc16 char_class,
- CharacterRange range) {
+ CharacterRange range,
+ Zone* zone) {
if (char_class != kNoCharClass) {
- CharacterRange::AddClassEscape(char_class, ranges);
+ CharacterRange::AddClassEscape(char_class, ranges, zone);
} else {
- ranges->Add(range);
+ ranges->Add(range, zone);
}
}
@@ -5802,7 +5797,8 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
is_negated = true;
Advance();
}
- ZoneList<CharacterRange>* ranges = new(zone()) ZoneList<CharacterRange>(2);
+ ZoneList<CharacterRange>* ranges =
+ new(zone()) ZoneList<CharacterRange>(2, zone());
while (has_more() && current() != ']') {
uc16 char_class = kNoCharClass;
CharacterRange first = ParseClassAtom(&char_class CHECK_FAILED);
@@ -5813,25 +5809,25 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
// following code report an error.
break;
} else if (current() == ']') {
- AddRangeOrEscape(ranges, char_class, first);
- ranges->Add(CharacterRange::Singleton('-'));
+ AddRangeOrEscape(ranges, char_class, first, zone());
+ ranges->Add(CharacterRange::Singleton('-'), zone());
break;
}
uc16 char_class_2 = kNoCharClass;
CharacterRange next = ParseClassAtom(&char_class_2 CHECK_FAILED);
if (char_class != kNoCharClass || char_class_2 != kNoCharClass) {
// Either end is an escaped character class. Treat the '-' verbatim.
- AddRangeOrEscape(ranges, char_class, first);
- ranges->Add(CharacterRange::Singleton('-'));
- AddRangeOrEscape(ranges, char_class_2, next);
+ AddRangeOrEscape(ranges, char_class, first, zone());
+ ranges->Add(CharacterRange::Singleton('-'), zone());
+ AddRangeOrEscape(ranges, char_class_2, next, zone());
continue;
}
if (first.from() > next.to()) {
return ReportError(CStrVector(kRangeOutOfOrder) CHECK_FAILED);
}
- ranges->Add(CharacterRange::Range(first.from(), next.to()));
+ ranges->Add(CharacterRange::Range(first.from(), next.to()), zone());
} else {
- AddRangeOrEscape(ranges, char_class, first);
+ AddRangeOrEscape(ranges, char_class, first, zone());
}
}
if (!has_more()) {
@@ -5839,7 +5835,7 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
}
Advance();
if (ranges->length() == 0) {
- ranges->Add(CharacterRange::Everything());
+ ranges->Add(CharacterRange::Everything(), zone());
is_negated = !is_negated;
}
return new(zone()) RegExpCharacterClass(ranges, is_negated);
@@ -5947,31 +5943,6 @@ static ScriptDataImpl* DoPreParse(Utf16CharacterStream* source,
}
-// Preparse, but only collect data that is immediately useful,
-// even if the preparser data is only used once.
-ScriptDataImpl* ParserApi::PartialPreParse(Handle<String> source,
- v8::Extension* extension,
- int flags) {
- bool allow_lazy = FLAG_lazy && (extension == NULL);
- if (!allow_lazy) {
- // Partial preparsing is only about lazily compiled functions.
- // If we don't allow lazy compilation, the log data will be empty.
- return NULL;
- }
- flags |= kAllowLazy;
- PartialParserRecorder recorder;
- int source_length = source->length();
- if (source->IsExternalTwoByteString()) {
- ExternalTwoByteStringUtf16CharacterStream stream(
- Handle<ExternalTwoByteString>::cast(source), 0, source_length);
- return DoPreParse(&stream, flags, &recorder);
- } else {
- GenericStringUtf16CharacterStream stream(source, 0, source_length);
- return DoPreParse(&stream, flags, &recorder);
- }
-}
-
-
ScriptDataImpl* ParserApi::PreParse(Utf16CharacterStream* source,
v8::Extension* extension,
int flags) {
@@ -5986,9 +5957,10 @@ ScriptDataImpl* ParserApi::PreParse(Utf16CharacterStream* source,
bool RegExpParser::ParseRegExp(FlatStringReader* input,
bool multiline,
- RegExpCompileData* result) {
+ RegExpCompileData* result,
+ Zone* zone) {
ASSERT(result != NULL);
- RegExpParser parser(input, &result->error, multiline);
+ RegExpParser parser(input, &result->error, multiline, zone);
RegExpTree* tree = parser.ParsePattern();
if (parser.failed()) {
ASSERT(tree == NULL);
@@ -6009,7 +5981,6 @@ bool RegExpParser::ParseRegExp(FlatStringReader* input,
bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
ASSERT(info->function() == NULL);
FunctionLiteral* result = NULL;
- Handle<Script> script = info->script();
ASSERT((parsing_flags & kLanguageModeMask) == CLASSIC_MODE);
if (!info->is_native() && FLAG_harmony_scoping) {
// Harmony scoping is requested.
@@ -6024,15 +5995,15 @@ bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
}
if (info->is_lazy()) {
ASSERT(!info->is_eval());
- Parser parser(script, parsing_flags, NULL, NULL);
+ Parser parser(info, parsing_flags, NULL, NULL);
if (info->shared_info()->is_function()) {
- result = parser.ParseLazy(info);
+ result = parser.ParseLazy();
} else {
- result = parser.ParseProgram(info);
+ result = parser.ParseProgram();
}
} else {
ScriptDataImpl* pre_data = info->pre_parse_data();
- Parser parser(script, parsing_flags, info->extension(), pre_data);
+ Parser parser(info, parsing_flags, info->extension(), pre_data);
if (pre_data != NULL && pre_data->has_error()) {
Scanner::Location loc = pre_data->MessageLocation();
const char* message = pre_data->BuildMessage();
@@ -6045,7 +6016,7 @@ bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
DeleteArray(args.start());
ASSERT(info->isolate()->has_pending_exception());
} else {
- result = parser.ParseProgram(info);
+ result = parser.ParseProgram();
}
}
info->SetFunction(result);
diff --git a/src/3rdparty/v8/src/parser.h b/src/3rdparty/v8/src/parser.h
index b4d8825..93fd1b8 100644
--- a/src/3rdparty/v8/src/parser.h
+++ b/src/3rdparty/v8/src/parser.h
@@ -175,12 +175,6 @@ class ParserApi {
static ScriptDataImpl* PreParse(Utf16CharacterStream* source,
v8::Extension* extension,
int flags);
-
- // Preparser that only does preprocessing that makes sense if only used
- // immediately after.
- static ScriptDataImpl* PartialPreParse(Handle<String> source,
- v8::Extension* extension,
- int flags);
};
// ----------------------------------------------------------------------------
@@ -200,12 +194,12 @@ class BufferedZoneList {
// Adds element at end of list. This element is buffered and can
// be read using last() or removed using RemoveLast until a new Add or until
// RemoveLast or GetList has been called.
- void Add(T* value) {
+ void Add(T* value, Zone* zone) {
if (last_ != NULL) {
if (list_ == NULL) {
- list_ = new ZoneList<T*>(initial_size);
+ list_ = new(zone) ZoneList<T*>(initial_size, zone);
}
- list_->Add(last_);
+ list_->Add(last_, zone);
}
last_ = value;
}
@@ -250,12 +244,12 @@ class BufferedZoneList {
return length + ((last_ == NULL) ? 0 : 1);
}
- ZoneList<T*>* GetList() {
+ ZoneList<T*>* GetList(Zone* zone) {
if (list_ == NULL) {
- list_ = new ZoneList<T*>(initial_size);
+ list_ = new(zone) ZoneList<T*>(initial_size, zone);
}
if (last_ != NULL) {
- list_->Add(last_);
+ list_->Add(last_, zone);
last_ = NULL;
}
return list_;
@@ -270,7 +264,7 @@ class BufferedZoneList {
// Accumulates RegExp atoms and assertions into lists of terms and alternatives.
class RegExpBuilder: public ZoneObject {
public:
- RegExpBuilder();
+ explicit RegExpBuilder(Zone* zone);
void AddCharacter(uc16 character);
// "Adds" an empty expression. Does nothing except consume a
// following quantifier
@@ -285,7 +279,7 @@ class RegExpBuilder: public ZoneObject {
void FlushCharacters();
void FlushText();
void FlushTerms();
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
Zone* zone_;
bool pending_empty_;
@@ -306,11 +300,13 @@ class RegExpParser {
public:
RegExpParser(FlatStringReader* in,
Handle<String>* error,
- bool multiline_mode);
+ bool multiline_mode,
+ Zone* zone);
static bool ParseRegExp(FlatStringReader* input,
bool multiline,
- RegExpCompileData* result);
+ RegExpCompileData* result,
+ Zone* zone);
RegExpTree* ParsePattern();
RegExpTree* ParseDisjunction();
@@ -368,9 +364,10 @@ class RegExpParser {
public:
RegExpParserState(RegExpParserState* previous_state,
SubexpressionType group_type,
- int disjunction_capture_index)
+ int disjunction_capture_index,
+ Zone* zone)
: previous_state_(previous_state),
- builder_(new RegExpBuilder()),
+ builder_(new(zone) RegExpBuilder(zone)),
group_type_(group_type),
disjunction_capture_index_(disjunction_capture_index) {}
// Parser state of containing expression, if any.
@@ -397,7 +394,7 @@ class RegExpParser {
};
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return zone_; }
uc32 current() { return current_; }
bool has_more() { return has_more_; }
@@ -407,6 +404,7 @@ class RegExpParser {
void ScanForCaptures();
Isolate* isolate_;
+ Zone* zone_;
Handle<String>* error_;
ZoneList<RegExpCapture*>* captures_;
FlatStringReader* in_;
@@ -430,7 +428,7 @@ class SingletonLogger;
class Parser {
public:
- Parser(Handle<Script> script,
+ Parser(CompilationInfo* info,
int parsing_flags, // Combination of ParsingFlags
v8::Extension* extension,
ScriptDataImpl* pre_data);
@@ -440,8 +438,8 @@ class Parser {
}
// Returns NULL if parsing failed.
- FunctionLiteral* ParseProgram(CompilationInfo* info);
- FunctionLiteral* ParseLazy(CompilationInfo* info);
+ FunctionLiteral* ParseProgram();
+ FunctionLiteral* ParseLazy();
void ReportMessageAt(Scanner::Location loc,
const char* message,
@@ -456,7 +454,7 @@ class Parser {
// construct a hashable id, so if more than 2^17 are allowed, this
// should be checked.
static const int kMaxNumFunctionParameters = 32766;
- static const int kMaxNumFunctionLocals = 32767;
+ static const int kMaxNumFunctionLocals = 131071; // 2^17-1
enum Mode {
PARSE_LAZILY,
@@ -538,15 +536,28 @@ class Parser {
AstNodeFactory<AstConstructionVisitor> factory_;
};
+ class ParsingModeScope BASE_EMBEDDED {
+ public:
+ ParsingModeScope(Parser* parser, Mode mode)
+ : parser_(parser),
+ old_mode_(parser->mode()) {
+ parser_->mode_ = mode;
+ }
+ ~ParsingModeScope() {
+ parser_->mode_ = old_mode_;
+ }
+ private:
+ Parser* parser_;
+ Mode old_mode_;
+ };
-
- FunctionLiteral* ParseLazy(CompilationInfo* info,
- Utf16CharacterStream* source,
+ FunctionLiteral* ParseLazy(Utf16CharacterStream* source,
ZoneScope* zone_scope);
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return zone_; }
+ CompilationInfo* info() const { return info_; }
// Called by ParseProgram after setting up the scanner.
FunctionLiteral* DoParseProgram(CompilationInfo* info,
@@ -568,7 +579,7 @@ class Parser {
return top_scope_->is_extended_mode();
}
Scope* DeclarationScope(VariableMode mode) {
- return (mode == LET || mode == CONST_HARMONY)
+ return IsLexicalVariableMode(mode)
? top_scope_ : top_scope_->DeclarationScope();
}
@@ -579,10 +590,10 @@ class Parser {
// which is set to false if parsing failed; it is unchanged otherwise.
// By making the 'exception handling' explicit, we are forced to check
// for failure at the call sites.
- void* ParseSourceElements(ZoneList<Statement*>* processor,
- int end_token, bool is_eval, bool* ok);
+ void* ParseSourceElements(ZoneList<Statement*>* processor, int end_token,
+ bool is_eval, bool is_global, bool* ok);
Statement* ParseModuleElement(ZoneStringList* labels, bool* ok);
- Block* ParseModuleDeclaration(ZoneStringList* names, bool* ok);
+ Statement* ParseModuleDeclaration(ZoneStringList* names, bool* ok);
Module* ParseModule(bool* ok);
Module* ParseModuleLiteral(bool* ok);
Module* ParseModulePath(bool* ok);
@@ -767,7 +778,7 @@ class Parser {
// Parser support
VariableProxy* NewUnresolved(Handle<String> name,
VariableMode mode,
- Interface* interface = Interface::NewValue());
+ Interface* interface);
void Declare(Declaration* declaration, bool resolve, bool* ok);
bool TargetStackContainsLabel(Handle<String> label);
@@ -834,6 +845,8 @@ class Parser {
// so never lazily compile it.
bool parenthesized_function_;
+ Zone* zone_;
+ CompilationInfo* info_;
friend class BlockState;
friend class FunctionState;
};
diff --git a/src/3rdparty/v8/src/platform-cygwin.cc b/src/3rdparty/v8/src/platform-cygwin.cc
index 089ea38..24e256a 100644
--- a/src/3rdparty/v8/src/platform-cygwin.cc
+++ b/src/3rdparty/v8/src/platform-cygwin.cc
@@ -359,6 +359,12 @@ bool VirtualMemory::Guard(void* address) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) {}
diff --git a/src/3rdparty/v8/src/platform-freebsd.cc b/src/3rdparty/v8/src/platform-freebsd.cc
index 511759c..1da4605 100644
--- a/src/3rdparty/v8/src/platform-freebsd.cc
+++ b/src/3rdparty/v8/src/platform-freebsd.cc
@@ -456,6 +456,12 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
pthread_t thread_; // Thread handle for pthread.
diff --git a/src/3rdparty/v8/src/platform-linux.cc b/src/3rdparty/v8/src/platform-linux.cc
index 18f59dd..e6c328f 100644
--- a/src/3rdparty/v8/src/platform-linux.cc
+++ b/src/3rdparty/v8/src/platform-linux.cc
@@ -53,6 +53,13 @@
#include <errno.h>
#include <stdarg.h>
+// GLibc on ARM defines mcontext_t has a typedef for 'struct sigcontext'.
+// Old versions of the C library <signal.h> didn't define the type.
+#if defined(__ANDROID__) && !defined(__BIONIC_HAVE_UCONTEXT_T) && \
+ defined(__arm__) && !defined(__BIONIC_HAVE_STRUCT_SIGCONTEXT)
+#include <asm/sigcontext.h>
+#endif
+
#undef MAP_TYPE
#include "v8.h"
@@ -132,12 +139,18 @@ bool OS::ArmCpuHasFeature(CpuFeature feature) {
// facility is universally available on the ARM architectures,
// so it's up to individual OSes to provide such.
switch (feature) {
+ case VFP2:
+ search_string = "vfp";
+ break;
case VFP3:
search_string = "vfpv3";
break;
case ARMv7:
search_string = "ARMv7";
break;
+ case SUDIV:
+ search_string = "idiva";
+ break;
default:
UNREACHABLE();
}
@@ -161,6 +174,23 @@ bool OS::ArmCpuHasFeature(CpuFeature feature) {
}
+CpuImplementer OS::GetCpuImplementer() {
+ static bool use_cached_value = false;
+ static CpuImplementer cached_value = UNKNOWN_IMPLEMENTER;
+ if (use_cached_value) {
+ return cached_value;
+ }
+ if (CPUInfoContainsString("CPU implementer\t: 0x41")) {
+ cached_value = ARM_IMPLEMENTER;
+ } else if (CPUInfoContainsString("CPU implementer\t: 0x51")) {
+ cached_value = QUALCOMM_IMPLEMENTER;
+ } else {
+ cached_value = UNKNOWN_IMPLEMENTER;
+ }
+ use_cached_value = true;
+ return cached_value;
+}
+
bool OS::ArmUsingHardFloat() {
// GCC versions 4.6 and above define __ARM_PCS or __ARM_PCS_VFP to specify
// the Floating Point ABI used (PCS stands for Procedure Call Standard).
@@ -197,6 +227,7 @@ bool OS::ArmUsingHardFloat() {
#endif
#undef GCC_VERSION
}
+
#endif // def __arm__
@@ -501,9 +532,6 @@ void OS::LogSharedLibraryAddresses() {
}
-static const char kGCFakeMmap[] = "/tmp/__v8_gc__";
-
-
void OS::SignalCodeMovingGC() {
// Support for ll_prof.py.
//
@@ -514,7 +542,7 @@ void OS::SignalCodeMovingGC() {
// by the kernel and allows us to synchronize V8 code log and the
// kernel log.
int size = sysconf(_SC_PAGESIZE);
- FILE* f = fopen(kGCFakeMmap, "w+");
+ FILE* f = fopen(FLAG_gc_fake_mmap, "w+");
void* addr = mmap(OS::GetRandomMmapAddr(),
size,
PROT_READ | PROT_EXEC,
@@ -693,6 +721,11 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ return true;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) {}
@@ -903,32 +936,30 @@ Semaphore* OS::CreateSemaphore(int count) {
}
-#if !defined(__GLIBC__) && (defined(__arm__) || defined(__thumb__))
-// Android runs a fairly new Linux kernel, so signal info is there,
-// but the C library doesn't have the structs defined.
+#if defined(__ANDROID__) && !defined(__BIONIC_HAVE_UCONTEXT_T)
+
+// Not all versions of Android's C library provide ucontext_t.
+// Detect this and provide custom but compatible definitions. Note that these
+// follow the GLibc naming convention to access register values from
+// mcontext_t.
+//
+// See http://code.google.com/p/android/issues/detail?id=34784
+
+#if defined(__arm__)
-struct sigcontext {
- uint32_t trap_no;
- uint32_t error_code;
- uint32_t oldmask;
- uint32_t gregs[16];
- uint32_t arm_cpsr;
- uint32_t fault_address;
-};
-typedef uint32_t __sigset_t;
typedef struct sigcontext mcontext_t;
+
typedef struct ucontext {
uint32_t uc_flags;
struct ucontext* uc_link;
stack_t uc_stack;
mcontext_t uc_mcontext;
- __sigset_t uc_sigmask;
+ // Other fields are not used by V8, don't define them here.
} ucontext_t;
-enum ArmRegisters {R15 = 15, R13 = 13, R11 = 11};
-#elif !defined(__GLIBC__) && defined(__mips__)
+#elif defined(__mips__)
// MIPS version of sigcontext, for Android bionic.
-struct sigcontext {
+typedef struct {
uint32_t regmask;
uint32_t status;
uint64_t pc;
@@ -947,44 +978,44 @@ struct sigcontext {
uint32_t lo2;
uint32_t hi3;
uint32_t lo3;
-};
-typedef uint32_t __sigset_t;
-typedef struct sigcontext mcontext_t;
+} mcontext_t;
+
typedef struct ucontext {
uint32_t uc_flags;
struct ucontext* uc_link;
stack_t uc_stack;
mcontext_t uc_mcontext;
- __sigset_t uc_sigmask;
+ // Other fields are not used by V8, don't define them here.
} ucontext_t;
-#elif !defined(__GLIBC__) && defined(__i386__)
+#elif defined(__i386__)
// x86 version for Android.
-struct sigcontext {
+typedef struct {
uint32_t gregs[19];
void* fpregs;
uint32_t oldmask;
uint32_t cr2;
-};
+} mcontext_t;
-typedef uint32_t __sigset_t;
-typedef struct sigcontext mcontext_t;
+typedef uint32_t kernel_sigset_t[2]; // x86 kernel uses 64-bit signal masks
typedef struct ucontext {
uint32_t uc_flags;
struct ucontext* uc_link;
stack_t uc_stack;
mcontext_t uc_mcontext;
- __sigset_t uc_sigmask;
+ // Other fields are not used by V8, don't define them here.
} ucontext_t;
enum { REG_EBP = 6, REG_ESP = 7, REG_EIP = 14 };
#endif
+#endif // __ANDROID__ && !defined(__BIONIC_HAVE_UCONTEXT_T)
static int GetThreadID() {
- // Glibc doesn't provide a wrapper for gettid(2).
-#if defined(ANDROID)
- return syscall(__NR_gettid);
+#if defined(__ANDROID__)
+ // Android's C library provides gettid(2).
+ return gettid();
#else
+ // Glibc doesn't provide a wrapper for gettid(2).
return syscall(SYS_gettid);
#endif
}
@@ -1023,8 +1054,10 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
sample->sp = reinterpret_cast<Address>(mcontext.gregs[REG_RSP]);
sample->fp = reinterpret_cast<Address>(mcontext.gregs[REG_RBP]);
#elif V8_HOST_ARCH_ARM
-// An undefined macro evaluates to 0, so this applies to Android's Bionic also.
-#if (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 3))
+#if defined(__GLIBC__) && !defined(__UCLIBC__) && \
+ (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 3))
+ // Old GLibc ARM versions used a gregs[] array to access the register
+ // values from mcontext_t.
sample->pc = reinterpret_cast<Address>(mcontext.gregs[R15]);
sample->sp = reinterpret_cast<Address>(mcontext.gregs[R13]);
sample->fp = reinterpret_cast<Address>(mcontext.gregs[R11]);
@@ -1032,7 +1065,8 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
sample->pc = reinterpret_cast<Address>(mcontext.arm_pc);
sample->sp = reinterpret_cast<Address>(mcontext.arm_sp);
sample->fp = reinterpret_cast<Address>(mcontext.arm_fp);
-#endif // (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 3))
+#endif // defined(__GLIBC__) && !defined(__UCLIBC__) &&
+ // (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 3))
#elif V8_HOST_ARCH_MIPS
sample->pc = reinterpret_cast<Address>(mcontext.pc);
sample->sp = reinterpret_cast<Address>(mcontext.gregs[29]);
diff --git a/src/3rdparty/v8/src/platform-macos.cc b/src/3rdparty/v8/src/platform-macos.cc
index a937ed3..22d2bcf 100644
--- a/src/3rdparty/v8/src/platform-macos.cc
+++ b/src/3rdparty/v8/src/platform-macos.cc
@@ -471,6 +471,11 @@ bool VirtualMemory::ReleaseRegion(void* address, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) {}
@@ -682,17 +687,27 @@ Mutex* OS::CreateMutex() {
class MacOSSemaphore : public Semaphore {
public:
explicit MacOSSemaphore(int count) {
- semaphore_create(mach_task_self(), &semaphore_, SYNC_POLICY_FIFO, count);
+ int r;
+ r = semaphore_create(mach_task_self(),
+ &semaphore_,
+ SYNC_POLICY_FIFO,
+ count);
+ ASSERT(r == KERN_SUCCESS);
}
~MacOSSemaphore() {
- semaphore_destroy(mach_task_self(), semaphore_);
+ int r;
+ r = semaphore_destroy(mach_task_self(), semaphore_);
+ ASSERT(r == KERN_SUCCESS);
}
- // The MacOS mach semaphore documentation claims it does not have spurious
- // wakeups, the way pthreads semaphores do. So the code from the linux
- // platform is not needed here.
- void Wait() { semaphore_wait(semaphore_); }
+ void Wait() {
+ int r;
+ do {
+ r = semaphore_wait(semaphore_);
+ ASSERT(r == KERN_SUCCESS || r == KERN_ABORTED);
+ } while (r == KERN_ABORTED);
+ }
bool Wait(int timeout);
diff --git a/src/3rdparty/v8/src/platform-nullos.cc b/src/3rdparty/v8/src/platform-nullos.cc
index 679ef8e..ccd2123 100644
--- a/src/3rdparty/v8/src/platform-nullos.cc
+++ b/src/3rdparty/v8/src/platform-nullos.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -215,6 +215,11 @@ double OS::nan_value() {
}
+CpuImplementer OS::GetCpuImplementer() {
+ UNIMPLEMENTED();
+}
+
+
bool OS::ArmCpuHasFeature(CpuFeature feature) {
UNIMPLEMENTED();
}
@@ -335,6 +340,12 @@ bool VirtualMemory::Guard(void* address) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() {
diff --git a/src/3rdparty/v8/src/platform-openbsd.cc b/src/3rdparty/v8/src/platform-openbsd.cc
index ba33a84..292927b 100644
--- a/src/3rdparty/v8/src/platform-openbsd.cc
+++ b/src/3rdparty/v8/src/platform-openbsd.cc
@@ -323,9 +323,6 @@ void OS::LogSharedLibraryAddresses() {
}
-static const char kGCFakeMmap[] = "/tmp/__v8_gc__";
-
-
void OS::SignalCodeMovingGC() {
// Support for ll_prof.py.
//
@@ -336,7 +333,7 @@ void OS::SignalCodeMovingGC() {
// by the kernel and allows us to synchronize V8 code log and the
// kernel log.
int size = sysconf(_SC_PAGESIZE);
- FILE* f = fopen(kGCFakeMmap, "w+");
+ FILE* f = fopen(FLAG_gc_fake_mmap, "w+");
void* addr = mmap(NULL, size, PROT_READ | PROT_EXEC, MAP_PRIVATE,
fileno(f), 0);
ASSERT(addr != MAP_FAILED);
@@ -507,6 +504,12 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) {}
diff --git a/src/3rdparty/v8/src/platform-posix.cc b/src/3rdparty/v8/src/platform-posix.cc
index d942d78..3bc8373 100644
--- a/src/3rdparty/v8/src/platform-posix.cc
+++ b/src/3rdparty/v8/src/platform-posix.cc
@@ -153,6 +153,11 @@ double OS::nan_value() {
}
+int OS::GetCurrentProcessId() {
+ return static_cast<int>(getpid());
+}
+
+
// ----------------------------------------------------------------------------
// POSIX date/time support.
//
diff --git a/src/3rdparty/v8/src/platform-qnx.cc b/src/3rdparty/v8/src/platform-qnx.cc
index 46d69b8..e535756 100644
--- a/src/3rdparty/v8/src/platform-qnx.cc
+++ b/src/3rdparty/v8/src/platform-qnx.cc
@@ -125,12 +125,15 @@ static bool CPUInfoContainsString(const char * search_string) {
bool OS::ArmCpuHasFeature(CpuFeature feature) {
switch (feature) {
+ case VFP2:
case VFP3:
// All shipping devices currently support this and QNX has no easy way to
// determine this at runtime.
return true;
case ARMv7:
return (SYSPAGE_ENTRY(cpuinfo)->flags & ARM_CPU_FLAG_V7) != 0;
+ case SUDIV:
+ return CPUInfoContainsString("idiva");
default:
UNREACHABLE();
}
@@ -138,6 +141,12 @@ bool OS::ArmCpuHasFeature(CpuFeature feature) {
return false;
}
+CpuImplementer OS::GetCpuImplementer() {
+ // We do NOT return QUALCOMM_IMPLEMENTER, even though /proc/cpuinfo
+ // has "CPU implementer : 0x51" in it, as that leads to a runtime
+ // error on the first JS function call.
+ return UNKNOWN_IMPLEMENTER;
+}
bool OS::ArmUsingHardFloat() {
// GCC versions 4.6 and above define __ARM_PCS or __ARM_PCS_VFP to specify
@@ -377,7 +386,7 @@ void OS::LogSharedLibraryAddresses() {
return;
}
- /* Get the number of map entrys. */
+ /* Get the number of map entries. */
if (devctl(proc_fd, DCMD_PROC_MAPINFO, NULL, 0, &num) != EOK) {
close(proc_fd);
return;
@@ -389,7 +398,7 @@ void OS::LogSharedLibraryAddresses() {
return;
}
- /* Fill the map entrys. */
+ /* Fill the map entries. */
if (devctl(proc_fd, DCMD_PROC_PAGEDATA, mapinfos, num * sizeof(procfs_mapinfo), &num) != EOK) {
free(mapinfos);
close(proc_fd);
@@ -595,6 +604,10 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
return munmap(base, size) == 0;
}
+bool VirtualMemory::HasLazyCommits() {
+ return false;
+}
+
class Thread::PlatformData : public Malloced {
public:
@@ -980,7 +993,7 @@ class SignalSender : public Thread {
void Sleep(SleepInterval full_or_half) {
// Convert ms to us and subtract 100 us to compensate delays
- // occuring during signal delivery.
+ // occurring during signal delivery.
useconds_t interval = interval_ * 1000 - 100;
if (full_or_half == HALF_INTERVAL) interval /= 2;
int result = usleep(interval);
diff --git a/src/3rdparty/v8/src/platform-solaris.cc b/src/3rdparty/v8/src/platform-solaris.cc
index 4248ea2..5652741 100644
--- a/src/3rdparty/v8/src/platform-solaris.cc
+++ b/src/3rdparty/v8/src/platform-solaris.cc
@@ -125,12 +125,8 @@ const char* OS::LocalTimezone(double time) {
double OS::LocalTimeOffset() {
- // On Solaris, struct tm does not contain a tm_gmtoff field.
- time_t utc = time(NULL);
- ASSERT(utc != -1);
- struct tm* loc = localtime(&utc);
- ASSERT(loc != NULL);
- return static_cast<double>((mktime(loc) - utc) * msPerSecond);
+ tzset();
+ return -static_cast<double>(timezone * msPerSecond);
}
@@ -448,6 +444,12 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) { }
diff --git a/src/3rdparty/v8/src/platform-tls-win32.h b/src/3rdparty/v8/src/platform-tls-win32.h
index 4056e8c..a981d18 100644
--- a/src/3rdparty/v8/src/platform-tls-win32.h
+++ b/src/3rdparty/v8/src/platform-tls-win32.h
@@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
-#if defined(_WIN32) && !defined(_WIN64)
+#if defined(_WIN32) && !defined(_WIN64) && !defined(_WIN32_WCE)
#define V8_FAST_TLS_SUPPORTED 1
diff --git a/src/3rdparty/v8/src/platform-win32.cc b/src/3rdparty/v8/src/platform-win32.cc
index ba57803..76e35f5 100644
--- a/src/3rdparty/v8/src/platform-win32.cc
+++ b/src/3rdparty/v8/src/platform-win32.cc
@@ -58,6 +58,20 @@ int strncasecmp(const char* s1, const char* s2, int n) {
#endif // _MSC_VER
+#ifdef _WIN32_WCE
+// Convert a Latin1 string into a utf16 string
+wchar_t* wce_mbtowc(const char* a) {
+ int length = strlen(a);
+ wchar_t *wbuf = new wchar_t[length];
+
+ for (int i = 0; i < length; ++i)
+ wbuf[i] = (wchar_t)a[i];
+
+ return wbuf;
+}
+#endif // _WIN32_WCE
+
+
// Extra functions for MinGW. Most of these are the _s functions which are in
// the Microsoft Visual Studio C++ CRT.
#ifdef __MINGW32__
@@ -162,6 +176,13 @@ void OS::MemCopy(void* dest, const void* src, size_t size) {
}
#endif // V8_TARGET_ARCH_IA32
+#ifdef _WIN32_WCE
+// TODO: Implement
+CpuImplementer OS::GetCpuImplementer() {
+ return UNKNOWN_IMPLEMENTER;
+}
+#endif // _WIN32_WCE
+
#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
static ModuloFunction modulo_function = NULL;
@@ -377,7 +398,9 @@ void Time::TzSet() {
if (tz_initialized_) return;
// Initialize POSIX time zone data.
+#ifndef _WIN32_WCE
_tzset();
+#endif // _WIN32_WCE
// Obtain timezone information from operating system.
memset(&tzinfo_, 0, sizeof(tzinfo_));
if (GetTimeZoneInformation(&tzinfo_) == TIME_ZONE_ID_INVALID) {
@@ -489,6 +512,7 @@ void Time::SetToCurrentTime() {
// Also, adding the time-zone offset to the input must not overflow.
// The function EquivalentTime() in date.js guarantees this.
int64_t Time::LocalOffset() {
+#ifndef _WIN32_WCE
// Initialize timezone information, if needed.
TzSet();
@@ -519,6 +543,11 @@ int64_t Time::LocalOffset() {
} else {
return tzinfo_.Bias * -kMsPerMinute;
}
+#else
+ // Windows CE has a different handling of Timezones.
+ // TODO: Adapt this for Windows CE
+ return 0;
+#endif
}
@@ -570,6 +599,14 @@ void OS::PostSetUp() {
#endif
}
+#ifdef V8_TARGET_ARCH_ARM
+// TODO: Implement
+// Windows CE is the only platform right now that supports ARM.
+bool OS::ArmCpuHasFeature(CpuFeature feature) {
+ return false;
+}
+#endif // V8_TARGET_ARCH_ARM
+
// Returns the accumulated user time for thread.
int OS::GetUserTime(uint32_t* secs, uint32_t* usecs) {
@@ -634,6 +671,11 @@ int OS::GetLastError() {
}
+int OS::GetCurrentProcessId() {
+ return static_cast<int>(::GetCurrentProcessId());
+}
+
+
// ----------------------------------------------------------------------------
// Win32 console output.
//
@@ -659,6 +701,7 @@ static OutputMode output_mode = UNKNOWN; // Current output mode.
static bool HasConsole() {
// Only check the first time. Eventual race conditions are not a problem,
// because all threads will eventually determine the same mode.
+#ifndef _WIN32_WCE
if (output_mode == UNKNOWN) {
// We cannot just check that the standard output is attached to a console
// because this would fail if output is redirected to a file. Therefore we
@@ -671,6 +714,10 @@ static bool HasConsole() {
output_mode = ODS;
}
return output_mode == CONSOLE;
+#else
+ // Windows CE has no shell enabled in the standard BSP
+ return false;
+#endif // _WIN32_WCE
}
@@ -683,7 +730,14 @@ static void VPrintHelper(FILE* stream, const char* format, va_list args) {
// does not crash.
EmbeddedVector<char, 4096> buffer;
OS::VSNPrintF(buffer, format, args);
+#ifdef _WIN32_WCE
+ wchar_t wbuf[4096];
+ for (int i = 0; i < 4096; ++i)
+ wbuf[i] = (wchar_t)buffer.start()[i];
+ OutputDebugStringW(wbuf);
+#else
OutputDebugStringA(buffer.start());
+#endif // _WIN32_WCE
}
}
@@ -699,23 +753,30 @@ FILE* OS::FOpen(const char* path, const char* mode) {
bool OS::Remove(const char* path) {
+#ifndef _WIN32_WCE
return (DeleteFileA(path) != 0);
+#else
+ wchar_t *wpath = wce_mbtowc(path);
+ bool ret = (DeleteFileW(wpath) != 0);
+ delete wpath;
+ return ret;
+#endif // _WIN32_WCE
}
FILE* OS::OpenTemporaryFile() {
// tmpfile_s tries to use the root dir, don't use it.
- char tempPathBuffer[MAX_PATH];
+ wchar_t tempPathBuffer[MAX_PATH];
DWORD path_result = 0;
- path_result = GetTempPathA(MAX_PATH, tempPathBuffer);
+ path_result = GetTempPathW(MAX_PATH, tempPathBuffer);
if (path_result > MAX_PATH || path_result == 0) return NULL;
UINT name_result = 0;
- char tempNameBuffer[MAX_PATH];
- name_result = GetTempFileNameA(tempPathBuffer, "", 0, tempNameBuffer);
+ wchar_t tempNameBuffer[MAX_PATH];
+ name_result = GetTempFileNameW(tempPathBuffer, L"", 0, tempNameBuffer);
if (name_result == 0) return NULL;
- FILE* result = FOpen(tempNameBuffer, "w+"); // Same mode as tmpfile uses.
+ FILE* result = _wfopen(tempNameBuffer, L"w+"); // Same mode as tmpfile uses.
if (result != NULL) {
- Remove(tempNameBuffer); // Delete on close.
+ DeleteFileW(tempNameBuffer); // Delete on close.
}
return result;
}
@@ -969,7 +1030,11 @@ void OS::Abort() {
DebugBreak();
} else {
// Make the MSVCRT do a silent abort.
+#ifndef _WIN32_WCE
raise(SIGABRT);
+#else
+ exit(3);
+#endif // _WIN32_WCE
}
}
@@ -1006,8 +1071,15 @@ class Win32MemoryMappedFile : public OS::MemoryMappedFile {
OS::MemoryMappedFile* OS::MemoryMappedFile::open(const char* name) {
// Open a physical file
+#ifndef _WIN32_WCE
HANDLE file = CreateFileA(name, GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
+#else
+ wchar_t *wname = wce_mbtowc(name);
+ HANDLE file = CreateFileW(wname, GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
+ delete wname;
+#endif // _WIN32_WCE
if (file == INVALID_HANDLE_VALUE) return NULL;
int size = static_cast<int>(GetFileSize(file, NULL));
@@ -1026,8 +1098,15 @@ OS::MemoryMappedFile* OS::MemoryMappedFile::open(const char* name) {
OS::MemoryMappedFile* OS::MemoryMappedFile::create(const char* name, int size,
void* initial) {
// Open a physical file
+#ifndef _WIN32_WCE
HANDLE file = CreateFileA(name, GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_ALWAYS, 0, NULL);
+#else
+ wchar_t *wname = wce_mbtowc(name);
+ HANDLE file = CreateFileW(wname, GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_ALWAYS, 0, NULL);
+ delete wname;
+#endif // _WIN32_WCE
if (file == NULL) return NULL;
// Create a file mapping for the physical file
HANDLE file_mapping = CreateFileMapping(file, NULL,
@@ -1089,8 +1168,8 @@ Win32MemoryMappedFile::~Win32MemoryMappedFile() {
#define VOID void
#endif
-// DbgHelp isn't supported on MinGW yet
-#ifndef __MINGW32__
+// DbgHelp isn't supported on MinGW yet, nor does Windows CE have it
+#if !defined(__MINGW32__) && !defined(_WIN32_WCE)
// DbgHelp.h functions.
typedef BOOL (__stdcall *DLL_FUNC_TYPE(SymInitialize))(IN HANDLE hProcess,
IN PSTR UserSearchPath,
@@ -1561,6 +1640,12 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
+bool VirtualMemory::HasLazyCommits() {
+ // TODO(alph): implement for the platform.
+ return false;
+}
+
+
// ----------------------------------------------------------------------------
// Win32 thread support.
@@ -1613,6 +1698,7 @@ Thread::~Thread() {
// the Win32 function CreateThread(), because the CreateThread() does not
// initialize thread specific structures in the C runtime library.
void Thread::Start() {
+#ifndef _WIN32_WCE
data_->thread_ = reinterpret_cast<HANDLE>(
_beginthreadex(NULL,
static_cast<unsigned>(stack_size_),
@@ -1620,6 +1706,18 @@ void Thread::Start() {
this,
0,
&data_->thread_id_));
+#else
+ unsigned initflag = 0;
+ if (stack_size_ > 0)
+ initflag |= STACK_SIZE_PARAM_IS_A_RESERVATION;
+ data_->thread_ = reinterpret_cast<HANDLE>(
+ CreateThread( NULL,
+ static_cast<unsigned>(stack_size_),
+ (LPTHREAD_START_ROUTINE)ThreadEntry,
+ this,
+ initflag,
+ (LPDWORD)&data_->thread_id_));
+#endif // _WIN32_WCE
}
@@ -1714,7 +1812,7 @@ Mutex* OS::CreateMutex() {
class Win32Semaphore : public Semaphore {
public:
explicit Win32Semaphore(int count) {
- sem = ::CreateSemaphoreA(NULL, count, 0x7fffffff, NULL);
+ sem = ::CreateSemaphoreW(NULL, count, 0x7fffffff, NULL);
}
~Win32Semaphore() {
@@ -2062,10 +2160,17 @@ class SamplerThread : public Thread {
sample->pc = reinterpret_cast<Address>(context.Rip);
sample->sp = reinterpret_cast<Address>(context.Rsp);
sample->fp = reinterpret_cast<Address>(context.Rbp);
-#else
+#elif V8_HOST_ARCH_IA32
sample->pc = reinterpret_cast<Address>(context.Eip);
sample->sp = reinterpret_cast<Address>(context.Esp);
sample->fp = reinterpret_cast<Address>(context.Ebp);
+#elif V8_HOST_ARCH_ARM
+ // Taken from http://msdn.microsoft.com/en-us/library/aa448762.aspx
+ sample->pc = reinterpret_cast<Address>(context.Pc);
+ sample->sp = reinterpret_cast<Address>(context.Sp);
+ sample->fp = reinterpret_cast<Address>(context.R11);
+#else
+#error This Platform is not supported.
#endif
sampler->SampleStack(sample);
sampler->Tick(sample);
diff --git a/src/3rdparty/v8/src/platform.h b/src/3rdparty/v8/src/platform.h
index a9464de..f3ab08d 100644
--- a/src/3rdparty/v8/src/platform.h
+++ b/src/3rdparty/v8/src/platform.h
@@ -71,6 +71,24 @@ int signbit(double x);
int strncasecmp(const char* s1, const char* s2, int n);
+inline int lrint(double flt) {
+ int intgr;
+#if defined(V8_TARGET_ARCH_IA32)
+ __asm {
+ fld flt
+ fistp intgr
+ };
+#else
+ intgr = static_cast<int>(flt + 0.5);
+ if ((intgr & 1) != 0 && intgr - flt == 0.5) {
+ // If the number is halfway between two integers, round to the even one.
+ intgr--;
+ }
+#endif
+ return intgr;
+}
+
+
#endif // _MSC_VER
// Random is missing on both Visual Studio and MinGW.
@@ -89,7 +107,11 @@ namespace internal {
// Use AtomicWord for a machine-sized pointer. It is assumed that
// reads and writes of naturally aligned values of this type are atomic.
+#if defined(__OpenBSD__) && defined(__i386__)
+typedef Atomic32 AtomicWord;
+#else
typedef intptr_t AtomicWord;
+#endif
class Semaphore;
class Mutex;
@@ -286,6 +308,9 @@ class OS {
// Returns the double constant NAN
static double nan_value();
+ // Support runtime detection of Cpu implementer
+ static CpuImplementer GetCpuImplementer();
+
// Support runtime detection of VFP3 on ARM CPUs.
static bool ArmCpuHasFeature(CpuFeature feature);
@@ -317,6 +342,8 @@ class OS {
static const int kMinComplexMemCopy = 256;
#endif // V8_TARGET_ARCH_IA32
+ static int GetCurrentProcessId();
+
private:
static const int msPerSecond = 1000;
@@ -405,6 +432,11 @@ class VirtualMemory {
// and the same size it was reserved with.
static bool ReleaseRegion(void* base, size_t size);
+ // Returns true if OS performs lazy commits, i.e. the memory allocation call
+ // defers actual physical memory allocation till the first memory access.
+ // Otherwise returns false.
+ static bool HasLazyCommits();
+
private:
void* address_; // Start address of the virtual memory.
size_t size_; // Size of the virtual memory.
diff --git a/src/3rdparty/v8/src/preparser.cc b/src/3rdparty/v8/src/preparser.cc
index 0c17eec..21da4f8 100644
--- a/src/3rdparty/v8/src/preparser.cc
+++ b/src/3rdparty/v8/src/preparser.cc
@@ -602,14 +602,17 @@ PreParser::Statement PreParser::ParseSwitchStatement(bool* ok) {
if (token == i::Token::CASE) {
Expect(i::Token::CASE, CHECK_OK);
ParseExpression(true, CHECK_OK);
- Expect(i::Token::COLON, CHECK_OK);
- } else if (token == i::Token::DEFAULT) {
- Expect(i::Token::DEFAULT, CHECK_OK);
- Expect(i::Token::COLON, CHECK_OK);
} else {
- ParseStatement(CHECK_OK);
+ Expect(i::Token::DEFAULT, CHECK_OK);
}
+ Expect(i::Token::COLON, CHECK_OK);
token = peek();
+ while (token != i::Token::CASE &&
+ token != i::Token::DEFAULT &&
+ token != i::Token::RBRACE) {
+ ParseStatement(CHECK_OK);
+ token = peek();
+ }
}
Expect(i::Token::RBRACE, ok);
return Statement::Default();
diff --git a/src/3rdparty/v8/src/profile-generator-inl.h b/src/3rdparty/v8/src/profile-generator-inl.h
index 9afc52f..02e146f 100644
--- a/src/3rdparty/v8/src/profile-generator-inl.h
+++ b/src/3rdparty/v8/src/profile-generator-inl.h
@@ -84,6 +84,7 @@ CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
return gc_entry_;
case JS:
case COMPILER:
+ case PARALLEL_COMPILER_PROLOGUE:
// DOM events handlers are reported as OTHER / EXTERNAL entries.
// To avoid confusing people, let's put all these entries into
// one bucket.
@@ -118,32 +119,12 @@ int HeapEntry::set_children_index(int index) {
}
-int HeapEntry::set_retainers_index(int index) {
- retainers_index_ = index;
- int next_index = index + retainers_count_;
- retainers_count_ = 0;
- return next_index;
-}
-
-
HeapGraphEdge** HeapEntry::children_arr() {
ASSERT(children_index_ >= 0);
return &snapshot_->children()[children_index_];
}
-HeapGraphEdge** HeapEntry::retainers_arr() {
- ASSERT(retainers_index_ >= 0);
- return &snapshot_->retainers()[retainers_index_];
-}
-
-
-HeapEntry* HeapEntry::dominator() const {
- ASSERT(dominator_ >= 0);
- return &snapshot_->entries()[dominator_];
-}
-
-
SnapshotObjectId HeapObjectsMap::GetNthGcSubrootId(int delta) {
return kGcRootsFirstSubrootId + delta * kObjectIdStep;
}
diff --git a/src/3rdparty/v8/src/profile-generator.cc b/src/3rdparty/v8/src/profile-generator.cc
index da2a969..9839edf 100644
--- a/src/3rdparty/v8/src/profile-generator.cc
+++ b/src/3rdparty/v8/src/profile-generator.cc
@@ -169,6 +169,15 @@ const char* StringsStorage::GetName(int index) {
}
+size_t StringsStorage::GetUsedMemorySize() const {
+ size_t size = sizeof(*this);
+ size += sizeof(HashMap::Entry) * names_.capacity();
+ for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
+ size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
+ }
+ return size;
+}
+
const char* const CodeEntry::kEmptyNamePrefix = "";
@@ -964,16 +973,10 @@ HeapEntry::HeapEntry(HeapSnapshot* snapshot,
const char* name,
SnapshotObjectId id,
int self_size)
- : painted_(false),
- user_reachable_(false),
- dominator_(kNoEntry),
- type_(type),
- retainers_count_(0),
- retainers_index_(-1),
+ : type_(type),
children_count_(0),
children_index_(-1),
self_size_(self_size),
- retained_size_(0),
id_(id),
snapshot_(snapshot),
name_(name) { }
@@ -985,7 +988,6 @@ void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
HeapGraphEdge edge(type, name, this->index(), entry->index());
snapshot_->edges().Add(edge);
++children_count_;
- ++entry->retainers_count_;
}
@@ -995,7 +997,6 @@ void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
HeapGraphEdge edge(type, index, this->index(), entry->index());
snapshot_->edges().Add(edge);
++children_count_;
- ++entry->retainers_count_;
}
@@ -1007,9 +1008,8 @@ Handle<HeapObject> HeapEntry::GetHeapObject() {
void HeapEntry::Print(
const char* prefix, const char* edge_name, int max_depth, int indent) {
STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
- OS::Print("%6d %7d @%6u %*c %s%s: ",
- self_size(), retained_size(), id(),
- indent, ' ', prefix, edge_name);
+ OS::Print("%6d @%6u %*c %s%s: ",
+ self_size(), id(), indent, ' ', prefix, edge_name);
if (type() != kString) {
OS::Print("%s %.40s\n", TypeAsString(), name_);
} else {
@@ -1091,13 +1091,17 @@ template <size_t ptr_size> struct SnapshotSizeConstants;
template <> struct SnapshotSizeConstants<4> {
static const int kExpectedHeapGraphEdgeSize = 12;
- static const int kExpectedHeapEntrySize = 40;
+ static const int kExpectedHeapEntrySize = 24;
+ static const int kExpectedHeapSnapshotsCollectionSize = 96;
+ static const int kExpectedHeapSnapshotSize = 136;
static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};
template <> struct SnapshotSizeConstants<8> {
static const int kExpectedHeapGraphEdgeSize = 24;
- static const int kExpectedHeapEntrySize = 48;
+ static const int kExpectedHeapEntrySize = 32;
+ static const int kExpectedHeapSnapshotsCollectionSize = 144;
+ static const int kExpectedHeapSnapshotSize = 168;
static const uint64_t kMaxSerializableSnapshotRawSize =
static_cast<uint64_t>(6000) * MB;
};
@@ -1139,16 +1143,6 @@ void HeapSnapshot::RememberLastJSObjectId() {
}
-static void HeapEntryClearPaint(HeapEntry* entry_ptr) {
- entry_ptr->clear_paint();
-}
-
-
-void HeapSnapshot::ClearPaint() {
- entries_.Iterate(HeapEntryClearPaint);
-}
-
-
HeapEntry* HeapSnapshot::AddRootEntry() {
ASSERT(root_index_ == HeapEntry::kNoEntry);
ASSERT(entries_.is_empty()); // Root entry must be the first one.
@@ -1196,32 +1190,19 @@ HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
}
-void HeapSnapshot::FillChildrenAndRetainers() {
+void HeapSnapshot::FillChildren() {
ASSERT(children().is_empty());
children().Allocate(edges().length());
- ASSERT(retainers().is_empty());
- retainers().Allocate(edges().length());
int children_index = 0;
- int retainers_index = 0;
for (int i = 0; i < entries().length(); ++i) {
HeapEntry* entry = &entries()[i];
children_index = entry->set_children_index(children_index);
- retainers_index = entry->set_retainers_index(retainers_index);
}
ASSERT(edges().length() == children_index);
- ASSERT(edges().length() == retainers_index);
for (int i = 0; i < edges().length(); ++i) {
HeapGraphEdge* edge = &edges()[i];
edge->ReplaceToIndexWithEntry(this);
edge->from()->add_child(edge);
- edge->to()->add_retainer(edge);
- }
-}
-
-
-void HeapSnapshot::SetDominatorsToSelf() {
- for (int i = 0; i < entries_.length(); ++i) {
- entries_[i].set_dominator(&entries_[i]);
}
}
@@ -1275,16 +1256,18 @@ void HeapSnapshot::Print(int max_depth) {
template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
- return list.capacity() * sizeof(T);
+ return list.length() * sizeof(T) + sizeof(list);
}
size_t HeapSnapshot::RawSnapshotSize() const {
+ STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
+ sizeof(HeapSnapshot)); // NOLINT
return
+ sizeof(*this) +
GetMemoryUsedByList(entries_) +
GetMemoryUsedByList(edges_) +
GetMemoryUsedByList(children_) +
- GetMemoryUsedByList(retainers_) +
GetMemoryUsedByList(sorted_entries_);
}
@@ -1390,7 +1373,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
}
-void HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
+SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
UpdateHeapObjectsMap();
time_intervals_.Add(TimeInterval(next_id_));
int prefered_chunk_size = stream->GetChunkSize();
@@ -1420,7 +1403,7 @@ void HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
if (stats_buffer.length() >= prefered_chunk_size) {
OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
&stats_buffer.first(), stats_buffer.length());
- if (result == OutputStream::kAbort) return;
+ if (result == OutputStream::kAbort) return last_assigned_id();
stats_buffer.Clear();
}
}
@@ -1429,9 +1412,10 @@ void HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
if (!stats_buffer.is_empty()) {
OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
&stats_buffer.first(), stats_buffer.length());
- if (result == OutputStream::kAbort) return;
+ if (result == OutputStream::kAbort) return last_assigned_id();
}
stream->EndOfStream();
+ return last_assigned_id();
}
@@ -1478,6 +1462,15 @@ SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
}
+size_t HeapObjectsMap::GetUsedMemorySize() const {
+ return
+ sizeof(*this) +
+ sizeof(HashMap::Entry) * entries_map_.capacity() +
+ GetMemoryUsedByList(entries_) +
+ GetMemoryUsedByList(time_intervals_);
+}
+
+
HeapSnapshotsCollection::HeapSnapshotsCollection()
: is_tracking_objects_(false),
snapshots_uids_(HeapSnapshotsMatch),
@@ -1557,6 +1550,22 @@ Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
}
+size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
+ STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
+ kExpectedHeapSnapshotsCollectionSize ==
+ sizeof(HeapSnapshotsCollection)); // NOLINT
+ size_t size = sizeof(*this);
+ size += names_.GetUsedMemorySize();
+ size += ids_.GetUsedMemorySize();
+ size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
+ size += GetMemoryUsedByList(snapshots_);
+ for (int i = 0; i < snapshots_.length(); ++i) {
+ size += snapshots_[i]->RawSnapshotSize();
+ }
+ return size;
+}
+
+
HeapEntriesMap::HeapEntriesMap()
: entries_(HeapThingsMatch) {
}
@@ -1702,8 +1711,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
name->IsString()
? collection_->names()->GetName(String::cast(name))
: "");
- } else if (object->IsGlobalContext()) {
- return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
+ } else if (object->IsNativeContext()) {
+ return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
} else if (object->IsContext()) {
return AddEntry(object, HeapEntry::kHidden, "system / Context");
} else if (object->IsFixedArray() ||
@@ -1937,8 +1946,8 @@ void V8HeapExplorer::ExtractJSObjectReferences(
"builtins", global_obj->builtins(),
GlobalObject::kBuiltinsOffset);
SetInternalReference(global_obj, entry,
- "global_context", global_obj->global_context(),
- GlobalObject::kGlobalContextOffset);
+ "native_context", global_obj->native_context(),
+ GlobalObject::kNativeContextOffset);
SetInternalReference(global_obj, entry,
"global_receiver", global_obj->global_receiver(),
GlobalObject::kGlobalReceiverOffset);
@@ -1973,17 +1982,17 @@ void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
- EXTRACT_CONTEXT_FIELD(GLOBAL_INDEX, GlobalObject, global);
- if (context->IsGlobalContext()) {
+ EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
+ if (context->IsNativeContext()) {
TagObject(context->jsfunction_result_caches(),
"(context func. result caches)");
TagObject(context->normalized_map_cache(), "(context norm. map cache)");
TagObject(context->runtime_context(), "(runtime context)");
TagObject(context->data(), "(context data)");
- GLOBAL_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
+ NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
for (int i = Context::FIRST_WEAK_SLOT;
- i < Context::GLOBAL_CONTEXT_SLOTS;
+ i < Context::NATIVE_CONTEXT_SLOTS;
++i) {
SetWeakReference(context, entry, i, context->get(i),
FixedArray::OffsetOfElementAt(i));
@@ -1998,22 +2007,34 @@ void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
SetInternalReference(map, entry,
"constructor", map->constructor(),
Map::kConstructorOffset);
- if (!map->instance_descriptors()->IsEmpty()) {
- TagObject(map->instance_descriptors(), "(map descriptors)");
+ if (map->HasTransitionArray()) {
+ TransitionArray* transitions = map->transitions();
+
+ Object* back_pointer = transitions->back_pointer_storage();
+ TagObject(transitions->back_pointer_storage(), "(back pointer)");
+ SetInternalReference(transitions, entry,
+ "backpointer", back_pointer,
+ TransitionArray::kBackPointerStorageOffset);
+ IndexedReferencesExtractor transitions_refs(this, transitions, entry);
+ transitions->Iterate(&transitions_refs);
+
+ TagObject(transitions, "(transition array)");
SetInternalReference(map, entry,
- "descriptors", map->instance_descriptors(),
- Map::kInstanceDescriptorsOrBitField3Offset);
- }
- if (map->unchecked_prototype_transitions()->IsFixedArray()) {
- TagObject(map->prototype_transitions(), "(prototype transitions)");
- SetInternalReference(map, entry,
- "prototype_transitions", map->prototype_transitions(),
- Map::kPrototypeTransitionsOrBackPointerOffset);
+ "transitions", transitions,
+ Map::kTransitionsOrBackPointerOffset);
} else {
+ Object* back_pointer = map->GetBackPointer();
+ TagObject(back_pointer, "(back pointer)");
SetInternalReference(map, entry,
- "back_pointer", map->GetBackPointer(),
- Map::kPrototypeTransitionsOrBackPointerOffset);
+ "backpointer", back_pointer,
+ Map::kTransitionsOrBackPointerOffset);
}
+ DescriptorArray* descriptors = map->instance_descriptors();
+ TagObject(descriptors, "(map descriptors)");
+ SetInternalReference(map, entry,
+ "descriptors", descriptors,
+ Map::kDescriptorsOffset);
+
SetInternalReference(map, entry,
"code_cache", map->code_cache(),
Map::kCodeCacheOffset);
@@ -2169,20 +2190,37 @@ void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
if (js_obj->HasFastProperties()) {
DescriptorArray* descs = js_obj->map()->instance_descriptors();
+ int real_size = js_obj->map()->NumberOfOwnDescriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ if (descs->GetDetails(i).descriptor_index() > real_size) continue;
switch (descs->GetType(i)) {
case FIELD: {
int index = descs->GetFieldIndex(i);
+
+ String* k = descs->GetKey(i);
if (index < js_obj->map()->inobject_properties()) {
- SetPropertyReference(
- js_obj, entry,
- descs->GetKey(i), js_obj->InObjectPropertyAt(index),
- NULL,
- js_obj->GetInObjectPropertyOffset(index));
+ Object* value = js_obj->InObjectPropertyAt(index);
+ if (k != heap_->hidden_symbol()) {
+ SetPropertyReference(
+ js_obj, entry,
+ k, value,
+ NULL,
+ js_obj->GetInObjectPropertyOffset(index));
+ } else {
+ TagObject(value, "(hidden properties)");
+ SetInternalReference(
+ js_obj, entry,
+ "hidden_properties", value,
+ js_obj->GetInObjectPropertyOffset(index));
+ }
} else {
- SetPropertyReference(
- js_obj, entry,
- descs->GetKey(i), js_obj->FastPropertyAt(index));
+ Object* value = js_obj->FastPropertyAt(index);
+ if (k != heap_->hidden_symbol()) {
+ SetPropertyReference(js_obj, entry, k, value);
+ } else {
+ TagObject(value, "(hidden properties)");
+ SetInternalReference(js_obj, entry, "hidden_properties", value);
+ }
}
break;
}
@@ -2209,10 +2247,10 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
case NORMAL: // only in slow mode
case HANDLER: // only in lookup results, not in descriptors
case INTERCEPTOR: // only in lookup results, not in descriptors
- case MAP_TRANSITION: // we do not care about transitions here...
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR: // ... and not about "holes"
+ break;
+ case TRANSITION:
+ case NONEXISTENT:
+ UNREACHABLE();
break;
}
}
@@ -2227,7 +2265,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
Object* value = target->IsJSGlobalPropertyCell()
? JSGlobalPropertyCell::cast(target)->value()
: target;
- if (String::cast(k)->length() > 0) {
+ if (k != heap_->hidden_symbol()) {
SetPropertyReference(js_obj, entry, String::cast(k), value);
} else {
TagObject(value, "(hidden properties)");
@@ -2240,7 +2278,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
- if (js_obj->HasFastElements()) {
+ if (js_obj->HasFastObjectElements()) {
FixedArray* elements = FixedArray::cast(js_obj->elements());
int length = js_obj->IsJSArray() ?
Smi::cast(JSArray::cast(js_obj)->length())->value() :
@@ -2286,11 +2324,12 @@ String* V8HeapExplorer::GetConstructorName(JSObject* object) {
Object* constructor_prop = NULL;
LookupResult result(heap->isolate());
object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
- if (result.IsProperty()) {
- constructor_prop = result.GetLazyValue();
- }
+ if (!result.IsFound()) return object->constructor_name();
+
+ constructor_prop = result.GetLazyValue();
if (constructor_prop->IsJSFunction()) {
- Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
+ Object* maybe_name =
+ JSFunction::cast(constructor_prop)->shared()->name();
if (maybe_name->IsString()) {
String* name = String::cast(maybe_name);
if (name->length() > 0) return name;
@@ -2404,19 +2443,17 @@ bool V8HeapExplorer::IterateAndExtractReferences(
bool V8HeapExplorer::IsEssentialObject(Object* object) {
- // We have to use raw_unchecked_* versions because checked versions
- // would fail during iteration over object properties.
return object->IsHeapObject()
&& !object->IsOddball()
- && object != heap_->raw_unchecked_empty_byte_array()
- && object != heap_->raw_unchecked_empty_fixed_array()
- && object != heap_->raw_unchecked_empty_descriptor_array()
- && object != heap_->raw_unchecked_fixed_array_map()
- && object != heap_->raw_unchecked_global_property_cell_map()
- && object != heap_->raw_unchecked_shared_function_info_map()
- && object != heap_->raw_unchecked_free_space_map()
- && object != heap_->raw_unchecked_one_pointer_filler_map()
- && object != heap_->raw_unchecked_two_pointer_filler_map();
+ && object != heap_->empty_byte_array()
+ && object != heap_->empty_fixed_array()
+ && object != heap_->empty_descriptor_array()
+ && object != heap_->fixed_array_map()
+ && object != heap_->global_property_cell_map()
+ && object != heap_->shared_function_info_map()
+ && object != heap_->free_space_map()
+ && object != heap_->one_pointer_filler_map()
+ && object != heap_->two_pointer_filler_map();
}
@@ -2552,20 +2589,6 @@ void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
}
-void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
- int parent_entry,
- String* reference_name,
- Object* child_obj) {
- HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry != NULL) {
- filler_->SetNamedReference(HeapGraphEdge::kShortcut,
- parent_entry,
- collection_->names()->GetName(reference_name),
- child_entry);
- }
-}
-
-
void V8HeapExplorer::SetRootGcRootsReference() {
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
@@ -2646,7 +2669,7 @@ class GlobalObjectsEnumerator : public ObjectVisitor {
public:
virtual void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
- if ((*p)->IsGlobalContext()) {
+ if ((*p)->IsNativeContext()) {
Context* context = Context::cast(*p);
JSObject* proxy = context->global_proxy();
if (proxy->IsJSGlobalProxy()) {
@@ -2684,6 +2707,10 @@ void V8HeapExplorer::TagGlobalObjects() {
Object* obj_document;
if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
obj_document->IsJSObject()) {
+ // FixMe: Workaround: SharedWorker's current Isolate has NULL context.
+ // As result GetProperty(*url_string) will crash.
+ if (!Isolate::Current()->context() && obj_document->IsJSGlobalProxy())
+ continue;
JSObject* document = JSObject::cast(obj_document);
Object* obj_url;
if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
@@ -3066,37 +3093,34 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
Heap::kMakeHeapIterableMask,
"HeapSnapshotGenerator::GenerateSnapshot");
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
Heap* debug_heap = Isolate::Current()->heap();
- ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
- ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
- ASSERT(!debug_heap->code_space()->was_swept_conservatively());
- ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
- ASSERT(!debug_heap->map_space()->was_swept_conservatively());
+ CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
+ CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
+ CHECK(!debug_heap->code_space()->was_swept_conservatively());
+ CHECK(!debug_heap->cell_space()->was_swept_conservatively());
+ CHECK(!debug_heap->map_space()->was_swept_conservatively());
#endif
// The following code uses heap iterators, so we want the heap to be
// stable. It should follow TagGlobalObjects as that can allocate.
AssertNoAllocation no_alloc;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
debug_heap->Verify();
#endif
SetProgressTotal(1); // 1 pass.
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
debug_heap->Verify();
#endif
if (!FillReferences()) return false;
- snapshot_->FillChildrenAndRetainers();
+ snapshot_->FillChildren();
snapshot_->RememberLastJSObjectId();
- if (!SetEntriesDominators()) return false;
- if (!CalculateRetainedSizes()) return false;
-
progress_counter_ = progress_total_;
if (!ProgressReport(true)) return false;
return true;
@@ -3138,187 +3162,6 @@ bool HeapSnapshotGenerator::FillReferences() {
}
-bool HeapSnapshotGenerator::IsUserGlobalReference(const HeapGraphEdge* edge) {
- ASSERT(edge->from() == snapshot_->root());
- return edge->type() == HeapGraphEdge::kShortcut;
-}
-
-
-void HeapSnapshotGenerator::MarkUserReachableObjects() {
- List<HeapEntry*> worklist;
-
- Vector<HeapGraphEdge*> children = snapshot_->root()->children();
- for (int i = 0; i < children.length(); ++i) {
- if (IsUserGlobalReference(children[i])) {
- worklist.Add(children[i]->to());
- }
- }
-
- while (!worklist.is_empty()) {
- HeapEntry* entry = worklist.RemoveLast();
- if (entry->user_reachable()) continue;
- entry->set_user_reachable();
- Vector<HeapGraphEdge*> children = entry->children();
- for (int i = 0; i < children.length(); ++i) {
- HeapEntry* child = children[i]->to();
- if (!child->user_reachable()) {
- worklist.Add(child);
- }
- }
- }
-}
-
-
-static bool IsRetainingEdge(HeapGraphEdge* edge) {
- if (edge->type() == HeapGraphEdge::kShortcut) return false;
- // The edge is not retaining if it goes from system domain
- // (i.e. an object not reachable from window) to the user domain
- // (i.e. a reachable object).
- return edge->from()->user_reachable()
- || !edge->to()->user_reachable();
-}
-
-
-void HeapSnapshotGenerator::FillPostorderIndexes(
- Vector<HeapEntry*>* entries) {
- snapshot_->ClearPaint();
- int current_entry = 0;
- List<HeapEntry*> nodes_to_visit;
- HeapEntry* root = snapshot_->root();
- nodes_to_visit.Add(root);
- snapshot_->root()->paint();
- while (!nodes_to_visit.is_empty()) {
- HeapEntry* entry = nodes_to_visit.last();
- Vector<HeapGraphEdge*> children = entry->children();
- bool has_new_edges = false;
- for (int i = 0; i < children.length(); ++i) {
- if (entry != root && !IsRetainingEdge(children[i])) continue;
- HeapEntry* child = children[i]->to();
- if (!child->painted()) {
- nodes_to_visit.Add(child);
- child->paint();
- has_new_edges = true;
- }
- }
- if (!has_new_edges) {
- entry->set_postorder_index(current_entry);
- (*entries)[current_entry++] = entry;
- nodes_to_visit.RemoveLast();
- }
- }
- ASSERT_EQ(current_entry, entries->length());
-}
-
-
-static int Intersect(int i1, int i2, const Vector<int>& dominators) {
- int finger1 = i1, finger2 = i2;
- while (finger1 != finger2) {
- while (finger1 < finger2) finger1 = dominators[finger1];
- while (finger2 < finger1) finger2 = dominators[finger2];
- }
- return finger1;
-}
-
-
-// The algorithm is based on the article:
-// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
-// Softw. Pract. Exper. 4 (2001), pp. 1-10.
-bool HeapSnapshotGenerator::BuildDominatorTree(
- const Vector<HeapEntry*>& entries,
- Vector<int>* dominators) {
- if (entries.length() == 0) return true;
- HeapEntry* root = snapshot_->root();
- const int entries_length = entries.length(), root_index = entries_length - 1;
- for (int i = 0; i < root_index; ++i) (*dominators)[i] = HeapEntry::kNoEntry;
- (*dominators)[root_index] = root_index;
-
- // The affected array is used to mark entries which dominators
- // have to be racalculated because of changes in their retainers.
- ScopedVector<bool> affected(entries_length);
- for (int i = 0; i < affected.length(); ++i) affected[i] = false;
- // Mark the root direct children as affected.
- Vector<HeapGraphEdge*> children = entries[root_index]->children();
- for (int i = 0; i < children.length(); ++i) {
- affected[children[i]->to()->postorder_index()] = true;
- }
-
- bool changed = true;
- while (changed) {
- changed = false;
- if (!ProgressReport(false)) return false;
- for (int i = root_index - 1; i >= 0; --i) {
- if (!affected[i]) continue;
- affected[i] = false;
- // If dominator of the entry has already been set to root,
- // then it can't propagate any further.
- if ((*dominators)[i] == root_index) continue;
- int new_idom_index = HeapEntry::kNoEntry;
- Vector<HeapGraphEdge*> rets = entries[i]->retainers();
- for (int j = 0; j < rets.length(); ++j) {
- if (rets[j]->from() != root && !IsRetainingEdge(rets[j])) continue;
- int ret_index = rets[j]->from()->postorder_index();
- if (dominators->at(ret_index) != HeapEntry::kNoEntry) {
- new_idom_index = new_idom_index == HeapEntry::kNoEntry
- ? ret_index
- : Intersect(ret_index, new_idom_index, *dominators);
- // If idom has already reached the root, it doesn't make sense
- // to check other retainers.
- if (new_idom_index == root_index) break;
- }
- }
- if (new_idom_index != HeapEntry::kNoEntry
- && dominators->at(i) != new_idom_index) {
- (*dominators)[i] = new_idom_index;
- changed = true;
- Vector<HeapGraphEdge*> children = entries[i]->children();
- for (int j = 0; j < children.length(); ++j) {
- affected[children[j]->to()->postorder_index()] = true;
- }
- }
- }
- }
- return true;
-}
-
-
-bool HeapSnapshotGenerator::SetEntriesDominators() {
- MarkUserReachableObjects();
- // This array is used for maintaining postorder of nodes.
- ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries().length());
- FillPostorderIndexes(&ordered_entries);
- ScopedVector<int> dominators(ordered_entries.length());
- if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
- for (int i = 0; i < ordered_entries.length(); ++i) {
- ASSERT(dominators[i] != HeapEntry::kNoEntry);
- ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
- }
- return true;
-}
-
-
-bool HeapSnapshotGenerator::CalculateRetainedSizes() {
- // As for the dominators tree we only know parent nodes, not
- // children, to sum up total sizes we "bubble" node's self size
- // adding it to all of its parents.
- List<HeapEntry>& entries = snapshot_->entries();
- for (int i = 0; i < entries.length(); ++i) {
- HeapEntry* entry = &entries[i];
- entry->set_retained_size(entry->self_size());
- }
- for (int i = 0; i < entries.length(); ++i) {
- int entry_size = entries[i].self_size();
- HeapEntry* current = &entries[i];
- for (HeapEntry* dominator = current->dominator();
- dominator != current;
- current = dominator, dominator = current->dominator()) {
- ASSERT(current->dominator() != NULL);
- dominator->add_retained_size(entry_size);
- }
- }
- return true;
-}
-
-
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
static const int kSigned = 11;
@@ -3417,8 +3260,8 @@ class OutputStreamWriter {
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
-// type, name, id, self_size, retained_size, dominator, children_index.
-const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 7;
+// type, name, id, self_size, children_index.
+const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
ASSERT(writer_ == NULL);
@@ -3458,14 +3301,12 @@ HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
(snapshot_->RawSnapshotSize() + MB - 1) / MB);
HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
- result->FillChildrenAndRetainers();
- result->SetDominatorsToSelf();
+ result->FillChildren();
return result;
}
void HeapSnapshotJSONSerializer::SerializeImpl() {
- List<HeapEntry>& nodes = snapshot_->entries();
ASSERT(0 == snapshot_->root()->index());
writer_->AddCharacter('{');
writer_->AddString("\"snapshot\":{");
@@ -3473,11 +3314,11 @@ void HeapSnapshotJSONSerializer::SerializeImpl() {
if (writer_->aborted()) return;
writer_->AddString("},\n");
writer_->AddString("\"nodes\":[");
- SerializeNodes(nodes);
+ SerializeNodes();
if (writer_->aborted()) return;
writer_->AddString("],\n");
writer_->AddString("\"edges\":[");
- SerializeEdges(nodes);
+ SerializeEdges();
if (writer_->aborted()) return;
writer_->AddString("],\n");
writer_->AddString("\"strings\":[");
@@ -3519,9 +3360,9 @@ static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
bool first_edge) {
- // The buffer needs space for 3 ints, 3 commas and \0
+ // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
static const int kBufferSize =
- MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1; // NOLINT
+ MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
EmbeddedVector<char, kBufferSize> buffer;
int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
|| edge->type() == HeapGraphEdge::kHidden
@@ -3536,32 +3377,28 @@ void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
buffer[buffer_pos++] = ',';
buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
+ buffer[buffer_pos++] = '\n';
buffer[buffer_pos++] = '\0';
writer_->AddString(buffer.start());
}
-void HeapSnapshotJSONSerializer::SerializeEdges(const List<HeapEntry>& nodes) {
- bool first_edge = true;
- for (int i = 0; i < nodes.length(); ++i) {
- HeapEntry* entry = &nodes[i];
- Vector<HeapGraphEdge*> children = entry->children();
- for (int j = 0; j < children.length(); ++j) {
- SerializeEdge(children[j], first_edge);
- first_edge = false;
- if (writer_->aborted()) return;
- }
+void HeapSnapshotJSONSerializer::SerializeEdges() {
+ List<HeapGraphEdge*>& edges = snapshot_->children();
+ for (int i = 0; i < edges.length(); ++i) {
+ ASSERT(i == 0 ||
+ edges[i - 1]->from()->index() <= edges[i]->from()->index());
+ SerializeEdge(edges[i], i == 0);
+ if (writer_->aborted()) return;
}
}
-void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry,
- int edges_index) {
- // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
+void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
+ // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
static const int kBufferSize =
- 6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned // NOLINT
- + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned // NOLINT
- + 7 + 1 + 1;
+ 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
+ + 5 + 1 + 1;
EmbeddedVector<char, kBufferSize> buffer;
int buffer_pos = 0;
if (entry_index(entry) != 0) {
@@ -3575,23 +3412,17 @@ void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry,
buffer[buffer_pos++] = ',';
buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
buffer[buffer_pos++] = ',';
- buffer_pos = utoa(entry->retained_size(), buffer, buffer_pos);
- buffer[buffer_pos++] = ',';
- buffer_pos = utoa(entry_index(entry->dominator()), buffer, buffer_pos);
- buffer[buffer_pos++] = ',';
- buffer_pos = utoa(edges_index, buffer, buffer_pos);
+ buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
buffer[buffer_pos++] = '\n';
buffer[buffer_pos++] = '\0';
writer_->AddString(buffer.start());
}
-void HeapSnapshotJSONSerializer::SerializeNodes(const List<HeapEntry>& nodes) {
- int edges_index = 0;
- for (int i = 0; i < nodes.length(); ++i) {
- HeapEntry* entry = &nodes[i];
- SerializeNode(entry, edges_index);
- edges_index += entry->children().length() * kEdgeFieldsCount;
+void HeapSnapshotJSONSerializer::SerializeNodes() {
+ List<HeapEntry>& entries = snapshot_->entries();
+ for (int i = 0; i < entries.length(); ++i) {
+ SerializeNode(&entries[i]);
if (writer_->aborted()) return;
}
}
@@ -3615,9 +3446,7 @@ void HeapSnapshotJSONSerializer::SerializeSnapshot() {
JSON_S("name") ","
JSON_S("id") ","
JSON_S("self_size") ","
- JSON_S("retained_size") ","
- JSON_S("dominator") ","
- JSON_S("edges_index")) ","
+ JSON_S("edge_count")) ","
JSON_S("node_types") ":" JSON_A(
JSON_A(
JSON_S("hidden") ","
diff --git a/src/3rdparty/v8/src/profile-generator.h b/src/3rdparty/v8/src/profile-generator.h
index 92896c2..04f4a1c 100644
--- a/src/3rdparty/v8/src/profile-generator.h
+++ b/src/3rdparty/v8/src/profile-generator.h
@@ -72,6 +72,7 @@ class StringsStorage {
const char* GetName(int index);
inline const char* GetFunctionName(String* name);
inline const char* GetFunctionName(const char* name);
+ size_t GetUsedMemorySize() const;
private:
static const int kMaxNameSize = 1024;
@@ -529,35 +530,14 @@ class HeapEntry BASE_EMBEDDED {
void set_name(const char* name) { name_ = name; }
inline SnapshotObjectId id() { return id_; }
int self_size() { return self_size_; }
- int retained_size() { return retained_size_; }
- void add_retained_size(int size) { retained_size_ += size; }
- void set_retained_size(int size) { retained_size_ = size; }
INLINE(int index() const);
- int postorder_index() { return postorder_index_; }
- void set_postorder_index(int value) { postorder_index_ = value; }
int children_count() const { return children_count_; }
INLINE(int set_children_index(int index));
- INLINE(int set_retainers_index(int index));
void add_child(HeapGraphEdge* edge) {
children_arr()[children_count_++] = edge;
}
- void add_retainer(HeapGraphEdge* edge) {
- retainers_arr()[retainers_count_++] = edge;
- }
Vector<HeapGraphEdge*> children() {
return Vector<HeapGraphEdge*>(children_arr(), children_count_); }
- Vector<HeapGraphEdge*> retainers() {
- return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
- INLINE(HeapEntry* dominator() const);
- void set_dominator(HeapEntry* entry) {
- ASSERT(entry != NULL);
- dominator_ = entry->index();
- }
- void clear_paint() { painted_ = false; }
- bool painted() { return painted_; }
- void paint() { painted_ = true; }
- bool user_reachable() { return user_reachable_; }
- void set_user_reachable() { user_reachable_ = true; }
void SetIndexedReference(
HeapGraphEdge::Type type, int index, HeapEntry* entry);
@@ -571,22 +551,12 @@ class HeapEntry BASE_EMBEDDED {
private:
INLINE(HeapGraphEdge** children_arr());
- INLINE(HeapGraphEdge** retainers_arr());
const char* TypeAsString();
- unsigned painted_: 1;
- unsigned user_reachable_: 1;
- int dominator_: 30;
unsigned type_: 4;
- int retainers_count_: 28;
- int retainers_index_;
- int children_count_;
+ int children_count_: 28;
int children_index_;
int self_size_;
- union {
- int postorder_index_; // Used during dominator tree building.
- int retained_size_; // At that moment, there is no retained size yet.
- };
SnapshotObjectId id_;
HeapSnapshot* snapshot_;
const char* name_;
@@ -626,7 +596,6 @@ class HeapSnapshot {
List<HeapEntry>& entries() { return entries_; }
List<HeapGraphEdge>& edges() { return edges_; }
List<HeapGraphEdge*>& children() { return children_; }
- List<HeapGraphEdge*>& retainers() { return retainers_; }
void RememberLastJSObjectId();
SnapshotObjectId max_snapshot_js_object_id() const {
return max_snapshot_js_object_id_;
@@ -640,11 +609,9 @@ class HeapSnapshot {
HeapEntry* AddGcRootsEntry();
HeapEntry* AddGcSubrootEntry(int tag);
HeapEntry* AddNativesRootEntry();
- void ClearPaint();
HeapEntry* GetEntryById(SnapshotObjectId id);
List<HeapEntry*>* GetSortedEntriesList();
- void SetDominatorsToSelf();
- void FillChildrenAndRetainers();
+ void FillChildren();
void Print(int max_depth);
void PrintEntriesSize();
@@ -661,7 +628,6 @@ class HeapSnapshot {
List<HeapEntry> entries_;
List<HeapGraphEdge> edges_;
List<HeapGraphEdge*> children_;
- List<HeapGraphEdge*> retainers_;
List<HeapEntry*> sorted_entries_;
SnapshotObjectId max_snapshot_js_object_id_;
@@ -684,7 +650,8 @@ class HeapObjectsMap {
}
void StopHeapObjectsTracking();
- void PushHeapObjectsStats(OutputStream* stream);
+ SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
+ size_t GetUsedMemorySize() const;
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
static inline SnapshotObjectId GetNthGcSubrootId(int delta);
@@ -742,7 +709,7 @@ class HeapSnapshotsCollection {
~HeapSnapshotsCollection();
bool is_tracking_objects() { return is_tracking_objects_; }
- void PushHeapObjectsStats(OutputStream* stream) {
+ SnapshotObjectId PushHeapObjectsStats(OutputStream* stream) {
return ids_.PushHeapObjectsStats(stream);
}
void StartHeapObjectsTracking() { is_tracking_objects_ = true; }
@@ -769,6 +736,7 @@ class HeapSnapshotsCollection {
SnapshotObjectId last_assigned_id() const {
return ids_.last_assigned_id();
}
+ size_t GetUsedMemorySize() const;
private:
INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
@@ -957,10 +925,6 @@ class V8HeapExplorer : public HeapEntriesAllocator {
Object* child,
const char* name_format_string = NULL,
int field_offset = -1);
- void SetPropertyShortcutReference(HeapObject* parent_obj,
- int parent,
- String* reference_name,
- Object* child);
void SetUserGlobalReference(Object* user_global);
void SetRootGcRootsReference();
void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
@@ -1061,16 +1025,9 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
bool GenerateSnapshot();
private:
- bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
- Vector<int>* dominators);
- bool CalculateRetainedSizes();
bool FillReferences();
- void FillPostorderIndexes(Vector<HeapEntry*>* entries);
- bool IsUserGlobalReference(const HeapGraphEdge* edge);
- void MarkUserReachableObjects();
void ProgressStep();
bool ProgressReport(bool force = false);
- bool SetEntriesDominators();
void SetProgressTotal(int iterations_count);
HeapSnapshot* snapshot_;
@@ -1114,10 +1071,10 @@ class HeapSnapshotJSONSerializer {
int GetStringId(const char* s);
int entry_index(HeapEntry* e) { return e->index() * kNodeFieldsCount; }
void SerializeEdge(HeapGraphEdge* edge, bool first_edge);
- void SerializeEdges(const List<HeapEntry>& nodes);
+ void SerializeEdges();
void SerializeImpl();
- void SerializeNode(HeapEntry* entry, int edges_index);
- void SerializeNodes(const List<HeapEntry>& nodes);
+ void SerializeNode(HeapEntry* entry);
+ void SerializeNodes();
void SerializeSnapshot();
void SerializeString(const unsigned char* s);
void SerializeStrings();
diff --git a/src/3rdparty/v8/src/property-details.h b/src/3rdparty/v8/src/property-details.h
index c79aa96..64e3205 100644
--- a/src/3rdparty/v8/src/property-details.h
+++ b/src/3rdparty/v8/src/property-details.h
@@ -55,21 +55,18 @@ class Smi;
// Must fit in the BitField PropertyDetails::TypeField.
// A copy of this is in mirror-debugger.js.
enum PropertyType {
- NORMAL = 0, // only in slow mode
- FIELD = 1, // only in fast mode
- CONSTANT_FUNCTION = 2, // only in fast mode
+ // Only in slow mode.
+ NORMAL = 0,
+ // Only in fast mode.
+ FIELD = 1,
+ CONSTANT_FUNCTION = 2,
CALLBACKS = 3,
- HANDLER = 4, // only in lookup results, not in descriptors
- INTERCEPTOR = 5, // only in lookup results, not in descriptors
- // All properties before MAP_TRANSITION are real.
- MAP_TRANSITION = 6, // only in fast mode
- ELEMENTS_TRANSITION = 7,
- CONSTANT_TRANSITION = 8, // only in fast mode
- NULL_DESCRIPTOR = 9, // only in fast mode
- // There are no IC stubs for NULL_DESCRIPTORS. Therefore,
- // NULL_DESCRIPTOR can be used as the type flag for IC stubs for
- // nonexistent properties.
- NONEXISTENT = NULL_DESCRIPTOR
+ // Only in lookup results, not in descriptors.
+ HANDLER = 4,
+ INTERCEPTOR = 5,
+ TRANSITION = 6,
+ // Only used as a marker in LookupResult.
+ NONEXISTENT = 7
};
@@ -80,50 +77,64 @@ class PropertyDetails BASE_EMBEDDED {
PropertyDetails(PropertyAttributes attributes,
PropertyType type,
int index = 0) {
- ASSERT(TypeField::is_valid(type));
- ASSERT(AttributesField::is_valid(attributes));
- ASSERT(StorageField::is_valid(index));
-
value_ = TypeField::encode(type)
| AttributesField::encode(attributes)
- | StorageField::encode(index);
+ | DictionaryStorageField::encode(index);
ASSERT(type == this->type());
ASSERT(attributes == this->attributes());
- ASSERT(index == this->index());
+ ASSERT(index == this->dictionary_index());
}
+ int pointer() { return DescriptorPointer::decode(value_); }
+
+ PropertyDetails set_pointer(int i) { return PropertyDetails(value_, i); }
+
// Conversion for storing details as Object*.
explicit inline PropertyDetails(Smi* smi);
inline Smi* AsSmi();
PropertyType type() { return TypeField::decode(value_); }
- PropertyAttributes attributes() { return AttributesField::decode(value_); }
+ PropertyAttributes attributes() const {
+ return AttributesField::decode(value_);
+ }
- int index() { return StorageField::decode(value_); }
+ int dictionary_index() {
+ return DictionaryStorageField::decode(value_);
+ }
+
+ int descriptor_index() {
+ return DescriptorStorageField::decode(value_);
+ }
inline PropertyDetails AsDeleted();
static bool IsValidIndex(int index) {
- return StorageField::is_valid(index);
+ return DictionaryStorageField::is_valid(index);
}
- bool IsReadOnly() { return (attributes() & READ_ONLY) != 0; }
- bool IsDontDelete() { return (attributes() & DONT_DELETE) != 0; }
- bool IsDontEnum() { return (attributes() & DONT_ENUM) != 0; }
- bool IsDeleted() { return DeletedField::decode(value_) != 0;}
+ bool IsReadOnly() const { return (attributes() & READ_ONLY) != 0; }
+ bool IsDontDelete() const { return (attributes() & DONT_DELETE) != 0; }
+ bool IsDontEnum() const { return (attributes() & DONT_ENUM) != 0; }
+ bool IsDeleted() const { return DeletedField::decode(value_) != 0;}
// Bit fields in value_ (type, shift, size). Must be public so the
// constants can be embedded in generated code.
- class TypeField: public BitField<PropertyType, 0, 4> {};
- class AttributesField: public BitField<PropertyAttributes, 4, 3> {};
- class DeletedField: public BitField<uint32_t, 7, 1> {};
- class StorageField: public BitField<uint32_t, 8, 32-8> {};
+ class TypeField: public BitField<PropertyType, 0, 3> {};
+ class AttributesField: public BitField<PropertyAttributes, 3, 3> {};
+ class DeletedField: public BitField<uint32_t, 6, 1> {};
+ class DictionaryStorageField: public BitField<uint32_t, 7, 24> {};
+ class DescriptorStorageField: public BitField<uint32_t, 7, 11> {};
+ class DescriptorPointer: public BitField<uint32_t, 18, 11> {};
static const int kInitialIndex = 1;
private:
+ PropertyDetails(int value, int pointer) {
+ value_ = DescriptorPointer::update(value, pointer);
+ }
+
uint32_t value_;
};
diff --git a/src/3rdparty/v8/src/property.cc b/src/3rdparty/v8/src/property.cc
index 78f237d..d05ef2b 100644
--- a/src/3rdparty/v8/src/property.cc
+++ b/src/3rdparty/v8/src/property.cc
@@ -43,99 +43,80 @@ void LookupResult::Iterate(ObjectVisitor* visitor) {
#ifdef OBJECT_PRINT
void LookupResult::Print(FILE* out) {
if (!IsFound()) {
- PrintF(out, "Not Found\n");
+ FPrintF(out, "Not Found\n");
return;
}
- PrintF(out, "LookupResult:\n");
- PrintF(out, " -cacheable = %s\n", IsCacheable() ? "true" : "false");
- PrintF(out, " -attributes = %x\n", GetAttributes());
+ FPrintF(out, "LookupResult:\n");
+ FPrintF(out, " -cacheable = %s\n", IsCacheable() ? "true" : "false");
+ FPrintF(out, " -attributes = %x\n", GetAttributes());
switch (type()) {
case NORMAL:
- PrintF(out, " -type = normal\n");
- PrintF(out, " -entry = %d", GetDictionaryEntry());
- break;
- case MAP_TRANSITION:
- PrintF(out, " -type = map transition\n");
- PrintF(out, " -map:\n");
- GetTransitionMap()->Print(out);
- PrintF(out, "\n");
- break;
- case ELEMENTS_TRANSITION:
- PrintF(out, " -type = elements transition\n");
- PrintF(out, " -map:\n");
- GetTransitionMap()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, " -type = normal\n");
+ FPrintF(out, " -entry = %d", GetDictionaryEntry());
break;
case CONSTANT_FUNCTION:
- PrintF(out, " -type = constant function\n");
- PrintF(out, " -function:\n");
+ FPrintF(out, " -type = constant function\n");
+ FPrintF(out, " -function:\n");
GetConstantFunction()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, "\n");
break;
case FIELD:
- PrintF(out, " -type = field\n");
- PrintF(out, " -index = %d", GetFieldIndex());
- PrintF(out, "\n");
+ FPrintF(out, " -type = field\n");
+ FPrintF(out, " -index = %d", GetFieldIndex());
+ FPrintF(out, "\n");
break;
case CALLBACKS:
- PrintF(out, " -type = call backs\n");
- PrintF(out, " -callback object:\n");
+ FPrintF(out, " -type = call backs\n");
+ FPrintF(out, " -callback object:\n");
GetCallbackObject()->Print(out);
break;
case HANDLER:
- PrintF(out, " -type = lookup proxy\n");
+ FPrintF(out, " -type = lookup proxy\n");
break;
case INTERCEPTOR:
- PrintF(out, " -type = lookup interceptor\n");
- break;
- case CONSTANT_TRANSITION:
- PrintF(out, " -type = constant property transition\n");
- PrintF(out, " -map:\n");
- GetTransitionMap()->Print(out);
- PrintF(out, "\n");
+ FPrintF(out, " -type = lookup interceptor\n");
break;
- case NULL_DESCRIPTOR:
- PrintF(out, " =type = null descriptor\n");
+ case TRANSITION:
+ switch (GetTransitionDetails().type()) {
+ case FIELD:
+ FPrintF(out, " -type = map transition\n");
+ FPrintF(out, " -map:\n");
+ GetTransitionMap()->Print(out);
+ FPrintF(out, "\n");
+ return;
+ case CONSTANT_FUNCTION:
+ FPrintF(out, " -type = constant property transition\n");
+ FPrintF(out, " -map:\n");
+ GetTransitionMap()->Print(out);
+ FPrintF(out, "\n");
+ return;
+ case CALLBACKS:
+ FPrintF(out, " -type = callbacks transition\n");
+ FPrintF(out, " -callback object:\n");
+ GetCallbackObject()->Print(out);
+ return;
+ default:
+ UNREACHABLE();
+ return;
+ }
+ case NONEXISTENT:
+ UNREACHABLE();
break;
}
}
void Descriptor::Print(FILE* out) {
- PrintF(out, "Descriptor ");
+ FPrintF(out, "Descriptor ");
GetKey()->ShortPrint(out);
- PrintF(out, " @ ");
+ FPrintF(out, " @ ");
GetValue()->ShortPrint(out);
- PrintF(out, " %d\n", GetDetails().index());
+ FPrintF(out, " %d\n", GetDetails().descriptor_index());
}
#endif
-bool Descriptor::ContainsTransition() {
- switch (details_.type()) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
- return true;
- case CALLBACKS: {
- if (!value_->IsAccessorPair()) return false;
- AccessorPair* accessors = AccessorPair::cast(value_);
- return accessors->getter()->IsMap() || accessors->setter()->IsMap();
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // Keep the compiler happy.
- return false;
-}
-
-
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/property.h b/src/3rdparty/v8/src/property.h
index ba5e3c8..3faa28b 100644
--- a/src/3rdparty/v8/src/property.h
+++ b/src/3rdparty/v8/src/property.h
@@ -29,6 +29,7 @@
#define V8_PROPERTY_H_
#include "allocation.h"
+#include "transitions.h"
namespace v8 {
namespace internal {
@@ -64,11 +65,10 @@ class Descriptor BASE_EMBEDDED {
#endif
void SetEnumerationIndex(int index) {
- ASSERT(PropertyDetails::IsValidIndex(index));
details_ = PropertyDetails(details_.attributes(), details_.type(), index);
}
- bool ContainsTransition();
+ void SetSortedKeyIndex(int index) { details_ = details_.set_pointer(index); }
private:
String* key_;
@@ -93,7 +93,7 @@ class Descriptor BASE_EMBEDDED {
Object* value,
PropertyAttributes attributes,
PropertyType type,
- int index = 0)
+ int index)
: key_(key),
value_(value),
details_(attributes, type, index) { }
@@ -101,35 +101,6 @@ class Descriptor BASE_EMBEDDED {
friend class DescriptorArray;
};
-// A pointer from a map to the new map that is created by adding
-// a named property. These are key to the speed and functioning of V8.
-// The two maps should always have the same prototype, since
-// MapSpace::CreateBackPointers depends on this.
-class MapTransitionDescriptor: public Descriptor {
- public:
- MapTransitionDescriptor(String* key, Map* map, PropertyAttributes attributes)
- : Descriptor(key, map, attributes, MAP_TRANSITION) { }
-};
-
-class ElementsTransitionDescriptor: public Descriptor {
- public:
- ElementsTransitionDescriptor(String* key,
- Object* map_or_array)
- : Descriptor(key, map_or_array, PropertyDetails(NONE,
- ELEMENTS_TRANSITION)) { }
-};
-
-// Marks a field name in a map so that adding the field is guaranteed
-// to create a FIELD descriptor in the new map. Used after adding
-// a constant function the first time, creating a CONSTANT_FUNCTION
-// descriptor in the new map. This avoids creating multiple maps with
-// the same CONSTANT_FUNCTION field.
-class ConstTransitionDescriptor: public Descriptor {
- public:
- explicit ConstTransitionDescriptor(String* key, Map* map)
- : Descriptor(key, map, NONE, CONSTANT_TRANSITION) { }
-};
-
class FieldDescriptor: public Descriptor {
public:
@@ -146,7 +117,7 @@ class ConstantFunctionDescriptor: public Descriptor {
ConstantFunctionDescriptor(String* key,
JSFunction* function,
PropertyAttributes attributes,
- int index = 0)
+ int index)
: Descriptor(key, function, attributes, CONSTANT_FUNCTION, index) {}
};
@@ -161,35 +132,6 @@ class CallbacksDescriptor: public Descriptor {
};
-template <class T>
-bool IsPropertyDescriptor(T* desc) {
- switch (desc->type()) {
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- return true;
- case CALLBACKS: {
- Object* callback_object = desc->GetCallbackObject();
- // Non-JavaScript (i.e. native) accessors are always a property, otherwise
- // either the getter or the setter must be an accessor. Put another way:
- // If we only see map transitions and holes in a pair, this is not a
- // property.
- return (!callback_object->IsAccessorPair() ||
- AccessorPair::cast(callback_object)->ContainsAccessor());
- }
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // keep the compiler happy
- return false;
-}
-
-
class LookupResult BASE_EMBEDDED {
public:
explicit LookupResult(Isolate* isolate)
@@ -198,7 +140,7 @@ class LookupResult BASE_EMBEDDED {
lookup_type_(NOT_FOUND),
holder_(NULL),
cacheable_(true),
- details_(NONE, NORMAL) {
+ details_(NONE, NONEXISTENT) {
isolate->SetTopLookupResult(this);
}
@@ -214,6 +156,13 @@ class LookupResult BASE_EMBEDDED {
number_ = number;
}
+ void TransitionResult(JSObject* holder, int number) {
+ lookup_type_ = TRANSITION_TYPE;
+ details_ = PropertyDetails(NONE, TRANSITION);
+ holder_ = holder;
+ number_ = number;
+ }
+
void ConstantResult(JSObject* holder) {
lookup_type_ = CONSTANT_TYPE;
holder_ = holder;
@@ -246,6 +195,7 @@ class LookupResult BASE_EMBEDDED {
void NotFound() {
lookup_type_ = NOT_FOUND;
+ details_ = PropertyDetails(NONE, NONEXISTENT);
holder_ = NULL;
}
@@ -265,24 +215,61 @@ class LookupResult BASE_EMBEDDED {
}
PropertyAttributes GetAttributes() {
+ ASSERT(!IsTransition());
ASSERT(IsFound());
+ ASSERT(details_.type() != NONEXISTENT);
return details_.attributes();
}
PropertyDetails GetPropertyDetails() {
+ ASSERT(!IsTransition());
return details_;
}
- bool IsReadOnly() { return details_.IsReadOnly(); }
+ bool IsFastPropertyType() {
+ ASSERT(IsFound());
+ return IsTransition() || type() != NORMAL;
+ }
+
+ // Property callbacks does not include transitions to callbacks.
+ bool IsPropertyCallbacks() {
+ ASSERT(!(details_.type() == CALLBACKS && !IsFound()));
+ return details_.type() == CALLBACKS;
+ }
+
+ bool IsReadOnly() {
+ ASSERT(IsFound());
+ ASSERT(!IsTransition());
+ ASSERT(details_.type() != NONEXISTENT);
+ return details_.IsReadOnly();
+ }
+
+ bool IsField() {
+ ASSERT(!(details_.type() == FIELD && !IsFound()));
+ return details_.type() == FIELD;
+ }
+
+ bool IsNormal() {
+ ASSERT(!(details_.type() == NORMAL && !IsFound()));
+ return details_.type() == NORMAL;
+ }
+
+ bool IsConstantFunction() {
+ ASSERT(!(details_.type() == CONSTANT_FUNCTION && !IsFound()));
+ return details_.type() == CONSTANT_FUNCTION;
+ }
+
bool IsDontDelete() { return details_.IsDontDelete(); }
bool IsDontEnum() { return details_.IsDontEnum(); }
bool IsDeleted() { return details_.IsDeleted(); }
bool IsFound() { return lookup_type_ != NOT_FOUND; }
+ bool IsTransition() { return lookup_type_ == TRANSITION_TYPE; }
bool IsHandler() { return lookup_type_ == HANDLER_TYPE; }
+ bool IsInterceptor() { return lookup_type_ == INTERCEPTOR_TYPE; }
// Is the result is a property excluding transitions and the null descriptor?
bool IsProperty() {
- return IsFound() && IsPropertyDescriptor(this);
+ return IsFound() && !IsTransition();
}
bool IsCacheable() { return cacheable_; }
@@ -303,36 +290,59 @@ class LookupResult BASE_EMBEDDED {
case CONSTANT_FUNCTION:
return GetConstantFunction();
default:
- return Smi::FromInt(0);
+ return Isolate::Current()->heap()->the_hole_value();
}
}
+ Map* GetTransitionTarget() {
+ ASSERT(IsTransition());
+ TransitionArray* transitions = holder()->map()->transitions();
+ return transitions->GetTarget(number_);
+ }
+
+ PropertyDetails GetTransitionDetails(Map* map) {
+ ASSERT(IsTransition());
+ TransitionArray* transitions = map->transitions();
+ return transitions->GetTargetDetails(number_);
+ }
+
+ PropertyDetails GetTransitionDetails() {
+ return GetTransitionDetails(holder()->map());
+ }
+
+ bool IsTransitionToField(Map* map) {
+ return IsTransition() && GetTransitionDetails(map).type() == FIELD;
+ }
Map* GetTransitionMap() {
- ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == MAP_TRANSITION ||
- type() == ELEMENTS_TRANSITION ||
- type() == CONSTANT_TRANSITION);
+ ASSERT(IsTransition());
return Map::cast(GetValue());
}
Map* GetTransitionMapFromMap(Map* map) {
+ ASSERT(IsTransition());
+ return map->transitions()->GetTarget(number_);
+ }
+
+ int GetTransitionIndex() {
+ ASSERT(IsTransition());
+ return number_;
+ }
+
+ int GetDescriptorIndex() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == MAP_TRANSITION);
- return Map::cast(map->instance_descriptors()->GetValue(number_));
+ return number_;
}
int GetFieldIndex() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == FIELD);
+ ASSERT(IsField());
return Descriptor::IndexFromValue(GetValue());
}
int GetLocalFieldIndexFromMap(Map* map) {
- ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == FIELD);
- return Descriptor::IndexFromValue(
- map->instance_descriptors()->GetValue(number_)) -
+ ASSERT(IsField());
+ return Descriptor::IndexFromValue(GetValueFromMap(map)) -
map->inobject_properties();
}
@@ -347,16 +357,15 @@ class LookupResult BASE_EMBEDDED {
}
JSFunction* GetConstantFunctionFromMap(Map* map) {
- ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
ASSERT(type() == CONSTANT_FUNCTION);
- return JSFunction::cast(map->instance_descriptors()->GetValue(number_));
+ return JSFunction::cast(GetValueFromMap(map));
}
Object* GetCallbackObject() {
if (lookup_type_ == CONSTANT_TYPE) {
- // For now we only have the __proto__ as constant type.
return HEAP->prototype_accessors();
}
+ ASSERT(!IsTransition());
return GetValue();
}
@@ -366,14 +375,19 @@ class LookupResult BASE_EMBEDDED {
Object* GetValue() {
if (lookup_type_ == DESCRIPTOR_TYPE) {
- DescriptorArray* descriptors = holder()->map()->instance_descriptors();
- return descriptors->GetValue(number_);
+ return GetValueFromMap(holder()->map());
}
// In the dictionary case, the data is held in the value field.
ASSERT(lookup_type_ == DICTIONARY_TYPE);
return holder()->GetNormalizedProperty(this);
}
+ Object* GetValueFromMap(Map* map) const {
+ ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
+ ASSERT(number_ < map->NumberOfOwnDescriptors());
+ return map->instance_descriptors()->GetValue(number_);
+ }
+
void Iterate(ObjectVisitor* visitor);
private:
@@ -384,6 +398,7 @@ class LookupResult BASE_EMBEDDED {
enum {
NOT_FOUND,
DESCRIPTOR_TYPE,
+ TRANSITION_TYPE,
DICTIONARY_TYPE,
HANDLER_TYPE,
INTERCEPTOR_TYPE,
diff --git a/src/3rdparty/v8/src/proxy.js b/src/3rdparty/v8/src/proxy.js
index 4e86c88..53a3572 100644
--- a/src/3rdparty/v8/src/proxy.js
+++ b/src/3rdparty/v8/src/proxy.js
@@ -31,7 +31,7 @@ global.Proxy = new $Object();
var $Proxy = global.Proxy
-$Proxy.create = function(handler, proto) {
+function ProxyCreate(handler, proto) {
if (!IS_SPEC_OBJECT(handler))
throw MakeTypeError("handler_non_object", ["create"])
if (IS_UNDEFINED(proto))
@@ -41,7 +41,7 @@ $Proxy.create = function(handler, proto) {
return %CreateJSProxy(handler, proto)
}
-$Proxy.createFunction = function(handler, callTrap, constructTrap) {
+function ProxyCreateFunction(handler, callTrap, constructTrap) {
if (!IS_SPEC_OBJECT(handler))
throw MakeTypeError("handler_non_object", ["create"])
if (!IS_SPEC_FUNCTION(callTrap))
@@ -62,6 +62,11 @@ $Proxy.createFunction = function(handler, callTrap, constructTrap) {
handler, callTrap, constructTrap, $Function.prototype)
}
+%CheckIsBootstrapping()
+InstallFunctions($Proxy, DONT_ENUM, [
+ "create", ProxyCreate,
+ "createFunction", ProxyCreateFunction
+])
////////////////////////////////////////////////////////////////////////////////
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
index aa67919..16766ca 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
@@ -38,8 +38,10 @@ namespace internal {
#ifdef V8_INTERPRETED_REGEXP
-RegExpMacroAssemblerIrregexp::RegExpMacroAssemblerIrregexp(Vector<byte> buffer)
- : buffer_(buffer),
+RegExpMacroAssemblerIrregexp::RegExpMacroAssemblerIrregexp(Vector<byte> buffer,
+ Zone* zone)
+ : RegExpMacroAssembler(zone),
+ buffer_(buffer),
pc_(0),
own_buffer_(false),
advance_current_end_(kInvalidPC) {
@@ -203,8 +205,9 @@ void RegExpMacroAssemblerIrregexp::PushBacktrack(Label* l) {
}
-void RegExpMacroAssemblerIrregexp::Succeed() {
+bool RegExpMacroAssemblerIrregexp::Succeed() {
Emit(BC_SUCCEED, 0);
+ return false; // Restart matching for global regexp not supported.
}
@@ -407,17 +410,6 @@ void RegExpMacroAssemblerIrregexp::CheckNotBackReferenceIgnoreCase(
}
-void RegExpMacroAssemblerIrregexp::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- ASSERT(reg1 >= 0);
- ASSERT(reg1 <= kMaxRegister);
- Emit(BC_CHECK_NOT_REGS_EQUAL, reg1);
- Emit32(reg2);
- EmitOrLink(on_not_equal);
-}
-
-
void RegExpMacroAssemblerIrregexp::CheckCharacters(
Vector<const uc16> str,
int cp_offset,
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
index 25cb68d..4bc2980 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
@@ -1,4 +1,4 @@
-// Copyright 2008-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -48,7 +48,7 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
// for code generation and assumes its size to be buffer_size. If the buffer
// is too small, a fatal error occurs. No deallocation of the buffer is done
// upon destruction of the assembler.
- explicit RegExpMacroAssemblerIrregexp(Vector<byte>);
+ RegExpMacroAssemblerIrregexp(Vector<byte>, Zone* zone);
virtual ~RegExpMacroAssemblerIrregexp();
// The byte-code interpreter checks on each push anyway.
virtual int stack_limit_slack() { return 1; }
@@ -59,7 +59,7 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
virtual void Backtrack();
virtual void GoTo(Label* label);
virtual void PushBacktrack(Label* label);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void Fail();
virtual void PopRegister(int register_index);
virtual void PushRegister(int register_index,
@@ -103,7 +103,6 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckCharacters(Vector<const uc16> str,
int cp_offset,
Label* on_failure,
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
index b7aeac4..f878e8c 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,6 +35,7 @@ namespace internal {
RegExpMacroAssemblerTracer::RegExpMacroAssemblerTracer(
RegExpMacroAssembler* assembler) :
+ RegExpMacroAssembler(assembler->zone()),
assembler_(assembler) {
unsigned int type = assembler->Implementation();
ASSERT(type < 5);
@@ -102,14 +103,15 @@ void RegExpMacroAssemblerTracer::PushBacktrack(Label* label) {
}
-void RegExpMacroAssemblerTracer::Succeed() {
- PrintF(" Succeed();\n");
- assembler_->Succeed();
+bool RegExpMacroAssemblerTracer::Succeed() {
+ bool restart = assembler_->Succeed();
+ PrintF(" Succeed();%s\n", restart ? " [restart for global match]" : "");
+ return restart;
}
void RegExpMacroAssemblerTracer::Fail() {
- PrintF(" Fail();\n");
+ PrintF(" Fail();");
assembler_->Fail();
}
@@ -381,17 +383,6 @@ void RegExpMacroAssemblerTracer::CheckNotBackReferenceIgnoreCase(
}
-void RegExpMacroAssemblerTracer::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- PrintF(" CheckNotRegistersEqual(reg1=%d, reg2=%d, label[%08x]);\n",
- reg1,
- reg2,
- LabelToInt(on_not_equal));
- assembler_->CheckNotRegistersEqual(reg1, reg2, on_not_equal);
-}
-
-
void RegExpMacroAssemblerTracer::CheckCharacters(Vector<const uc16> str,
int cp_offset,
Label* on_failure,
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
index 3fd4d8b..ac262df 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
@@ -59,7 +59,6 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(unsigned c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned and_with,
@@ -98,7 +97,7 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler.cc b/src/3rdparty/v8/src/regexp-macro-assembler.cc
index b6fb3c5..82ba34d 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,7 +35,10 @@
namespace v8 {
namespace internal {
-RegExpMacroAssembler::RegExpMacroAssembler() : slow_safe_compiler_(false) {
+RegExpMacroAssembler::RegExpMacroAssembler(Zone* zone)
+ : slow_safe_compiler_(false),
+ global_mode_(NOT_GLOBAL),
+ zone_(zone) {
}
@@ -54,8 +57,8 @@ bool RegExpMacroAssembler::CanReadUnaligned() {
#ifndef V8_INTERPRETED_REGEXP // Avoid unused code, e.g., on ARM.
-NativeRegExpMacroAssembler::NativeRegExpMacroAssembler()
- : RegExpMacroAssembler() {
+NativeRegExpMacroAssembler::NativeRegExpMacroAssembler(Zone* zone)
+ : RegExpMacroAssembler(zone) {
}
@@ -64,11 +67,7 @@ NativeRegExpMacroAssembler::~NativeRegExpMacroAssembler() {
bool NativeRegExpMacroAssembler::CanReadUnaligned() {
-#ifdef V8_TARGET_CAN_READ_UNALIGNED
- return !slow_safe();
-#else
- return false;
-#endif
+ return FLAG_enable_unaligned_accesses && !slow_safe();
}
const byte* NativeRegExpMacroAssembler::StringCharacterPosition(
@@ -149,6 +148,7 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
input_start,
input_end,
offsets_vector,
+ offsets_vector_length,
isolate);
return res;
}
@@ -161,6 +161,7 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
const byte* input_start,
const byte* input_end,
int* output,
+ int output_size,
Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
// Ensure that the minimum stack has been allocated.
@@ -174,10 +175,10 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
input_start,
input_end,
output,
+ output_size,
stack_base,
direct_call,
isolate);
- ASSERT(result <= SUCCESS);
ASSERT(result >= RETRY);
if (result == EXCEPTION && !isolate->has_pending_exception()) {
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler.h b/src/3rdparty/v8/src/regexp-macro-assembler.h
index 8587435..bcf3673 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -63,7 +63,7 @@ class RegExpMacroAssembler {
kCheckStackLimit = true
};
- RegExpMacroAssembler();
+ explicit RegExpMacroAssembler(Zone* zone);
virtual ~RegExpMacroAssembler();
// The maximal number of pushes between stack checks. Users must supply
// kCheckStackLimit flag to push operations (instead of kNoStackLimitCheck)
@@ -128,10 +128,6 @@ class RegExpMacroAssembler {
// array, and if the found byte is non-zero, we jump to the on_bit_set label.
virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set) = 0;
- virtual void CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) = 0;
-
// Checks whether the given offset from the current position is before
// the end of the string. May overwrite the current character.
virtual void CheckPosition(int cp_offset, Label* on_outside_input) {
@@ -174,7 +170,8 @@ class RegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg) = 0;
virtual void SetCurrentPositionFromEnd(int by) = 0;
virtual void SetRegister(int register_index, int to) = 0;
- virtual void Succeed() = 0;
+ // Return whether the matching (with a global regexp) will be restarted.
+ virtual bool Succeed() = 0;
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset) = 0;
virtual void ClearRegisters(int reg_from, int reg_to) = 0;
virtual void WriteStackPointerToRegister(int reg) = 0;
@@ -183,8 +180,21 @@ class RegExpMacroAssembler {
void set_slow_safe(bool ssc) { slow_safe_compiler_ = ssc; }
bool slow_safe() { return slow_safe_compiler_; }
+ enum GlobalMode { NOT_GLOBAL, GLOBAL, GLOBAL_NO_ZERO_LENGTH_CHECK };
+ // Set whether the regular expression has the global flag. Exiting due to
+ // a failure in a global regexp may still mean success overall.
+ inline void set_global_mode(GlobalMode mode) { global_mode_ = mode; }
+ inline bool global() { return global_mode_ != NOT_GLOBAL; }
+ inline bool global_with_zero_length_check() {
+ return global_mode_ == GLOBAL;
+ }
+
+ Zone* zone() const { return zone_; }
+
private:
bool slow_safe_compiler_;
+ bool global_mode_;
+ Zone* zone_;
};
@@ -206,7 +216,7 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
// capture positions.
enum Result { RETRY = -2, EXCEPTION = -1, FAILURE = 0, SUCCESS = 1 };
- NativeRegExpMacroAssembler();
+ explicit NativeRegExpMacroAssembler(Zone* zone);
virtual ~NativeRegExpMacroAssembler();
virtual bool CanReadUnaligned();
@@ -249,6 +259,7 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
const byte* input_start,
const byte* input_end,
int* output,
+ int output_size,
Isolate* isolate);
};
diff --git a/src/3rdparty/v8/src/regexp-stack.cc b/src/3rdparty/v8/src/regexp-stack.cc
index ff9547f..325a149 100644
--- a/src/3rdparty/v8/src/regexp-stack.cc
+++ b/src/3rdparty/v8/src/regexp-stack.cc
@@ -51,6 +51,7 @@ RegExpStack::RegExpStack()
RegExpStack::~RegExpStack() {
+ thread_local_.Free();
}
diff --git a/src/3rdparty/v8/src/regexp.js b/src/3rdparty/v8/src/regexp.js
index a574f62..a3675f0 100644
--- a/src/3rdparty/v8/src/regexp.js
+++ b/src/3rdparty/v8/src/regexp.js
@@ -140,18 +140,15 @@ function BuildResultFromMatchInfo(lastMatchInfo, s) {
var j = REGEXP_FIRST_CAPTURE + 2;
for (var i = 1; i < numResults; i++) {
start = lastMatchInfo[j++];
- end = lastMatchInfo[j++];
- if (end != -1) {
+ if (start != -1) {
+ end = lastMatchInfo[j];
if (start + 1 == end) {
result[i] = %_StringCharAt(s, start);
} else {
result[i] = %_SubString(s, start, end);
}
- } else {
- // Make sure the element is present. Avoid reading the undefined
- // property from the global object since this may change.
- result[i] = void 0;
}
+ j++;
}
return result;
}
@@ -278,6 +275,10 @@ function TrimRegExp(regexp) {
function RegExpToString() {
+ if (!IS_REGEXP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['RegExp.prototype.toString', this]);
+ }
var result = '/' + this.source + '/';
if (this.global) result += 'g';
if (this.ignoreCase) result += 'i';
@@ -423,6 +424,7 @@ function SetUpRegExp() {
LAST_INPUT(lastMatchInfo) = ToString(string);
};
+ %OptimizeObjectForAddingMultipleProperties($RegExp, 22);
%DefineOrRedefineAccessorProperty($RegExp, 'input', RegExpGetInput,
RegExpSetInput, DONT_DELETE);
%DefineOrRedefineAccessorProperty($RegExp, '$_', RegExpGetInput,
@@ -477,6 +479,7 @@ function SetUpRegExp() {
RegExpMakeCaptureGetter(i), NoOpSetter,
DONT_DELETE);
}
+ %ToFastProperties($RegExp);
}
SetUpRegExp();
diff --git a/src/3rdparty/v8/src/rewriter.cc b/src/3rdparty/v8/src/rewriter.cc
index e58ddb4..6541546 100644
--- a/src/3rdparty/v8/src/rewriter.cc
+++ b/src/3rdparty/v8/src/rewriter.cc
@@ -38,12 +38,12 @@ namespace internal {
class Processor: public AstVisitor {
public:
- explicit Processor(Variable* result)
+ Processor(Variable* result, Zone* zone)
: result_(result),
result_assigned_(false),
is_set_(false),
in_try_(false),
- factory_(isolate()) { }
+ factory_(isolate(), zone) { }
virtual ~Processor() { }
@@ -230,8 +230,8 @@ EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
-// Assumes code has been parsed and scopes have been analyzed. Mutates the
-// AST, so the AST should not continue to be used in the case of failure.
+// Assumes code has been parsed. Mutates the AST, so the AST should not
+// continue to be used in the case of failure.
bool Rewriter::Rewrite(CompilationInfo* info) {
FunctionLiteral* function = info->function();
ASSERT(function != NULL);
@@ -243,7 +243,7 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
if (!body->is_empty()) {
Variable* result = scope->NewTemporary(
info->isolate()->factory()->result_symbol());
- Processor processor(result);
+ Processor processor(result, info->zone());
processor.Process(body);
if (processor.HasStackOverflow()) return false;
@@ -257,12 +257,12 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
// coincides with the end of the with scope which is the position of '1'.
int position = function->end_position();
VariableProxy* result_proxy = processor.factory()->NewVariableProxy(
- result->name(), false, position);
+ result->name(), false, Interface::NewValue(), position);
result_proxy->BindTo(result);
Statement* result_statement =
processor.factory()->NewReturnStatement(result_proxy);
result_statement->set_statement_pos(position);
- body->Add(result_statement);
+ body->Add(result_statement, info->zone());
}
}
diff --git a/src/3rdparty/v8/src/runtime-profiler.cc b/src/3rdparty/v8/src/runtime-profiler.cc
index 568e48e..23f41fa 100644
--- a/src/3rdparty/v8/src/runtime-profiler.cc
+++ b/src/3rdparty/v8/src/runtime-profiler.cc
@@ -34,6 +34,7 @@
#include "compilation-cache.h"
#include "deoptimizer.h"
#include "execution.h"
+#include "full-codegen.h"
#include "global-handles.h"
#include "isolate-inl.h"
#include "mark-compact.h"
@@ -65,16 +66,24 @@ static const int kSizeLimit = 1500;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
+// If the function optimization was disabled due to high deoptimization count,
+// but the function is hot and has been seen on the stack this number of times,
+// then we try to reenable optimization for this function.
+static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
+STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
+STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);
+
// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
-static const int kMaxSizeEarlyOpt = 500;
+static const int kMaxSizeEarlyOpt =
+ 5 * FullCodeGenerator::kBackEdgeDistanceUnit;
Atomic32 RuntimeProfiler::state_ = 0;
@@ -144,15 +153,20 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
PrintF("]\n");
}
- // The next call to the function will trigger optimization.
- function->MarkForLazyRecompilation();
+ if (FLAG_parallel_recompilation) {
+ function->MarkForParallelRecompilation();
+ } else {
+ // The next call to the function will trigger optimization.
+ function->MarkForLazyRecompilation();
+ }
}
void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
// See AlwaysFullCompiler (in compiler.cc) comment on why we need
// Debug::has_break_points().
- ASSERT(function->IsMarkedForLazyRecompilation());
+ ASSERT(function->IsMarkedForLazyRecompilation() ||
+ function->IsMarkedForParallelRecompilation());
if (!FLAG_use_osr ||
isolate_->DebuggerHasBreakPoints() ||
function->IsBuiltin()) {
@@ -211,7 +225,10 @@ int RuntimeProfiler::LookupSample(JSFunction* function) {
for (int i = 0; i < kSamplerWindowSize; i++) {
Object* sample = sampler_window_[i];
if (sample != NULL) {
- if (function == sample) {
+ bool fits = FLAG_lookup_sample_by_shared
+ ? (function->shared() == JSFunction::cast(sample)->shared())
+ : (function == JSFunction::cast(sample));
+ if (fits) {
weight += sampler_window_weight_[i];
}
}
@@ -263,30 +280,45 @@ void RuntimeProfiler::OptimizeNow() {
}
}
- Code* shared_code = function->shared()->code();
+ SharedFunctionInfo* shared = function->shared();
+ Code* shared_code = shared->code();
+
if (shared_code->kind() != Code::FUNCTION) continue;
- if (function->IsMarkedForLazyRecompilation()) {
+ if (function->IsMarkedForLazyRecompilation() ||
+ function->IsMarkedForParallelRecompilation()) {
int nesting = shared_code->allow_osr_at_loop_nesting_level();
if (nesting == 0) AttemptOnStackReplacement(function);
int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
}
- // Do not record non-optimizable functions.
- if (!function->IsOptimizable()) continue;
- if (function->shared()->optimization_disabled()) continue;
-
// Only record top-level code on top of the execution stack and
// avoid optimizing excessively large scripts since top-level code
// will be executed only once.
const int kMaxToplevelSourceSize = 10 * 1024;
- if (function->shared()->is_toplevel()
- && (frame_count > 1
- || function->shared()->SourceSize() > kMaxToplevelSourceSize)) {
+ if (shared->is_toplevel() &&
+ (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
continue;
}
+ // Do not record non-optimizable functions.
+ if (shared->optimization_disabled()) {
+ if (shared->deopt_count() >= FLAG_max_opt_count) {
+ // If optimization was disabled due to many deoptimizations,
+ // then check if the function is hot and try to reenable optimization.
+ int ticks = shared_code->profiler_ticks();
+ if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
+ shared_code->set_profiler_ticks(0);
+ shared->TryReenableOptimization();
+ } else {
+ shared_code->set_profiler_ticks(ticks + 1);
+ }
+ }
+ continue;
+ }
+ if (!function->IsOptimizable()) continue;
+
if (FLAG_watch_ic_patching) {
int ticks = shared_code->profiler_ticks();
@@ -309,7 +341,7 @@ void RuntimeProfiler::OptimizeNow() {
}
}
} else if (!any_ic_changed_ &&
- shared_code->instruction_size() < kMaxSizeEarlyOpt) {
+ shared_code->instruction_size() < kMaxSizeEarlyOpt) {
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
Optimize(function, "small function");
diff --git a/src/3rdparty/v8/src/runtime.cc b/src/3rdparty/v8/src/runtime.cc
index 919bf7a..08b0cd1 100644
--- a/src/3rdparty/v8/src/runtime.cc
+++ b/src/3rdparty/v8/src/runtime.cc
@@ -46,6 +46,7 @@
#include "isolate-inl.h"
#include "jsregexp.h"
#include "json-parser.h"
+#include "json-stringifier.h"
#include "liveedit.h"
#include "liveobjectlist-inl.h"
#include "misc-intrinsics.h"
@@ -54,7 +55,7 @@
#include "runtime-profiler.h"
#include "runtime.h"
#include "scopeinfo.h"
-#include "smart-array-pointer.h"
+#include "smart-pointers.h"
#include "string-search.h"
#include "stub-cache.h"
#include "v8threads.h"
@@ -208,8 +209,10 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
// Pixel elements cannot be created using an object literal.
ASSERT(!copy->HasExternalArrayElements());
switch (copy->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
FixedArray* elements = FixedArray::cast(copy->elements());
if (elements->map() == heap->fixed_cow_array_map()) {
isolate->counters()->cow_arrays_created_runtime()->Increment();
@@ -223,7 +226,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
Object* value = elements->get(i);
ASSERT(value->IsSmi() ||
value->IsTheHole() ||
- (copy->GetElementsKind() == FAST_ELEMENTS));
+ (IsFastObjectElementsKind(copy->GetElementsKind())));
if (value->IsJSObject()) {
JSObject* js_object = JSObject::cast(value);
{ MaybeObject* maybe_result = DeepCopyBoilerplate(isolate,
@@ -268,6 +271,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// No contained objects, nothing to do.
break;
}
@@ -300,7 +304,7 @@ static Handle<Map> ComputeObjectLiteralMap(
}
}
// If we only have symbols and array indices among keys then we can
- // use the map cache in the global context.
+ // use the map cache in the native context.
const int kMaxKeys = 10;
if ((number_of_symbol_keys == number_of_properties) &&
(number_of_symbol_keys < kMaxKeys)) {
@@ -339,14 +343,14 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
Handle<FixedArray> constant_properties,
bool should_have_fast_elements,
bool has_function_literal) {
- // Get the global context from the literals array. This is the
+ // Get the native context from the literals array. This is the
// context in which the function was created and we use the object
// function from this context to create the object literal. We do
- // not use the object function from the current global context
+ // not use the object function from the current native context
// because this might be the object function from another context
// which we should not have access to.
Handle<Context> context =
- Handle<Context>(JSFunction::GlobalContextFromLiterals(*literals));
+ Handle<Context>(JSFunction::NativeContextFromLiterals(*literals));
// In case we have function literals, we want the object to be in
// slow properties mode for now. We don't go in the map cache because
@@ -452,7 +456,7 @@ MaybeObject* TransitionElements(Handle<Object> object,
}
-static const int kSmiOnlyLiteralMinimumLength = 1024;
+static const int kSmiLiteralMinimumLength = 1024;
Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
@@ -461,7 +465,7 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
Handle<FixedArray> elements) {
// Create the JSArray.
Handle<JSFunction> constructor(
- JSFunction::GlobalContextFromLiterals(*literals)->array_function());
+ JSFunction::NativeContextFromLiterals(*literals)->array_function());
Handle<JSArray> object =
Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor));
@@ -470,23 +474,22 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(elements->get(1)));
- Context* global_context = isolate->context()->global_context();
- if (constant_elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- object->set_map(Map::cast(global_context->smi_js_array_map()));
- } else if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
- object->set_map(Map::cast(global_context->double_js_array_map()));
- } else {
- object->set_map(Map::cast(global_context->object_js_array_map()));
- }
+ ASSERT(IsFastElementsKind(constant_elements_kind));
+ Context* native_context = isolate->context()->native_context();
+ Object* maybe_maps_array = native_context->js_array_maps();
+ ASSERT(!maybe_maps_array->IsUndefined());
+ Object* maybe_map = FixedArray::cast(maybe_maps_array)->get(
+ constant_elements_kind);
+ ASSERT(maybe_map->IsMap());
+ object->set_map(Map::cast(maybe_map));
Handle<FixedArrayBase> copied_elements_values;
- if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastDoubleElementsKind(constant_elements_kind)) {
ASSERT(FLAG_smi_only_arrays);
copied_elements_values = isolate->factory()->CopyFixedDoubleArray(
Handle<FixedDoubleArray>::cast(constant_elements_values));
} else {
- ASSERT(constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- constant_elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind));
const bool is_cow =
(constant_elements_values->map() ==
isolate->heap()->fixed_cow_array_map());
@@ -522,15 +525,22 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
object->set_elements(*copied_elements_values);
object->set_length(Smi::FromInt(copied_elements_values->length()));
- // Ensure that the boilerplate object has FAST_ELEMENTS, unless the flag is
+ // Ensure that the boilerplate object has FAST_*_ELEMENTS, unless the flag is
// on or the object is larger than the threshold.
if (!FLAG_smi_only_arrays &&
- constant_elements_values->length() < kSmiOnlyLiteralMinimumLength) {
- if (object->GetElementsKind() != FAST_ELEMENTS) {
- CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure());
+ constant_elements_values->length() < kSmiLiteralMinimumLength) {
+ ElementsKind elements_kind = object->GetElementsKind();
+ if (!IsFastObjectElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ CHECK(!TransitionElements(object, FAST_HOLEY_ELEMENTS,
+ isolate)->IsFailure());
+ } else {
+ CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure());
+ }
}
}
+ object->ValidateElements();
return object;
}
@@ -626,6 +636,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteral) {
// Check if boilerplate exists. If not, create it first.
Handle<Object> boilerplate(literals->get(literals_index), isolate);
if (*boilerplate == isolate->heap()->undefined_value()) {
+ ASSERT(*elements != isolate->heap()->empty_fixed_array());
boilerplate =
Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
if (boilerplate.is_null()) return Failure::Exception();
@@ -663,7 +674,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSProxy) {
ASSERT(args.length() == 2);
- Object* handler = args[0];
+ CONVERT_ARG_CHECKED(JSReceiver, handler, 0);
Object* prototype = args[1];
Object* used_prototype =
prototype->IsJSReceiver() ? prototype : isolate->heap()->null_value();
@@ -673,9 +684,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSProxy) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSFunctionProxy) {
ASSERT(args.length() == 4);
- Object* handler = args[0];
+ CONVERT_ARG_CHECKED(JSReceiver, handler, 0);
Object* call_trap = args[1];
- Object* construct_trap = args[2];
+ RUNTIME_ASSERT(call_trap->IsJSFunction() || call_trap->IsJSFunctionProxy());
+ CONVERT_ARG_CHECKED(JSFunction, construct_trap, 2);
Object* prototype = args[3];
Object* used_prototype =
prototype->IsJSReceiver() ? prototype : isolate->heap()->null_value();
@@ -745,7 +757,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) {
Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
table = ObjectHashSetAdd(table, key);
holder->set_table(*table);
- return isolate->heap()->undefined_symbol();
+ return isolate->heap()->undefined_value();
}
@@ -767,7 +779,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) {
Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
table = ObjectHashSetRemove(table, key);
holder->set_table(*table);
- return isolate->heap()->undefined_symbol();
+ return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetGetSize) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
+ Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
+ return Smi::FromInt(table->NumberOfElements());
}
@@ -785,8 +806,35 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
- Handle<Object> key(args[1]);
- return ObjectHashTable::cast(holder->table())->Lookup(*key);
+ CONVERT_ARG_HANDLE_CHECKED(Object, key, 1);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ return lookup->IsTheHole() ? isolate->heap()->undefined_value() : *lookup;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapHas) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Object, key, 1);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ return isolate->heap()->ToBoolean(!lookup->IsTheHole());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapDelete) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Object, key, 1);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ Handle<ObjectHashTable> new_table =
+ PutIntoObjectHashTable(table, key, isolate->factory()->the_hole_value());
+ holder->set_table(*new_table);
+ return isolate->heap()->ToBoolean(!lookup->IsTheHole());
}
@@ -794,12 +842,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
- Handle<Object> key(args[1]);
- Handle<Object> value(args[2]);
+ CONVERT_ARG_HANDLE_CHECKED(Object, key, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
holder->set_table(*new_table);
- return *value;
+ return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGetSize) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
+ return Smi::FromInt(table->NumberOfElements());
}
@@ -816,11 +873,38 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapGet) {
- NoHandleAllocation ha;
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, key, 1);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(weakmap->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ return lookup->IsTheHole() ? isolate->heap()->undefined_value() : *lookup;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapHas) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, key, 1);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(weakmap->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ return isolate->heap()->ToBoolean(!lookup->IsTheHole());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapDelete) {
+ HandleScope scope(isolate);
ASSERT(args.length() == 2);
CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
CONVERT_ARG_HANDLE_CHECKED(JSReceiver, key, 1);
- return ObjectHashTable::cast(weakmap->table())->Lookup(*key);
+ Handle<ObjectHashTable> table(ObjectHashTable::cast(weakmap->table()));
+ Handle<Object> lookup(table->Lookup(*key));
+ Handle<ObjectHashTable> new_table =
+ PutIntoObjectHashTable(table, key, isolate->factory()->the_hole_value());
+ weakmap->set_table(*new_table);
+ return isolate->heap()->ToBoolean(!lookup->IsTheHole());
}
@@ -833,7 +917,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapSet) {
Handle<ObjectHashTable> table(ObjectHashTable::cast(weakmap->table()));
Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
weakmap->set_table(*new_table);
- return *value;
+ return isolate->heap()->undefined_value();
}
@@ -889,13 +973,13 @@ static void GetOwnPropertyImplementation(JSObject* obj,
LookupResult* result) {
obj->LocalLookupRealNamedProperty(name, result);
- if (!result->IsProperty()) {
- Object* proto = obj->GetPrototype();
- if (proto->IsJSObject() &&
- JSObject::cast(proto)->map()->is_hidden_prototype())
- GetOwnPropertyImplementation(JSObject::cast(proto),
- name, result);
- }
+ if (result->IsFound()) return;
+
+ Object* proto = obj->GetPrototype();
+ if (proto->IsJSObject() &&
+ JSObject::cast(proto)->map()->is_hidden_prototype())
+ GetOwnPropertyImplementation(JSObject::cast(proto),
+ name, result);
}
@@ -1007,7 +1091,7 @@ static MaybeObject* GetOwnProperty(Isolate* isolate,
// This could be an element.
uint32_t index;
if (name->AsArrayIndex(&index)) {
- switch (obj->HasLocalElement(index)) {
+ switch (obj->GetLocalElementType(index)) {
case JSObject::UNDEFINED_ELEMENT:
return heap->undefined_value();
@@ -1106,7 +1190,7 @@ static MaybeObject* GetOwnProperty(Isolate* isolate,
elms->set(ENUMERABLE_INDEX, heap->ToBoolean(!result.IsDontEnum()));
elms->set(CONFIGURABLE_INDEX, heap->ToBoolean(!result.IsDontDelete()));
- bool is_js_accessor = (result.type() == CALLBACKS) &&
+ bool is_js_accessor = result.IsPropertyCallbacks() &&
(result.GetCallbackObject()->IsAccessorPair());
if (is_js_accessor) {
@@ -1114,11 +1198,13 @@ static MaybeObject* GetOwnProperty(Isolate* isolate,
elms->set(IS_ACCESSOR_INDEX, heap->true_value());
AccessorPair* accessors = AccessorPair::cast(result.GetCallbackObject());
- if (CheckAccess(*obj, *name, &result, v8::ACCESS_GET)) {
- elms->set(GETTER_INDEX, accessors->GetComponent(ACCESSOR_GETTER));
+ Object* getter = accessors->GetComponent(ACCESSOR_GETTER);
+ if (!getter->IsMap() && CheckAccess(*obj, *name, &result, v8::ACCESS_GET)) {
+ elms->set(GETTER_INDEX, getter);
}
- if (CheckAccess(*obj, *name, &result, v8::ACCESS_SET)) {
- elms->set(SETTER_INDEX, accessors->GetComponent(ACCESSOR_SETTER));
+ Object* setter = accessors->GetComponent(ACCESSOR_SETTER);
+ if (!setter->IsMap() && CheckAccess(*obj, *name, &result, v8::ACCESS_SET)) {
+ elms->set(SETTER_INDEX, setter);
}
} else {
elms->set(IS_ACCESSOR_INDEX, heap->false_value());
@@ -1179,7 +1265,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpCompile) {
CONVERT_ARG_HANDLE_CHECKED(JSRegExp, re, 0);
CONVERT_ARG_HANDLE_CHECKED(String, pattern, 1);
CONVERT_ARG_HANDLE_CHECKED(String, flags, 2);
- Handle<Object> result = RegExpImpl::Compile(re, pattern, flags);
+ Handle<Object> result =
+ RegExpImpl::Compile(re, pattern, flags, isolate->runtime_zone());
if (result.is_null()) return Failure::Exception();
return *result;
}
@@ -1226,13 +1313,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) {
bool needs_access_checks = old_map->is_access_check_needed();
if (needs_access_checks) {
// Copy map so it won't interfere constructor's initial map.
- Object* new_map;
- { MaybeObject* maybe_new_map = old_map->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
+ Map* new_map;
+ MaybeObject* maybe_new_map = old_map->Copy();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- Map::cast(new_map)->set_is_access_check_needed(false);
- object->set_map(Map::cast(new_map));
+ new_map->set_is_access_check_needed(false);
+ object->set_map(new_map);
}
return isolate->heap()->ToBoolean(needs_access_checks);
}
@@ -1244,13 +1330,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_EnableAccessChecks) {
Map* old_map = object->map();
if (!old_map->is_access_check_needed()) {
// Copy map so it won't interfere constructor's initial map.
- Object* new_map;
- { MaybeObject* maybe_new_map = old_map->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
+ Map* new_map;
+ MaybeObject* maybe_new_map = old_map->Copy();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- Map::cast(new_map)->set_is_access_check_needed(true);
- object->set_map(Map::cast(new_map));
+ new_map->set_is_access_check_needed(true);
+ object->set_map(new_map);
}
return isolate->heap()->undefined_value();
}
@@ -1277,8 +1362,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
CONVERT_ARG_HANDLE_CHECKED(FixedArray, pairs, 1);
CONVERT_SMI_ARG_CHECKED(flags, 2);
- Handle<JSObject> js_global = Handle<JSObject>(isolate->context()->global());
- Handle<JSObject> qml_global = Handle<JSObject>(isolate->context()->qml_global());
+ Handle<JSObject> js_global = Handle<JSObject>(isolate->context()->global_object());
+ Handle<JSObject> qml_global = Handle<JSObject>(isolate->context()->qml_global_object());
// Traverse the name/value pairs and set the properties.
int length = pairs->length();
@@ -1303,16 +1388,23 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
if (is_var || is_const) {
// Lookup the property in the global object, and don't set the
// value of the variable if the property is already there.
- // Do the lookup locally only, see ES5 errata.
+ // Do the lookup locally only, see ES5 erratum.
LookupResult lookup(isolate);
- if (FLAG_es52_globals)
- global->LocalLookup(*name, &lookup, true);
- else
+ if (FLAG_es52_globals) {
+ Object* obj = *global;
+ do {
+ JSObject::cast(obj)->LocalLookup(*name, &lookup, true);
+ if (lookup.IsFound()) break;
+ obj = obj->GetPrototype();
+ } while (obj->IsJSObject() &&
+ JSObject::cast(obj)->map()->is_hidden_prototype());
+ } else {
global->Lookup(*name, &lookup, true);
- if (lookup.IsProperty()) {
+ }
+ if (lookup.IsFound()) {
// We found an existing property. Unless it was an interceptor
// that claims the property is absent, skip this declaration.
- if (lookup.type() != INTERCEPTOR) continue;
+ if (!lookup.IsInterceptor()) continue;
PropertyAttributes attributes = global->GetPropertyAttribute(*name);
if (attributes != ABSENT) continue;
// Fall-through and introduce the absent property by using
@@ -1345,12 +1437,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
LanguageMode language_mode = DeclareGlobalsLanguageMode::decode(flags);
- if (!lookup.IsProperty() || is_function || is_module) {
+ if (!lookup.IsFound() || is_function || is_module) {
// If the local property exists, check that we can reconfigure it
// as required for function declarations.
- if (lookup.IsProperty() && lookup.IsDontDelete()) {
+ if (lookup.IsFound() && lookup.IsDontDelete()) {
if (lookup.IsReadOnly() || lookup.IsDontEnum() ||
- lookup.type() == CALLBACKS) {
+ lookup.IsPropertyCallbacks()) {
return ThrowRedeclarationError(
isolate, is_function ? "function" : "module", name);
}
@@ -1380,7 +1472,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- // Declarations are always made in a function or global context. In the
+ // Declarations are always made in a function or native context. In the
// case of eval code, the context passed is the context of the caller,
// which may be some nested context and not the declaration context.
RUNTIME_ASSERT(args[0]->IsContext());
@@ -1419,7 +1511,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
}
} else {
// Slow case: The property is in the context extension object of a
- // function context or the global object of a global context.
+ // function context or the global object of a native context.
Handle<JSObject> object = Handle<JSObject>::cast(holder);
RETURN_IF_EMPTY_HANDLE(
isolate,
@@ -1460,7 +1552,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
!object->IsJSContextExtensionObject()) {
LookupResult lookup(isolate);
object->Lookup(*name, &lookup);
- if (lookup.IsFound() && (lookup.type() == CALLBACKS)) {
+ if (lookup.IsPropertyCallbacks()) {
return ThrowRedeclarationError(isolate, "const", name);
}
}
@@ -1499,7 +1591,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
RUNTIME_ASSERT(args[2]->IsSmi());
int qml_mode = Smi::cast(args[2])->value();
- JSObject* global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ JSObject* global = qml_mode ? isolate->context()->qml_global_object()
+ : isolate->context()->global_object();
// According to ECMA-262, section 12.2, page 62, the property must
// not be deletable.
@@ -1518,7 +1611,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
JSObject::cast(object)->map()->is_hidden_prototype()) {
JSObject* raw_holder = JSObject::cast(object);
raw_holder->LocalLookup(*name, &lookup, true);
- if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
+ if (lookup.IsInterceptor()) {
HandleScope handle_scope(isolate);
Handle<JSObject> holder(raw_holder);
PropertyAttributes intercepted = holder->GetPropertyAttribute(*name);
@@ -1538,10 +1631,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
}
// Reload global in case the loop above performed a GC.
- global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ global = qml_mode ? isolate->context()->qml_global_object()
+ : isolate->context()->global_object();
if (assign) {
return global->SetProperty(
- *name, args[3], attributes, strict_mode_flag, true);
+ *name, args[3], attributes, strict_mode_flag, JSReceiver::MAY_BE_STORE_FROM_KEYED, true);
}
return isolate->heap()->undefined_value();
}
@@ -1559,7 +1653,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
int qml_mode = Smi::cast(args[2])->value();
// Get the current global object from top.
- JSObject* global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ JSObject* global = qml_mode ? isolate->context()->qml_global_object()
+ : isolate->context()->global_object();
// According to ECMA-262, section 12.2, page 62, the property must
// not be deletable. Since it's a const, it must be READ_ONLY too.
@@ -1573,7 +1668,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
// We use SetLocalPropertyIgnoreAttributes instead
LookupResult lookup(isolate);
global->LocalLookup(*name, &lookup);
- if (!lookup.IsProperty()) {
+ if (!lookup.IsFound()) {
return global->SetLocalPropertyIgnoreAttributes(*name,
*value,
attributes);
@@ -1583,7 +1678,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
// Restore global object from context (in case of GC) and continue
// with setting the value.
HandleScope handle_scope(isolate);
- Handle<JSObject> global(qml_mode?isolate->context()->qml_global():isolate->context()->global());
+ Handle<JSObject> global(qml_mode ? isolate->context()->qml_global_object()
+ : isolate->context()->global_object());
// BUG 1213575: Handle the case where we have to set a read-only
// property through an interceptor and only do it if it's
@@ -1600,14 +1696,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
// constant. For now, we determine this by checking if the
// current value is the hole.
// Strict mode handling not needed (const is disallowed in strict mode).
- PropertyType type = lookup.type();
- if (type == FIELD) {
+ if (lookup.IsField()) {
FixedArray* properties = global->properties();
int index = lookup.GetFieldIndex();
if (properties->get(index)->IsTheHole() || !lookup.IsReadOnly()) {
properties->set(index, *value);
}
- } else if (type == NORMAL) {
+ } else if (lookup.IsNormal()) {
if (global->GetNormalizedProperty(&lookup)->IsTheHole() ||
!lookup.IsReadOnly()) {
global->SetNormalizedProperty(&lookup, *value);
@@ -1615,7 +1710,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
} else {
// Ignore re-initialization of constants that have already been
// assigned a function value.
- ASSERT(lookup.IsReadOnly() && type == CONSTANT_FUNCTION);
+ ASSERT(lookup.IsReadOnly() && lookup.IsConstantFunction());
}
// Use the set value as the result of the operation.
@@ -1630,7 +1725,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
Handle<Object> value(args[0], isolate);
ASSERT(!value->IsTheHole());
- // Initializations are always done in a function or global context.
+ // Initializations are always done in a function or native context.
RUNTIME_ASSERT(args[1]->IsContext());
Handle<Context> context(Context::cast(args[1])->declaration_context());
@@ -1658,7 +1753,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
// global object.
if (attributes == ABSENT) {
Handle<JSObject> global = Handle<JSObject>(
- isolate->context()->global());
+ isolate->context()->global_object());
// Strict mode not needed (const disallowed in strict mode).
RETURN_IF_EMPTY_HANDLE(
isolate,
@@ -1691,14 +1786,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
ASSERT(lookup.IsFound()); // the property was declared
ASSERT(lookup.IsReadOnly()); // and it was declared as read-only
- PropertyType type = lookup.type();
- if (type == FIELD) {
+ if (lookup.IsField()) {
FixedArray* properties = object->properties();
int index = lookup.GetFieldIndex();
if (properties->get(index)->IsTheHole()) {
properties->set(index, *value);
}
- } else if (type == NORMAL) {
+ } else if (lookup.IsNormal()) {
if (object->GetNormalizedProperty(&lookup)->IsTheHole()) {
object->SetNormalizedProperty(&lookup, *value);
}
@@ -1745,7 +1839,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) {
// length of a string, i.e. it is always a Smi. We check anyway for security.
CONVERT_SMI_ARG_CHECKED(index, 2);
CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 3);
- RUNTIME_ASSERT(last_match_info->HasFastElements());
+ RUNTIME_ASSERT(last_match_info->HasFastObjectElements());
RUNTIME_ASSERT(index >= 0);
RUNTIME_ASSERT(index <= subject->length());
isolate->counters()->regexp_entry_runtime()->Increment();
@@ -1780,7 +1874,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpConstructResult) {
AssertNoAllocation no_gc;
HandleScope scope(isolate);
reinterpret_cast<HeapObject*>(new_object)->
- set_map(isolate->global_context()->regexp_result_map());
+ set_map(isolate->native_context()->regexp_result_map());
}
JSArray* array = JSArray::cast(new_object);
array->set_properties(isolate->heap()->empty_fixed_array());
@@ -1932,9 +2026,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) {
// Returns undefined for strict or native functions, or
// the associated global receiver for "normal" functions.
- Context* global_context =
- function->context()->global()->global_context();
- return global_context->global()->global_receiver();
+ Context* native_context =
+ function->context()->global_object()->native_context();
+ return native_context->global_object()->global_receiver();
}
@@ -1949,11 +2043,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MaterializeRegExpLiteral) {
// Get the RegExp function from the context in the literals array.
// This is the RegExp function from the context in which the
// function was created. We do not use the RegExp function from the
- // current global context because this might be the RegExp function
+ // current native context because this might be the RegExp function
// from another context which we should not have access to.
Handle<JSFunction> constructor =
Handle<JSFunction>(
- JSFunction::GlobalContextFromLiterals(*literals)->regexp_function());
+ JSFunction::NativeContextFromLiterals(*literals)->regexp_function());
// Compute the regular expression literal.
bool has_pending_exception;
Handle<Object> regexp =
@@ -2011,8 +2105,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) {
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSFunction, f, 0);
- Object* obj = f->RemovePrototype();
- if (obj->IsFailure()) return obj;
+ f->RemovePrototype();
return isolate->heap()->undefined_value();
}
@@ -2105,40 +2198,28 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
RUNTIME_ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSFunction, function, 0);
- MaybeObject* maybe_name =
- isolate->heap()->AllocateStringFromAscii(CStrVector("prototype"));
- String* name;
- if (!maybe_name->To(&name)) return maybe_name;
+ String* name = isolate->heap()->prototype_symbol();
if (function->HasFastProperties()) {
// Construct a new field descriptor with updated attributes.
DescriptorArray* instance_desc = function->map()->instance_descriptors();
- int index = instance_desc->Search(name);
+
+ int index = instance_desc->SearchWithCache(name, function->map());
ASSERT(index != DescriptorArray::kNotFound);
PropertyDetails details = instance_desc->GetDetails(index);
+
CallbacksDescriptor new_desc(name,
instance_desc->GetValue(index),
static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
- details.index());
- // Construct a new field descriptors array containing the new descriptor.
- Object* descriptors_unchecked;
- { MaybeObject* maybe_descriptors_unchecked =
- instance_desc->CopyInsert(&new_desc, REMOVE_TRANSITIONS);
- if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) {
- return maybe_descriptors_unchecked;
- }
- }
- DescriptorArray* new_descriptors =
- DescriptorArray::cast(descriptors_unchecked);
+ details.descriptor_index());
+
// Create a new map featuring the new field descriptors array.
- Object* map_unchecked;
- { MaybeObject* maybe_map_unchecked = function->map()->CopyDropDescriptors();
- if (!maybe_map_unchecked->ToObject(&map_unchecked)) {
- return maybe_map_unchecked;
- }
- }
- Map* new_map = Map::cast(map_unchecked);
- new_map->set_instance_descriptors(new_descriptors);
+ Map* new_map;
+ MaybeObject* maybe_map =
+ function->map()->CopyReplaceDescriptor(
+ instance_desc, &new_desc, index, OMIT_TRANSITION);
+ if (!maybe_map->To(&new_map)) return maybe_map;
+
function->set_map(new_map);
} else { // Dictionary properties.
// Directly manipulate the property details.
@@ -2148,7 +2229,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
PropertyDetails new_details(
static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
details.type(),
- details.index());
+ details.dictionary_index());
function->property_dictionary()->DetailsAtPut(entry, new_details);
}
return function;
@@ -2180,60 +2261,58 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
CONVERT_ARG_HANDLE_CHECKED(JSFunction, target, 0);
Handle<Object> code = args.at<Object>(1);
- Handle<Context> context(target->context());
-
- if (!code->IsNull()) {
- RUNTIME_ASSERT(code->IsJSFunction());
- Handle<JSFunction> fun = Handle<JSFunction>::cast(code);
- Handle<SharedFunctionInfo> shared(fun->shared());
-
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
- return Failure::Exception();
- }
- // Since we don't store the source for this we should never
- // optimize this.
- shared->code()->set_optimizable(false);
- // Set the code, scope info, formal parameter count,
- // and the length of the target function.
- target->shared()->set_code(shared->code());
- target->ReplaceCode(shared->code());
- target->shared()->set_scope_info(shared->scope_info());
- target->shared()->set_length(shared->length());
- target->shared()->set_formal_parameter_count(
- shared->formal_parameter_count());
- // Set the source code of the target function to undefined.
- // SetCode is only used for built-in constructors like String,
- // Array, and Object, and some web code
- // doesn't like seeing source code for constructors.
- target->shared()->set_script(isolate->heap()->undefined_value());
- target->shared()->code()->set_optimizable(false);
- // Clear the optimization hints related to the compiled code as these are no
- // longer valid when the code is overwritten.
- target->shared()->ClearThisPropertyAssignmentsInfo();
- context = Handle<Context>(fun->context());
-
- // Make sure we get a fresh copy of the literal vector to avoid
- // cross context contamination.
- int number_of_literals = fun->NumberOfLiterals();
- Handle<FixedArray> literals =
- isolate->factory()->NewFixedArray(number_of_literals, TENURED);
- if (number_of_literals > 0) {
- // Insert the object, regexp and array functions in the literals
- // array prefix. These are the functions that will be used when
- // creating object, regexp and array literals.
- literals->set(JSFunction::kLiteralGlobalContextIndex,
- context->global_context());
- }
- target->set_literals(*literals);
- target->set_next_function_link(isolate->heap()->undefined_value());
-
- if (isolate->logger()->is_logging() || CpuProfiler::is_profiling(isolate)) {
- isolate->logger()->LogExistingFunction(
- shared, Handle<Code>(shared->code()));
- }
+ if (code->IsNull()) return *target;
+ RUNTIME_ASSERT(code->IsJSFunction());
+ Handle<JSFunction> source = Handle<JSFunction>::cast(code);
+ Handle<SharedFunctionInfo> target_shared(target->shared());
+ Handle<SharedFunctionInfo> source_shared(source->shared());
+
+ if (!JSFunction::EnsureCompiled(source, KEEP_EXCEPTION)) {
+ return Failure::Exception();
}
+ // Set the code, scope info, formal parameter count, and the length
+ // of the target shared function info. Set the source code of the
+ // target function to undefined. SetCode is only used for built-in
+ // constructors like String, Array, and Object, and some web code
+ // doesn't like seeing source code for constructors.
+ target_shared->set_code(source_shared->code());
+ target_shared->set_scope_info(source_shared->scope_info());
+ target_shared->set_length(source_shared->length());
+ target_shared->set_formal_parameter_count(
+ source_shared->formal_parameter_count());
+ target_shared->set_script(isolate->heap()->undefined_value());
+
+ // Since we don't store the source we should never optimize this.
+ target_shared->code()->set_optimizable(false);
+
+ // Clear the optimization hints related to the compiled code as these
+ // are no longer valid when the code is overwritten.
+ target_shared->ClearThisPropertyAssignmentsInfo();
+
+ // Set the code of the target function.
+ target->ReplaceCode(source_shared->code());
+ ASSERT(target->next_function_link()->IsUndefined());
+
+ // Make sure we get a fresh copy of the literal vector to avoid cross
+ // context contamination.
+ Handle<Context> context(source->context());
+ int number_of_literals = source->NumberOfLiterals();
+ Handle<FixedArray> literals =
+ isolate->factory()->NewFixedArray(number_of_literals, TENURED);
+ if (number_of_literals > 0) {
+ literals->set(JSFunction::kLiteralNativeContextIndex,
+ context->native_context());
+ }
target->set_context(*context);
+ target->set_literals(*literals);
+
+ if (isolate->logger()->is_logging_code_events() ||
+ CpuProfiler::is_profiling(isolate)) {
+ isolate->logger()->LogExistingFunction(
+ source_shared, Handle<Code>(source_shared->code()));
+ }
+
return *target;
}
@@ -2266,19 +2345,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringCharCodeAt) {
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(String, subject, 0);
- Object* index = args[1];
- RUNTIME_ASSERT(index->IsNumber());
-
- uint32_t i = 0;
- if (index->IsSmi()) {
- int value = Smi::cast(index)->value();
- if (value < 0) return isolate->heap()->nan_value();
- i = value;
- } else {
- ASSERT(index->IsHeapNumber());
- double value = HeapNumber::cast(index)->value();
- i = static_cast<uint32_t>(DoubleToInteger(value));
- }
+ CONVERT_NUMBER_CHECKED(uint32_t, i, Uint32, args[1]);
// Flatten the string. If someone wants to get a char at an index
// in a cons string, it is likely that more indices will be
@@ -2372,18 +2439,13 @@ class FixedArrayBuilder {
return array_->length();
}
- Handle<JSArray> ToJSArray() {
- Handle<JSArray> result_array = FACTORY->NewJSArrayWithElements(array_);
- result_array->set_length(Smi::FromInt(length_));
- return result_array;
- }
-
Handle<JSArray> ToJSArray(Handle<JSArray> target_array) {
FACTORY->SetContent(target_array, array_);
target_array->set_length(Smi::FromInt(length_));
return target_array;
}
+
private:
Handle<FixedArray> array_;
int length_;
@@ -2502,10 +2564,6 @@ class ReplacementStringBuilder {
character_count_ += by;
}
- Handle<JSArray> GetParts() {
- return array_builder_.ToJSArray();
- }
-
private:
Handle<SeqAsciiString> NewRawAsciiString(int length) {
return heap_->isolate()->factory()->NewRawAsciiString(length);
@@ -2533,26 +2591,26 @@ class ReplacementStringBuilder {
class CompiledReplacement {
public:
- CompiledReplacement()
- : parts_(1), replacement_substrings_(0), simple_hint_(false) {}
+ explicit CompiledReplacement(Zone* zone)
+ : parts_(1, zone), replacement_substrings_(0, zone), zone_(zone) {}
- void Compile(Handle<String> replacement,
+ // Return whether the replacement is simple.
+ bool Compile(Handle<String> replacement,
int capture_count,
int subject_length);
+ // Use Apply only if Compile returned false.
void Apply(ReplacementStringBuilder* builder,
int match_from,
int match_to,
- Handle<JSArray> last_match_info);
+ int32_t* match);
// Number of distinct parts of the replacement pattern.
int parts() {
return parts_.length();
}
- bool simple_hint() {
- return simple_hint_;
- }
+ Zone* zone() const { return zone_; }
private:
enum PartType {
@@ -2612,10 +2670,11 @@ class CompiledReplacement {
};
template<typename Char>
- static bool ParseReplacementPattern(ZoneList<ReplacementPart>* parts,
- Vector<Char> characters,
- int capture_count,
- int subject_length) {
+ bool ParseReplacementPattern(ZoneList<ReplacementPart>* parts,
+ Vector<Char> characters,
+ int capture_count,
+ int subject_length,
+ Zone* zone) {
int length = characters.length();
int last = 0;
for (int i = 0; i < length; i++) {
@@ -2630,7 +2689,8 @@ class CompiledReplacement {
case '$':
if (i > last) {
// There is a substring before. Include the first "$".
- parts->Add(ReplacementPart::ReplacementSubString(last, next_index));
+ parts->Add(ReplacementPart::ReplacementSubString(last, next_index),
+ zone);
last = next_index + 1; // Continue after the second "$".
} else {
// Let the next substring start with the second "$".
@@ -2640,25 +2700,25 @@ class CompiledReplacement {
break;
case '`':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectPrefix());
+ parts->Add(ReplacementPart::SubjectPrefix(), zone);
i = next_index;
last = i + 1;
break;
case '\'':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectSuffix(subject_length));
+ parts->Add(ReplacementPart::SubjectSuffix(subject_length), zone);
i = next_index;
last = i + 1;
break;
case '&':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectMatch());
+ parts->Add(ReplacementPart::SubjectMatch(), zone);
i = next_index;
last = i + 1;
break;
@@ -2691,10 +2751,10 @@ class CompiledReplacement {
}
if (capture_ref > 0) {
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
ASSERT(capture_ref <= capture_count);
- parts->Add(ReplacementPart::SubjectCapture(capture_ref));
+ parts->Add(ReplacementPart::SubjectCapture(capture_ref), zone);
last = next_index + 1;
}
i = next_index;
@@ -2708,10 +2768,10 @@ class CompiledReplacement {
}
if (length > last) {
if (last == 0) {
- parts->Add(ReplacementPart::ReplacementString());
+ // Replacement is simple. Do not use Apply to do the replacement.
return true;
} else {
- parts->Add(ReplacementPart::ReplacementSubString(last, length));
+ parts->Add(ReplacementPart::ReplacementSubString(last, length), zone);
}
}
return false;
@@ -2719,30 +2779,35 @@ class CompiledReplacement {
ZoneList<ReplacementPart> parts_;
ZoneList<Handle<String> > replacement_substrings_;
- bool simple_hint_;
+ Zone* zone_;
};
-void CompiledReplacement::Compile(Handle<String> replacement,
+bool CompiledReplacement::Compile(Handle<String> replacement,
int capture_count,
int subject_length) {
{
AssertNoAllocation no_alloc;
String::FlatContent content = replacement->GetFlatContent();
ASSERT(content.IsFlat());
+ bool simple = false;
if (content.IsAscii()) {
- simple_hint_ = ParseReplacementPattern(&parts_,
- content.ToAsciiVector(),
- capture_count,
- subject_length);
+ simple = ParseReplacementPattern(&parts_,
+ content.ToAsciiVector(),
+ capture_count,
+ subject_length,
+ zone());
} else {
ASSERT(content.IsTwoByte());
- simple_hint_ = ParseReplacementPattern(&parts_,
- content.ToUC16Vector(),
- capture_count,
- subject_length);
+ simple = ParseReplacementPattern(&parts_,
+ content.ToUC16Vector(),
+ capture_count,
+ subject_length,
+ zone());
}
+ if (simple) return true;
}
+
Isolate* isolate = replacement->GetIsolate();
// Find substrings of replacement string and create them as String objects.
int substring_index = 0;
@@ -2752,23 +2817,25 @@ void CompiledReplacement::Compile(Handle<String> replacement,
int from = -tag;
int to = parts_[i].data;
replacement_substrings_.Add(
- isolate->factory()->NewSubString(replacement, from, to));
+ isolate->factory()->NewSubString(replacement, from, to), zone());
parts_[i].tag = REPLACEMENT_SUBSTRING;
parts_[i].data = substring_index;
substring_index++;
} else if (tag == REPLACEMENT_STRING) {
- replacement_substrings_.Add(replacement);
+ replacement_substrings_.Add(replacement, zone());
parts_[i].data = substring_index;
substring_index++;
}
}
+ return false;
}
void CompiledReplacement::Apply(ReplacementStringBuilder* builder,
int match_from,
int match_to,
- Handle<JSArray> last_match_info) {
+ int32_t* match) {
+ ASSERT_LT(0, parts_.length());
for (int i = 0, n = parts_.length(); i < n; i++) {
ReplacementPart part = parts_[i];
switch (part.tag) {
@@ -2784,9 +2851,8 @@ void CompiledReplacement::Apply(ReplacementStringBuilder* builder,
}
case SUBJECT_CAPTURE: {
int capture = part.data;
- FixedArray* match_info = FixedArray::cast(last_match_info->elements());
- int from = RegExpImpl::GetCapture(match_info, capture * 2);
- int to = RegExpImpl::GetCapture(match_info, capture * 2 + 1);
+ int from = match[capture * 2];
+ int to = match[capture * 2 + 1];
if (from >= 0 && to > from) {
builder->AddSubjectSlice(from, to);
}
@@ -2806,7 +2872,8 @@ void CompiledReplacement::Apply(ReplacementStringBuilder* builder,
void FindAsciiStringIndices(Vector<const char> subject,
char pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
ASSERT(limit > 0);
// Collect indices of pattern in subject using memchr.
// Stop after finding at most limit values.
@@ -2817,7 +2884,7 @@ void FindAsciiStringIndices(Vector<const char> subject,
pos = reinterpret_cast<const char*>(
memchr(pos, pattern, subject_end - pos));
if (pos == NULL) return;
- indices->Add(static_cast<int>(pos - subject_start));
+ indices->Add(static_cast<int>(pos - subject_start), zone);
pos++;
limit--;
}
@@ -2829,7 +2896,8 @@ void FindStringIndices(Isolate* isolate,
Vector<const SubjectChar> subject,
Vector<const PatternChar> pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
ASSERT(limit > 0);
// Collect indices of pattern in subject.
// Stop after finding at most limit values.
@@ -2839,7 +2907,7 @@ void FindStringIndices(Isolate* isolate,
while (limit > 0) {
index = search.Search(subject, index);
if (index < 0) return;
- indices->Add(index);
+ indices->Add(index, zone);
index += pattern_length;
limit--;
}
@@ -2850,7 +2918,8 @@ void FindStringIndicesDispatch(Isolate* isolate,
String* subject,
String* pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
{
AssertNoAllocation no_gc;
String::FlatContent subject_content = subject->GetFlatContent();
@@ -2865,20 +2934,23 @@ void FindStringIndicesDispatch(Isolate* isolate,
FindAsciiStringIndices(subject_vector,
pattern_vector[0],
indices,
- limit);
+ limit,
+ zone);
} else {
FindStringIndices(isolate,
subject_vector,
pattern_vector,
indices,
- limit);
+ limit,
+ zone);
}
} else {
FindStringIndices(isolate,
subject_vector,
pattern_content.ToUC16Vector(),
indices,
- limit);
+ limit,
+ zone);
}
} else {
Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
@@ -2887,85 +2959,21 @@ void FindStringIndicesDispatch(Isolate* isolate,
subject_vector,
pattern_content.ToAsciiVector(),
indices,
- limit);
+ limit,
+ zone);
} else {
FindStringIndices(isolate,
subject_vector,
pattern_content.ToUC16Vector(),
indices,
- limit);
- }
- }
- }
-}
-
-
-// Two smis before and after the match, for very long strings.
-const int kMaxBuilderEntriesPerRegExpMatch = 5;
-
-
-static void SetLastMatchInfoNoCaptures(Handle<String> subject,
- Handle<JSArray> last_match_info,
- int match_start,
- int match_end) {
- // Fill last_match_info with a single capture.
- last_match_info->EnsureSize(2 + RegExpImpl::kLastMatchOverhead);
- AssertNoAllocation no_gc;
- FixedArray* elements = FixedArray::cast(last_match_info->elements());
- RegExpImpl::SetLastCaptureCount(elements, 2);
- RegExpImpl::SetLastInput(elements, *subject);
- RegExpImpl::SetLastSubject(elements, *subject);
- RegExpImpl::SetCapture(elements, 0, match_start);
- RegExpImpl::SetCapture(elements, 1, match_end);
-}
-
-
-template <typename SubjectChar, typename PatternChar>
-static bool SearchStringMultiple(Isolate* isolate,
- Vector<const SubjectChar> subject,
- Vector<const PatternChar> pattern,
- String* pattern_string,
- FixedArrayBuilder* builder,
- int* match_pos) {
- int pos = *match_pos;
- int subject_length = subject.length();
- int pattern_length = pattern.length();
- int max_search_start = subject_length - pattern_length;
- StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
- while (pos <= max_search_start) {
- if (!builder->HasCapacity(kMaxBuilderEntriesPerRegExpMatch)) {
- *match_pos = pos;
- return false;
- }
- // Position of end of previous match.
- int match_end = pos + pattern_length;
- int new_pos = search.Search(subject, match_end);
- if (new_pos >= 0) {
- // A match.
- if (new_pos > match_end) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- new_pos);
+ limit,
+ zone);
}
- pos = new_pos;
- builder->Add(pattern_string);
- } else {
- break;
}
}
-
- if (pos < max_search_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- pos + pattern_length,
- subject_length);
- }
- *match_pos = pos;
- return true;
}
-
-
template<typename ResultSeqString>
MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
Isolate* isolate,
@@ -2976,8 +2984,9 @@ MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
ASSERT(subject->IsFlat());
ASSERT(replacement->IsFlat());
- ZoneScope zone_space(isolate, DELETE_ON_EXIT);
- ZoneList<int> indices(8);
+ Zone* zone = isolate->runtime_zone();
+ ZoneScope zone_space(zone, DELETE_ON_EXIT);
+ ZoneList<int> indices(8, zone);
ASSERT_EQ(JSRegExp::ATOM, pattern_regexp->TypeTag());
String* pattern =
String::cast(pattern_regexp->DataAt(JSRegExp::kAtomPatternIndex));
@@ -2985,12 +2994,21 @@ MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
int pattern_len = pattern->length();
int replacement_len = replacement->length();
- FindStringIndicesDispatch(isolate, *subject, pattern, &indices, 0xffffffff);
+ FindStringIndicesDispatch(
+ isolate, *subject, pattern, &indices, 0xffffffff, zone);
int matches = indices.length();
if (matches == 0) return *subject;
- int result_len = (replacement_len - pattern_len) * matches + subject_len;
+ // Detect integer overflow.
+ int64_t result_len_64 =
+ (static_cast<int64_t>(replacement_len) -
+ static_cast<int64_t>(pattern_len)) *
+ static_cast<int64_t>(matches) +
+ static_cast<int64_t>(subject_len);
+ if (result_len_64 > INT_MAX) return Failure::OutOfMemoryException();
+ int result_len = static_cast<int>(result_len_64);
+
int subject_pos = 0;
int result_pos = 0;
@@ -3032,10 +3050,9 @@ MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
subject_len);
}
- SetLastMatchInfoNoCaptures(subject,
- last_match_info,
- indices.at(matches - 1),
- indices.at(matches - 1) + pattern_len);
+ int32_t match_indices[] = { indices.at(matches - 1),
+ indices.at(matches - 1) + pattern_len };
+ RegExpImpl::SetLastMatchInfo(last_match_info, subject, 0, match_indices);
return *result;
}
@@ -3043,133 +3060,101 @@ MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
Isolate* isolate,
- String* subject,
- JSRegExp* regexp,
- String* replacement,
- JSArray* last_match_info) {
+ Handle<String> subject,
+ Handle<JSRegExp> regexp,
+ Handle<String> replacement,
+ Handle<JSArray> last_match_info) {
ASSERT(subject->IsFlat());
ASSERT(replacement->IsFlat());
- HandleScope handles(isolate);
-
- int length = subject->length();
- Handle<String> subject_handle(subject);
- Handle<JSRegExp> regexp_handle(regexp);
- Handle<String> replacement_handle(replacement);
- Handle<JSArray> last_match_info_handle(last_match_info);
- Handle<Object> match = RegExpImpl::Exec(regexp_handle,
- subject_handle,
- 0,
- last_match_info_handle);
- if (match.is_null()) {
- return Failure::Exception();
- }
- if (match->IsNull()) {
- return *subject_handle;
- }
-
- int capture_count = regexp_handle->CaptureCount();
+ bool is_global = regexp->GetFlags().is_global();
+ int capture_count = regexp->CaptureCount();
+ int subject_length = subject->length();
// CompiledReplacement uses zone allocation.
- ZoneScope zone(isolate, DELETE_ON_EXIT);
- CompiledReplacement compiled_replacement;
- compiled_replacement.Compile(replacement_handle,
- capture_count,
- length);
-
- bool is_global = regexp_handle->GetFlags().is_global();
+ Zone* zone = isolate->runtime_zone();
+ ZoneScope zonescope(zone, DELETE_ON_EXIT);
+ CompiledReplacement compiled_replacement(zone);
+ bool simple_replace = compiled_replacement.Compile(replacement,
+ capture_count,
+ subject_length);
// Shortcut for simple non-regexp global replacements
if (is_global &&
- regexp_handle->TypeTag() == JSRegExp::ATOM &&
- compiled_replacement.simple_hint()) {
- if (subject_handle->HasOnlyAsciiChars() &&
- replacement_handle->HasOnlyAsciiChars()) {
+ regexp->TypeTag() == JSRegExp::ATOM &&
+ simple_replace) {
+ if (subject->HasOnlyAsciiChars() && replacement->HasOnlyAsciiChars()) {
return StringReplaceAtomRegExpWithString<SeqAsciiString>(
- isolate,
- subject_handle,
- regexp_handle,
- replacement_handle,
- last_match_info_handle);
+ isolate, subject, regexp, replacement, last_match_info);
} else {
return StringReplaceAtomRegExpWithString<SeqTwoByteString>(
- isolate,
- subject_handle,
- regexp_handle,
- replacement_handle,
- last_match_info_handle);
+ isolate, subject, regexp, replacement, last_match_info);
}
}
+ RegExpImpl::GlobalCache global_cache(regexp, subject, is_global, isolate);
+ if (global_cache.HasException()) return Failure::Exception();
+
+ int32_t* current_match = global_cache.FetchNext();
+ if (current_match == NULL) {
+ if (global_cache.HasException()) return Failure::Exception();
+ return *subject;
+ }
+
// Guessing the number of parts that the final result string is built
// from. Global regexps can match any number of times, so we guess
// conservatively.
int expected_parts =
(compiled_replacement.parts() + 1) * (is_global ? 4 : 1) + 1;
ReplacementStringBuilder builder(isolate->heap(),
- subject_handle,
+ subject,
expected_parts);
- // Index of end of last match.
- int prev = 0;
-
// Number of parts added by compiled replacement plus preceeding
// string and possibly suffix after last match. It is possible for
// all components to use two elements when encoded as two smis.
const int parts_added_per_loop = 2 * (compiled_replacement.parts() + 2);
- bool matched = true;
+
+ int prev = 0;
+
do {
- ASSERT(last_match_info_handle->HasFastElements());
- // Increase the capacity of the builder before entering local handle-scope,
- // so its internal buffer can safely allocate a new handle if it grows.
builder.EnsureCapacity(parts_added_per_loop);
- HandleScope loop_scope(isolate);
- int start, end;
- {
- AssertNoAllocation match_info_array_is_not_in_a_handle;
- FixedArray* match_info_array =
- FixedArray::cast(last_match_info_handle->elements());
-
- ASSERT_EQ(capture_count * 2 + 2,
- RegExpImpl::GetLastCaptureCount(match_info_array));
- start = RegExpImpl::GetCapture(match_info_array, 0);
- end = RegExpImpl::GetCapture(match_info_array, 1);
- }
+ int start = current_match[0];
+ int end = current_match[1];
if (prev < start) {
builder.AddSubjectSlice(prev, start);
}
- compiled_replacement.Apply(&builder,
- start,
- end,
- last_match_info_handle);
+
+ if (simple_replace) {
+ builder.AddString(replacement);
+ } else {
+ compiled_replacement.Apply(&builder,
+ start,
+ end,
+ current_match);
+ }
prev = end;
// Only continue checking for global regexps.
if (!is_global) break;
- // Continue from where the match ended, unless it was an empty match.
- int next = end;
- if (start == end) {
- next = end + 1;
- if (next > length) break;
- }
+ current_match = global_cache.FetchNext();
+ } while (current_match != NULL);
- match = RegExpImpl::Exec(regexp_handle,
- subject_handle,
- next,
- last_match_info_handle);
- if (match.is_null()) {
- return Failure::Exception();
- }
- matched = !match->IsNull();
- } while (matched);
+ if (global_cache.HasException()) return Failure::Exception();
- if (prev < length) {
- builder.AddSubjectSlice(prev, length);
+ if (prev < subject_length) {
+ builder.EnsureCapacity(2);
+ builder.AddSubjectSlice(prev, subject_length);
}
+ RegExpImpl::SetLastMatchInfo(last_match_info,
+ subject,
+ capture_count,
+ global_cache.LastSuccessfulMatch());
+
return *(builder.ToString());
}
@@ -3177,66 +3162,51 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
template <typename ResultSeqString>
MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
Isolate* isolate,
- String* subject,
- JSRegExp* regexp,
- JSArray* last_match_info) {
+ Handle<String> subject,
+ Handle<JSRegExp> regexp,
+ Handle<JSArray> last_match_info) {
ASSERT(subject->IsFlat());
- HandleScope handles(isolate);
-
- Handle<String> subject_handle(subject);
- Handle<JSRegExp> regexp_handle(regexp);
- Handle<JSArray> last_match_info_handle(last_match_info);
+ bool is_global = regexp->GetFlags().is_global();
// Shortcut for simple non-regexp global replacements
- if (regexp_handle->GetFlags().is_global() &&
- regexp_handle->TypeTag() == JSRegExp::ATOM) {
- Handle<String> empty_string_handle(HEAP->empty_string());
- if (subject_handle->HasOnlyAsciiChars()) {
+ if (is_global &&
+ regexp->TypeTag() == JSRegExp::ATOM) {
+ Handle<String> empty_string(HEAP->empty_string());
+ if (subject->HasOnlyAsciiChars()) {
return StringReplaceAtomRegExpWithString<SeqAsciiString>(
isolate,
- subject_handle,
- regexp_handle,
- empty_string_handle,
- last_match_info_handle);
+ subject,
+ regexp,
+ empty_string,
+ last_match_info);
} else {
return StringReplaceAtomRegExpWithString<SeqTwoByteString>(
isolate,
- subject_handle,
- regexp_handle,
- empty_string_handle,
- last_match_info_handle);
+ subject,
+ regexp,
+ empty_string,
+ last_match_info);
}
}
- Handle<Object> match = RegExpImpl::Exec(regexp_handle,
- subject_handle,
- 0,
- last_match_info_handle);
- if (match.is_null()) return Failure::Exception();
- if (match->IsNull()) return *subject_handle;
-
- ASSERT(last_match_info_handle->HasFastElements());
-
- int start, end;
- {
- AssertNoAllocation match_info_array_is_not_in_a_handle;
- FixedArray* match_info_array =
- FixedArray::cast(last_match_info_handle->elements());
+ RegExpImpl::GlobalCache global_cache(regexp, subject, is_global, isolate);
+ if (global_cache.HasException()) return Failure::Exception();
- start = RegExpImpl::GetCapture(match_info_array, 0);
- end = RegExpImpl::GetCapture(match_info_array, 1);
+ int32_t* current_match = global_cache.FetchNext();
+ if (current_match == NULL) {
+ if (global_cache.HasException()) return Failure::Exception();
+ return *subject;
}
- bool global = regexp_handle->GetFlags().is_global();
+ int start = current_match[0];
+ int end = current_match[1];
+ int capture_count = regexp->CaptureCount();
+ int subject_length = subject->length();
- if (start == end && !global) return *subject_handle;
+ int new_length = subject_length - (end - start);
+ if (new_length == 0) return isolate->heap()->empty_string();
- int length = subject_handle->length();
- int new_length = length - (end - start);
- if (new_length == 0) {
- return isolate->heap()->empty_string();
- }
Handle<ResultSeqString> answer;
if (ResultSeqString::kHasAsciiEncoding) {
answer = Handle<ResultSeqString>::cast(
@@ -3246,73 +3216,55 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
isolate->factory()->NewRawTwoByteString(new_length));
}
- // If the regexp isn't global, only match once.
- if (!global) {
- if (start > 0) {
- String::WriteToFlat(*subject_handle,
- answer->GetChars(),
- 0,
- start);
- }
- if (end < length) {
- String::WriteToFlat(*subject_handle,
- answer->GetChars() + start,
- end,
- length);
+ if (!is_global) {
+ RegExpImpl::SetLastMatchInfo(
+ last_match_info, subject, capture_count, current_match);
+ if (start == end) {
+ return *subject;
+ } else {
+ if (start > 0) {
+ String::WriteToFlat(*subject, answer->GetChars(), 0, start);
+ }
+ if (end < subject_length) {
+ String::WriteToFlat(
+ *subject, answer->GetChars() + start, end, subject_length);
+ }
+ return *answer;
}
- return *answer;
}
- int prev = 0; // Index of end of last match.
- int next = 0; // Start of next search (prev unless last match was empty).
+ int prev = 0;
int position = 0;
do {
+ start = current_match[0];
+ end = current_match[1];
if (prev < start) {
// Add substring subject[prev;start] to answer string.
- String::WriteToFlat(*subject_handle,
- answer->GetChars() + position,
- prev,
- start);
+ String::WriteToFlat(
+ *subject, answer->GetChars() + position, prev, start);
position += start - prev;
}
prev = end;
- next = end;
- // Continue from where the match ended, unless it was an empty match.
- if (start == end) {
- next++;
- if (next > length) break;
- }
- match = RegExpImpl::Exec(regexp_handle,
- subject_handle,
- next,
- last_match_info_handle);
- if (match.is_null()) return Failure::Exception();
- if (match->IsNull()) break;
-
- ASSERT(last_match_info_handle->HasFastElements());
- HandleScope loop_scope(isolate);
- {
- AssertNoAllocation match_info_array_is_not_in_a_handle;
- FixedArray* match_info_array =
- FixedArray::cast(last_match_info_handle->elements());
- start = RegExpImpl::GetCapture(match_info_array, 0);
- end = RegExpImpl::GetCapture(match_info_array, 1);
- }
- } while (true);
- if (prev < length) {
+ current_match = global_cache.FetchNext();
+ } while (current_match != NULL);
+
+ if (global_cache.HasException()) return Failure::Exception();
+
+ RegExpImpl::SetLastMatchInfo(last_match_info,
+ subject,
+ capture_count,
+ global_cache.LastSuccessfulMatch());
+
+ if (prev < subject_length) {
// Add substring subject[prev;length] to answer string.
- String::WriteToFlat(*subject_handle,
- answer->GetChars() + position,
- prev,
- length);
- position += length - prev;
+ String::WriteToFlat(
+ *subject, answer->GetChars() + position, prev, subject_length);
+ position += subject_length - prev;
}
- if (position == 0) {
- return isolate->heap()->empty_string();
- }
+ if (position == 0) return isolate->heap()->empty_string();
// Shorten string and fill
int string_size = ResultSeqString::SizeFor(position);
@@ -3335,32 +3287,18 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(String, subject, 0);
- if (!subject->IsFlat()) {
- Object* flat_subject;
- { MaybeObject* maybe_flat_subject = subject->TryFlatten();
- if (!maybe_flat_subject->ToObject(&flat_subject)) {
- return maybe_flat_subject;
- }
- }
- subject = String::cast(flat_subject);
- }
+ HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(String, replacement, 2);
- if (!replacement->IsFlat()) {
- Object* flat_replacement;
- { MaybeObject* maybe_flat_replacement = replacement->TryFlatten();
- if (!maybe_flat_replacement->ToObject(&flat_replacement)) {
- return maybe_flat_replacement;
- }
- }
- replacement = String::cast(flat_replacement);
- }
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, replacement, 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 3);
- CONVERT_ARG_CHECKED(JSRegExp, regexp, 1);
- CONVERT_ARG_CHECKED(JSArray, last_match_info, 3);
+ if (!subject->IsFlat()) subject = FlattenGetString(subject);
- ASSERT(last_match_info->HasFastElements());
+ if (!replacement->IsFlat()) replacement = FlattenGetString(replacement);
+
+ ASSERT(last_match_info->HasFastObjectElements());
if (replacement->length() == 0) {
if (subject->HasOnlyAsciiChars()) {
@@ -3372,20 +3310,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
}
}
- return StringReplaceRegExpWithString(isolate,
- subject,
- regexp,
- replacement,
- last_match_info);
+ return StringReplaceRegExpWithString(
+ isolate, subject, regexp, replacement, last_match_info);
}
-Handle<String> Runtime::StringReplaceOneCharWithString(Isolate* isolate,
- Handle<String> subject,
- Handle<String> search,
- Handle<String> replace,
- bool* found,
- int recursion_limit) {
+Handle<String> StringReplaceOneCharWithString(Isolate* isolate,
+ Handle<String> subject,
+ Handle<String> search,
+ Handle<String> replace,
+ bool* found,
+ int recursion_limit) {
if (recursion_limit == 0) return Handle<String>::null();
if (subject->IsConsString()) {
ConsString* cons = ConsString::cast(*subject);
@@ -3413,7 +3348,7 @@ Handle<String> Runtime::StringReplaceOneCharWithString(Isolate* isolate,
return subject;
} else {
- int index = StringMatch(isolate, subject, search, 0);
+ int index = Runtime::StringMatch(isolate, subject, search, 0);
if (index == -1) return subject;
*found = true;
Handle<String> first = isolate->factory()->NewSubString(subject, 0, index);
@@ -3436,20 +3371,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceOneCharWithString) {
// retry with a flattened subject string.
const int kRecursionLimit = 0x1000;
bool found = false;
- Handle<String> result =
- Runtime::StringReplaceOneCharWithString(isolate,
- subject,
- search,
- replace,
- &found,
- kRecursionLimit);
+ Handle<String> result = StringReplaceOneCharWithString(isolate,
+ subject,
+ search,
+ replace,
+ &found,
+ kRecursionLimit);
if (!result.is_null()) return *result;
- return *Runtime::StringReplaceOneCharWithString(isolate,
- FlattenGetString(subject),
- search,
- replace,
- &found,
- kRecursionLimit);
+ return *StringReplaceOneCharWithString(isolate,
+ FlattenGetString(subject),
+ search,
+ replace,
+ &found,
+ kRecursionLimit);
}
@@ -3680,8 +3614,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SubString) {
} else {
CONVERT_DOUBLE_ARG_CHECKED(from_number, 1);
CONVERT_DOUBLE_ARG_CHECKED(to_number, 2);
- start = FastD2I(from_number);
- end = FastD2I(to_number);
+ start = FastD2IChecked(from_number);
+ end = FastD2IChecked(to_number);
}
RUNTIME_ASSERT(end >= start);
RUNTIME_ASSERT(start >= 0);
@@ -3699,45 +3633,45 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, regexp_info, 2);
HandleScope handles;
- Handle<Object> match = RegExpImpl::Exec(regexp, subject, 0, regexp_info);
+ RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate);
+ if (global_cache.HasException()) return Failure::Exception();
- if (match.is_null()) {
- return Failure::Exception();
+ int capture_count = regexp->CaptureCount();
+
+ Zone* zone = isolate->runtime_zone();
+ ZoneScope zone_space(zone, DELETE_ON_EXIT);
+ ZoneList<int> offsets(8, zone);
+
+ while (true) {
+ int32_t* match = global_cache.FetchNext();
+ if (match == NULL) break;
+ offsets.Add(match[0], zone); // start
+ offsets.Add(match[1], zone); // end
}
- if (match->IsNull()) {
+
+ if (global_cache.HasException()) return Failure::Exception();
+
+ if (offsets.length() == 0) {
+ // Not a single match.
return isolate->heap()->null_value();
}
- int length = subject->length();
- ZoneScope zone_space(isolate, DELETE_ON_EXIT);
- ZoneList<int> offsets(8);
- int start;
- int end;
- do {
- {
- AssertNoAllocation no_alloc;
- FixedArray* elements = FixedArray::cast(regexp_info->elements());
- start = Smi::cast(elements->get(RegExpImpl::kFirstCapture))->value();
- end = Smi::cast(elements->get(RegExpImpl::kFirstCapture + 1))->value();
- }
- offsets.Add(start);
- offsets.Add(end);
- if (start == end) if (++end > length) break;
- match = RegExpImpl::Exec(regexp, subject, end, regexp_info);
- if (match.is_null()) {
- return Failure::Exception();
- }
- } while (!match->IsNull());
+ RegExpImpl::SetLastMatchInfo(regexp_info,
+ subject,
+ capture_count,
+ global_cache.LastSuccessfulMatch());
+
int matches = offsets.length() / 2;
Handle<FixedArray> elements = isolate->factory()->NewFixedArray(matches);
- Handle<String> substring = isolate->factory()->
- NewSubString(subject, offsets.at(0), offsets.at(1));
+ Handle<String> substring =
+ isolate->factory()->NewSubString(subject, offsets.at(0), offsets.at(1));
elements->set(0, *substring);
- for (int i = 1; i < matches ; i++) {
+ for (int i = 1; i < matches; i++) {
+ HandleScope temp_scope(isolate);
int from = offsets.at(i * 2);
int to = offsets.at(i * 2 + 1);
- Handle<String> substring = isolate->factory()->
- NewProperSubString(subject, from, to);
+ Handle<String> substring =
+ isolate->factory()->NewProperSubString(subject, from, to);
elements->set(i, *substring);
}
Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(elements);
@@ -3746,269 +3680,154 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
}
-static bool SearchStringMultiple(Isolate* isolate,
- Handle<String> subject,
- Handle<String> pattern,
- Handle<JSArray> last_match_info,
- FixedArrayBuilder* builder) {
- ASSERT(subject->IsFlat());
- ASSERT(pattern->IsFlat());
-
- // Treating as if a previous match was before first character.
- int match_pos = -pattern->length();
-
- for (;;) { // Break when search complete.
- builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
- AssertNoAllocation no_gc;
- String::FlatContent subject_content = subject->GetFlatContent();
- String::FlatContent pattern_content = pattern->GetFlatContent();
- if (subject_content.IsAscii()) {
- Vector<const char> subject_vector = subject_content.ToAsciiVector();
- if (pattern_content.IsAscii()) {
- if (SearchStringMultiple(isolate,
- subject_vector,
- pattern_content.ToAsciiVector(),
- *pattern,
- builder,
- &match_pos)) break;
- } else {
- if (SearchStringMultiple(isolate,
- subject_vector,
- pattern_content.ToUC16Vector(),
- *pattern,
- builder,
- &match_pos)) break;
- }
- } else {
- Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
- if (pattern_content.IsAscii()) {
- if (SearchStringMultiple(isolate,
- subject_vector,
- pattern_content.ToAsciiVector(),
- *pattern,
- builder,
- &match_pos)) break;
- } else {
- if (SearchStringMultiple(isolate,
- subject_vector,
- pattern_content.ToUC16Vector(),
- *pattern,
- builder,
- &match_pos)) break;
- }
- }
- }
-
- if (match_pos >= 0) {
- SetLastMatchInfoNoCaptures(subject,
- last_match_info,
- match_pos,
- match_pos + pattern->length());
- return true;
- }
- return false; // No matches at all.
-}
-
-
-static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
+// Only called from Runtime_RegExpExecMultiple so it doesn't need to maintain
+// separate last match info. See comment on that function.
+template<bool has_capture>
+static MaybeObject* SearchRegExpMultiple(
Isolate* isolate,
Handle<String> subject,
Handle<JSRegExp> regexp,
Handle<JSArray> last_match_array,
- FixedArrayBuilder* builder) {
+ Handle<JSArray> result_array) {
ASSERT(subject->IsFlat());
- int match_start = -1;
- int match_end = 0;
- int pos = 0;
- int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
- if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
+ ASSERT_NE(has_capture, regexp->CaptureCount() == 0);
- OffsetsVector registers(required_registers, isolate);
- Vector<int32_t> register_vector(registers.vector(), registers.length());
+ int capture_count = regexp->CaptureCount();
int subject_length = subject->length();
- bool first = true;
- for (;;) { // Break on failure, return on exception.
- RegExpImpl::IrregexpResult result =
- RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- pos,
- register_vector);
- if (result == RegExpImpl::RE_SUCCESS) {
- match_start = register_vector[0];
- builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
- if (match_end < match_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- match_start);
- }
- match_end = register_vector[1];
- HandleScope loop_scope(isolate);
- if (!first) {
- builder->Add(*isolate->factory()->NewProperSubString(subject,
- match_start,
- match_end));
- } else {
- builder->Add(*isolate->factory()->NewSubString(subject,
- match_start,
- match_end));
- }
- if (match_start != match_end) {
- pos = match_end;
- } else {
- pos = match_end + 1;
- if (pos > subject_length) break;
- }
- } else if (result == RegExpImpl::RE_FAILURE) {
- break;
- } else {
- ASSERT_EQ(result, RegExpImpl::RE_EXCEPTION);
- return result;
- }
- first = false;
- }
+ static const int kMinLengthToCache = 0x1000;
- if (match_start >= 0) {
- if (match_end < subject_length) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- subject_length);
+ if (subject_length > kMinLengthToCache) {
+ Handle<Object> cached_answer(RegExpResultsCache::Lookup(
+ isolate->heap(),
+ *subject,
+ regexp->data(),
+ RegExpResultsCache::REGEXP_MULTIPLE_INDICES));
+ if (*cached_answer != Smi::FromInt(0)) {
+ Handle<FixedArray> cached_fixed_array =
+ Handle<FixedArray>(FixedArray::cast(*cached_answer));
+ // The cache FixedArray is a COW-array and can therefore be reused.
+ isolate->factory()->SetContent(result_array, cached_fixed_array);
+ // The actual length of the result array is stored in the last element of
+ // the backing store (the backing FixedArray may have a larger capacity).
+ Object* cached_fixed_array_last_element =
+ cached_fixed_array->get(cached_fixed_array->length() - 1);
+ Smi* js_array_length = Smi::cast(cached_fixed_array_last_element);
+ result_array->set_length(js_array_length);
+ RegExpImpl::SetLastMatchInfo(
+ last_match_array, subject, capture_count, NULL);
+ return *result_array;
}
- SetLastMatchInfoNoCaptures(subject,
- last_match_array,
- match_start,
- match_end);
- return RegExpImpl::RE_SUCCESS;
- } else {
- return RegExpImpl::RE_FAILURE; // No matches at all.
}
-}
+ RegExpImpl::GlobalCache global_cache(regexp, subject, true, isolate);
+ if (global_cache.HasException()) return Failure::Exception();
-// Only called from Runtime_RegExpExecMultiple so it doesn't need to maintain
-// separate last match info. See comment on that function.
-static RegExpImpl::IrregexpResult SearchRegExpMultiple(
- Isolate* isolate,
- Handle<String> subject,
- Handle<JSRegExp> regexp,
- Handle<JSArray> last_match_array,
- FixedArrayBuilder* builder) {
-
- ASSERT(subject->IsFlat());
- int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
- if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
-
- OffsetsVector registers(required_registers, isolate);
- Vector<int32_t> register_vector(registers.vector(), registers.length());
-
- RegExpImpl::IrregexpResult result =
- RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- 0,
- register_vector);
+ Handle<FixedArray> result_elements;
+ if (result_array->HasFastObjectElements()) {
+ result_elements =
+ Handle<FixedArray>(FixedArray::cast(result_array->elements()));
+ }
+ if (result_elements.is_null() || result_elements->length() < 16) {
+ result_elements = isolate->factory()->NewFixedArrayWithHoles(16);
+ }
- int capture_count = regexp->CaptureCount();
- int subject_length = subject->length();
+ FixedArrayBuilder builder(result_elements);
// Position to search from.
- int pos = 0;
- // End of previous match. Differs from pos if match was empty.
+ int match_start = -1;
int match_end = 0;
- if (result == RegExpImpl::RE_SUCCESS) {
- bool first = true;
- do {
- int match_start = register_vector[0];
- builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
- if (match_end < match_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- match_start);
+ bool first = true;
+
+ // Two smis before and after the match, for very long strings.
+ static const int kMaxBuilderEntriesPerRegExpMatch = 5;
+
+ while (true) {
+ int32_t* current_match = global_cache.FetchNext();
+ if (current_match == NULL) break;
+ match_start = current_match[0];
+ builder.EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
+ if (match_end < match_start) {
+ ReplacementStringBuilder::AddSubjectSlice(&builder,
+ match_end,
+ match_start);
+ }
+ match_end = current_match[1];
+ {
+ // Avoid accumulating new handles inside loop.
+ HandleScope temp_scope(isolate);
+ Handle<String> match;
+ if (!first) {
+ match = isolate->factory()->NewProperSubString(subject,
+ match_start,
+ match_end);
+ } else {
+ match = isolate->factory()->NewSubString(subject,
+ match_start,
+ match_end);
+ first = false;
}
- match_end = register_vector[1];
- {
- // Avoid accumulating new handles inside loop.
- HandleScope temp_scope(isolate);
+ if (has_capture) {
// Arguments array to replace function is match, captures, index and
// subject, i.e., 3 + capture count in total.
Handle<FixedArray> elements =
isolate->factory()->NewFixedArray(3 + capture_count);
- Handle<String> match;
- if (!first) {
- match = isolate->factory()->NewProperSubString(subject,
- match_start,
- match_end);
- } else {
- match = isolate->factory()->NewSubString(subject,
- match_start,
- match_end);
- }
+
elements->set(0, *match);
for (int i = 1; i <= capture_count; i++) {
- int start = register_vector[i * 2];
+ int start = current_match[i * 2];
if (start >= 0) {
- int end = register_vector[i * 2 + 1];
+ int end = current_match[i * 2 + 1];
ASSERT(start <= end);
- Handle<String> substring;
- if (!first) {
- substring = isolate->factory()->NewProperSubString(subject,
- start,
- end);
- } else {
- substring = isolate->factory()->NewSubString(subject, start, end);
- }
+ Handle<String> substring =
+ isolate->factory()->NewSubString(subject, start, end);
elements->set(i, *substring);
} else {
- ASSERT(register_vector[i * 2 + 1] < 0);
+ ASSERT(current_match[i * 2 + 1] < 0);
elements->set(i, isolate->heap()->undefined_value());
}
}
elements->set(capture_count + 1, Smi::FromInt(match_start));
elements->set(capture_count + 2, *subject);
- builder->Add(*isolate->factory()->NewJSArrayWithElements(elements));
- }
-
- if (match_end > match_start) {
- pos = match_end;
+ builder.Add(*isolate->factory()->NewJSArrayWithElements(elements));
} else {
- pos = match_end + 1;
- if (pos > subject_length) {
- break;
- }
+ builder.Add(*match);
}
+ }
+ }
- result = RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- pos,
- register_vector);
- first = false;
- } while (result == RegExpImpl::RE_SUCCESS);
-
- if (result != RegExpImpl::RE_EXCEPTION) {
- // Finished matching, with at least one match.
- if (match_end < subject_length) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- subject_length);
- }
+ if (global_cache.HasException()) return Failure::Exception();
- int last_match_capture_count = (capture_count + 1) * 2;
- int last_match_array_size =
- last_match_capture_count + RegExpImpl::kLastMatchOverhead;
- last_match_array->EnsureSize(last_match_array_size);
- AssertNoAllocation no_gc;
- FixedArray* elements = FixedArray::cast(last_match_array->elements());
- // We have to set this even though the rest of the last match array is
- // ignored.
- RegExpImpl::SetLastCaptureCount(elements, last_match_capture_count);
- // These are also read without consulting the override.
- RegExpImpl::SetLastSubject(elements, *subject);
- RegExpImpl::SetLastInput(elements, *subject);
- return RegExpImpl::RE_SUCCESS;
- }
- }
- // No matches at all, return failure or exception result directly.
- return result;
+ if (match_start >= 0) {
+ // Finished matching, with at least one match.
+ if (match_end < subject_length) {
+ ReplacementStringBuilder::AddSubjectSlice(&builder,
+ match_end,
+ subject_length);
+ }
+
+ RegExpImpl::SetLastMatchInfo(
+ last_match_array, subject, capture_count, NULL);
+
+ if (subject_length > kMinLengthToCache) {
+ // Store the length of the result array into the last element of the
+ // backing FixedArray.
+ builder.EnsureCapacity(1);
+ Handle<FixedArray> fixed_array = builder.array();
+ fixed_array->set(fixed_array->length() - 1,
+ Smi::FromInt(builder.length()));
+ // Cache the result and turn the FixedArray into a COW array.
+ RegExpResultsCache::Enter(isolate->heap(),
+ *subject,
+ regexp->data(),
+ *fixed_array,
+ RegExpResultsCache::REGEXP_MULTIPLE_INDICES);
+ }
+ return *builder.ToJSArray(result_array);
+ } else {
+ return isolate->heap()->null_value(); // No matches at all.
+ }
}
@@ -4025,49 +3844,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 2);
CONVERT_ARG_HANDLE_CHECKED(JSArray, result_array, 3);
- ASSERT(last_match_info->HasFastElements());
+ ASSERT(last_match_info->HasFastObjectElements());
ASSERT(regexp->GetFlags().is_global());
- Handle<FixedArray> result_elements;
- if (result_array->HasFastElements()) {
- result_elements =
- Handle<FixedArray>(FixedArray::cast(result_array->elements()));
- }
- if (result_elements.is_null() || result_elements->length() < 16) {
- result_elements = isolate->factory()->NewFixedArrayWithHoles(16);
- }
- FixedArrayBuilder builder(result_elements);
- if (regexp->TypeTag() == JSRegExp::ATOM) {
- Handle<String> pattern(
- String::cast(regexp->DataAt(JSRegExp::kAtomPatternIndex)));
- ASSERT(pattern->IsFlat());
- if (SearchStringMultiple(isolate, subject, pattern,
- last_match_info, &builder)) {
- return *builder.ToJSArray(result_array);
- }
- return isolate->heap()->null_value();
- }
-
- ASSERT_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
-
- RegExpImpl::IrregexpResult result;
if (regexp->CaptureCount() == 0) {
- result = SearchRegExpNoCaptureMultiple(isolate,
- subject,
- regexp,
- last_match_info,
- &builder);
+ return SearchRegExpMultiple<false>(
+ isolate, subject, regexp, last_match_info, result_array);
} else {
- result = SearchRegExpMultiple(isolate,
- subject,
- regexp,
- last_match_info,
- &builder);
+ return SearchRegExpMultiple<true>(
+ isolate, subject, regexp, last_match_info, result_array);
}
- if (result == RegExpImpl::RE_SUCCESS) return *builder.ToJSArray(result_array);
- if (result == RegExpImpl::RE_FAILURE) return isolate->heap()->null_value();
- ASSERT_EQ(result, RegExpImpl::RE_EXCEPTION);
- return Failure::Exception();
}
@@ -4122,7 +3908,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToFixed) {
return *isolate->factory()->infinity_symbol();
}
CONVERT_DOUBLE_ARG_CHECKED(f_number, 1);
- int f = FastD2I(f_number);
+ int f = FastD2IChecked(f_number);
RUNTIME_ASSERT(f >= 0);
char* str = DoubleToFixedCString(value, f);
MaybeObject* res =
@@ -4147,7 +3933,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToExponential) {
return *isolate->factory()->infinity_symbol();
}
CONVERT_DOUBLE_ARG_CHECKED(f_number, 1);
- int f = FastD2I(f_number);
+ int f = FastD2IChecked(f_number);
RUNTIME_ASSERT(f >= -1 && f <= 20);
char* str = DoubleToExponentialCString(value, f);
MaybeObject* res =
@@ -4172,7 +3958,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberToPrecision) {
return *isolate->factory()->infinity_symbol();
}
CONVERT_DOUBLE_ARG_CHECKED(f_number, 1);
- int f = FastD2I(f_number);
+ int f = FastD2IChecked(f_number);
RUNTIME_ASSERT(f >= 1 && f <= 21);
char* str = DoubleToPrecisionCString(value, f);
MaybeObject* res =
@@ -4308,7 +4094,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
// appropriate.
LookupResult result(isolate);
receiver->LocalLookup(key, &result);
- if (result.IsFound() && result.type() == FIELD) {
+ if (result.IsField()) {
int offset = result.GetFieldIndex();
keyed_lookup_cache->Update(receiver_map, key, offset);
return receiver->FastPropertyAt(offset);
@@ -4330,17 +4116,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
// JSObject without a string key. If the key is a Smi, check for a
// definite out-of-bounds access to elements, which is a strong indicator
// that subsequent accesses will also call the runtime. Proactively
- // transition elements to FAST_ELEMENTS to avoid excessive boxing of
+ // transition elements to FAST_*_ELEMENTS to avoid excessive boxing of
// doubles for those future calls in the case that the elements would
// become FAST_DOUBLE_ELEMENTS.
Handle<JSObject> js_object(args.at<JSObject>(0));
ElementsKind elements_kind = js_object->GetElementsKind();
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastElementsKind(elements_kind) &&
+ !IsFastObjectElementsKind(elements_kind)) {
FixedArrayBase* elements = js_object->elements();
if (args.at<Smi>(1)->value() >= elements->length()) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ elements_kind = FAST_ELEMENTS;
+ }
MaybeObject* maybe_object = TransitionElements(js_object,
- FAST_ELEMENTS,
+ elements_kind,
isolate);
if (maybe_object->IsFailure()) return maybe_object;
}
@@ -4415,7 +4206,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
js_object->LocalLookupRealNamedProperty(*name, &result);
// Special case for callback properties.
- if (result.IsFound() && result.type() == CALLBACKS) {
+ if (result.IsPropertyCallbacks()) {
Object* callback = result.GetCallbackObject();
// To be compatible with Safari we do not change the value on API objects
// in Object.defineProperty(). Firefox disagrees here, and actually changes
@@ -4442,8 +4233,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
// map. The current version of SetObjectProperty does not handle attributes
// correctly in the case where a property is a field and is reset with
// new attributes.
- if (result.IsProperty() &&
- (attr != result.GetAttributes() || result.type() == CALLBACKS)) {
+ if (result.IsFound() &&
+ (attr != result.GetAttributes() || result.IsPropertyCallbacks())) {
// New attributes - normalize to avoid writing to instance descriptor
if (js_object->IsJSGlobalProxy()) {
// Since the result is a property, the prototype will exist so
@@ -4466,6 +4257,33 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
}
+// Return property without being observable by accessors or interceptors.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDataProperty) {
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, key, 1);
+ LookupResult lookup(isolate);
+ object->LookupRealNamedProperty(*key, &lookup);
+ if (!lookup.IsFound()) return isolate->heap()->undefined_value();
+ switch (lookup.type()) {
+ case NORMAL:
+ return lookup.holder()->GetNormalizedProperty(&lookup);
+ case FIELD:
+ return lookup.holder()->FastPropertyAt(lookup.GetFieldIndex());
+ case CONSTANT_FUNCTION:
+ return lookup.GetConstantFunction();
+ case CALLBACKS:
+ case HANDLER:
+ case INTERCEPTOR:
+ case TRANSITION:
+ return isolate->heap()->undefined_value();
+ case NONEXISTENT:
+ UNREACHABLE();
+ }
+ return isolate->heap()->undefined_value();
+}
+
+
MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
Handle<Object> object,
Handle<Object> key,
@@ -4510,8 +4328,10 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
return *value;
}
+ js_object->ValidateElements();
Handle<Object> result = JSObject::SetElement(
js_object, index, value, attr, strict_mode, set_mode);
+ js_object->ValidateElements();
if (result.is_null()) return Failure::Exception();
return *value;
}
@@ -4669,7 +4489,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsSmiToDouble) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
Handle<Object> object = args.at<Object>(0);
- return TransitionElements(object, FAST_DOUBLE_ELEMENTS, isolate);
+ if (object->IsJSObject()) {
+ Handle<JSObject> js_object(Handle<JSObject>::cast(object));
+ ElementsKind new_kind = js_object->HasFastHoleyElements()
+ ? FAST_HOLEY_DOUBLE_ELEMENTS
+ : FAST_DOUBLE_ELEMENTS;
+ return TransitionElements(object, new_kind, isolate);
+ } else {
+ return *object;
+ }
}
@@ -4677,7 +4505,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsDoubleToObject) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
Handle<Object> object = args.at<Object>(0);
- return TransitionElements(object, FAST_ELEMENTS, isolate);
+ if (object->IsJSObject()) {
+ Handle<JSObject> js_object(Handle<JSObject>::cast(object));
+ ElementsKind new_kind = js_object->HasFastHoleyElements()
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ return TransitionElements(object, new_kind, isolate);
+ } else {
+ return *object;
+ }
}
@@ -4708,32 +4544,38 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
HandleScope scope;
Object* raw_boilerplate_object = literals->get(literal_index);
- Handle<JSArray> boilerplate(JSArray::cast(raw_boilerplate_object));
-#if DEBUG
+ Handle<JSArray> boilerplate_object(JSArray::cast(raw_boilerplate_object));
ElementsKind elements_kind = object->GetElementsKind();
-#endif
- ASSERT(elements_kind <= FAST_DOUBLE_ELEMENTS);
+ ASSERT(IsFastElementsKind(elements_kind));
// Smis should never trigger transitions.
ASSERT(!value->IsSmi());
if (value->IsNumber()) {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- JSObject::TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
- if (IsMoreGeneralElementsKindTransition(boilerplate->GetElementsKind(),
- FAST_DOUBLE_ELEMENTS)) {
- JSObject::TransitionElementsKind(boilerplate, FAST_DOUBLE_ELEMENTS);
- }
- ASSERT(object->GetElementsKind() == FAST_DOUBLE_ELEMENTS);
+ ASSERT(IsFastSmiElementsKind(elements_kind));
+ ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
+ ? FAST_HOLEY_DOUBLE_ELEMENTS
+ : FAST_DOUBLE_ELEMENTS;
+ if (IsMoreGeneralElementsKindTransition(
+ boilerplate_object->GetElementsKind(),
+ transitioned_kind)) {
+ JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
+ }
+ JSObject::TransitionElementsKind(object, transitioned_kind);
+ ASSERT(IsFastDoubleElementsKind(object->GetElementsKind()));
FixedDoubleArray* double_array = FixedDoubleArray::cast(object->elements());
HeapNumber* number = HeapNumber::cast(*value);
double_array->set(store_index, number->Number());
} else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS);
- JSObject::TransitionElementsKind(object, FAST_ELEMENTS);
- if (IsMoreGeneralElementsKindTransition(boilerplate->GetElementsKind(),
- FAST_ELEMENTS)) {
- JSObject::TransitionElementsKind(boilerplate, FAST_ELEMENTS);
+ ASSERT(IsFastSmiElementsKind(elements_kind) ||
+ IsFastDoubleElementsKind(elements_kind));
+ ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ JSObject::TransitionElementsKind(object, transitioned_kind);
+ if (IsMoreGeneralElementsKindTransition(
+ boilerplate_object->GetElementsKind(),
+ transitioned_kind)) {
+ JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
}
FixedArray* object_array = FixedArray::cast(object->elements());
object_array->set(store_index, *value);
@@ -4745,30 +4587,37 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
// Check whether debugger and is about to step into the callback that is passed
// to a built-in function such as Array.forEach.
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugCallbackSupportsStepping) {
- if (!isolate->IsDebuggerActive()) return isolate->heap()->false_value();
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (!isolate->IsDebuggerActive() || !isolate->debug()->StepInActive()) {
+ return isolate->heap()->false_value();
+ }
CONVERT_ARG_CHECKED(Object, callback, 0);
// We do not step into the callback if it's a builtin or not even a function.
if (!callback->IsJSFunction() || JSFunction::cast(callback)->IsBuiltin()) {
return isolate->heap()->false_value();
}
return isolate->heap()->true_value();
+#else
+ return isolate->heap()->false_value();
+#endif // ENABLE_DEBUGGER_SUPPORT
}
// Set one shot breakpoints for the callback function that is passed to a
// built-in function such as Array.forEach to enable stepping into the callback.
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrepareStepInIfStepping) {
+#ifdef ENABLE_DEBUGGER_SUPPORT
Debug* debug = isolate->debug();
- if (!debug->IsStepping()) return NULL;
- CONVERT_ARG_CHECKED(Object, callback, 0);
+ if (!debug->IsStepping()) return isolate->heap()->undefined_value();
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, callback, 0);
HandleScope scope(isolate);
- Handle<SharedFunctionInfo> shared_info(JSFunction::cast(callback)->shared());
// When leaving the callback, step out has been activated, but not performed
// if we do not leave the builtin. To be able to step into the callback
// again, we need to clear the step out at this point.
debug->ClearStepOut();
- debug->FloodWithOneShot(shared_info);
- return NULL;
+ debug->FloodWithOneShot(callback);
+#endif // ENABLE_DEBUGGER_SUPPORT
+ return isolate->heap()->undefined_value();
}
@@ -4896,7 +4745,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) {
uint32_t index;
if (key->AsArrayIndex(&index)) {
- JSObject::LocalElementType type = object->HasLocalElement(index);
+ JSObject::LocalElementType type = object->GetLocalElementType(index);
switch (type) {
case JSObject::UNDEFINED_ELEMENT:
case JSObject::STRING_CHARACTER_ELEMENT:
@@ -5248,15 +5097,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ToFastProperties) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_ToSlowProperties) {
- ASSERT(args.length() == 1);
- Object* obj = args[0];
- return (obj->IsJSObject() && !obj->IsJSGlobalProxy())
- ? JSObject::cast(obj)->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0)
- : obj;
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_ToBool) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@@ -5946,7 +5786,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringArray) {
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSArray, array, 0);
- if (!array->HasFastElements()) return isolate->heap()->undefined_value();
+ if (!array->HasFastObjectElements()) {
+ return isolate->heap()->undefined_value();
+ }
FixedArray* elements = FixedArray::cast(array->elements());
int n = elements->length();
bool ascii = true;
@@ -5983,6 +5825,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringArray) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_BasicJSONStringify) {
+ ASSERT(args.length() == 1);
+ HandleScope scope(isolate);
+ BasicJsonStringifier stringifier(isolate);
+ return stringifier.Stringify(Handle<Object>(args[0]));
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseInt) {
NoHandleAllocation ha;
@@ -6346,11 +6196,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
RUNTIME_ASSERT(pattern_length > 0);
if (limit == 0xffffffffu) {
- Handle<Object> cached_answer(StringSplitCache::Lookup(
- isolate->heap()->string_split_cache(),
+ Handle<Object> cached_answer(RegExpResultsCache::Lookup(
+ isolate->heap(),
*subject,
- *pattern));
+ *pattern,
+ RegExpResultsCache::STRING_SPLIT_SUBSTRINGS));
if (*cached_answer != Smi::FromInt(0)) {
+ // The cache FixedArray is a COW-array and can therefore be reused.
Handle<JSArray> result =
isolate->factory()->NewJSArrayWithElements(
Handle<FixedArray>::cast(cached_answer));
@@ -6366,17 +6218,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
static const int kMaxInitialListCapacity = 16;
- ZoneScope scope(isolate, DELETE_ON_EXIT);
+ Zone* zone = isolate->runtime_zone();
+ ZoneScope scope(zone, DELETE_ON_EXIT);
// Find (up to limit) indices of separator and end-of-string in subject
int initial_capacity = Min<uint32_t>(kMaxInitialListCapacity, limit);
- ZoneList<int> indices(initial_capacity);
+ ZoneList<int> indices(initial_capacity, zone);
if (!pattern->IsFlat()) FlattenString(pattern);
- FindStringIndicesDispatch(isolate, *subject, *pattern, &indices, limit);
+ FindStringIndicesDispatch(isolate, *subject, *pattern, &indices, limit, zone);
if (static_cast<uint32_t>(indices.length()) < limit) {
- indices.Add(subject_length);
+ indices.Add(subject_length, zone);
}
// The list indices now contains the end of each part to create.
@@ -6389,7 +6242,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
if (maybe_result->IsFailure()) return maybe_result;
result->set_length(Smi::FromInt(part_count));
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
if (part_count == 1 && indices.at(0) == subject_length) {
FixedArray::cast(result->elements())->set(0, *subject);
@@ -6408,12 +6261,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
}
if (limit == 0xffffffffu) {
- if (result->HasFastElements()) {
- StringSplitCache::Enter(isolate->heap(),
- isolate->heap()->string_split_cache(),
- *subject,
- *pattern,
- *elements);
+ if (result->HasFastObjectElements()) {
+ RegExpResultsCache::Enter(isolate->heap(),
+ *subject,
+ *pattern,
+ *elements,
+ RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
}
}
@@ -6765,7 +6618,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
if (maybe_result->IsFailure()) return maybe_result;
int special_length = special->length();
- if (!array->HasFastElements()) {
+ if (!array->HasFastObjectElements()) {
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
}
FixedArray* fixed_array = FixedArray::cast(array->elements());
@@ -6875,7 +6728,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) {
int array_length = args.smi_at(1);
CONVERT_ARG_CHECKED(String, separator, 2);
- if (!array->HasFastElements()) {
+ if (!array->HasFastObjectElements()) {
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
}
FixedArray* fixed_array = FixedArray::cast(array->elements());
@@ -6992,8 +6845,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
CONVERT_ARG_CHECKED(JSArray, elements_array, 0);
- RUNTIME_ASSERT(elements_array->HasFastElements() ||
- elements_array->HasFastSmiOnlyElements());
+ RUNTIME_ASSERT(elements_array->HasFastSmiOrObjectElements());
CONVERT_NUMBER_CHECKED(uint32_t, array_length, Uint32, args[1]);
CONVERT_ARG_CHECKED(String, separator, 2);
// elements_array is fast-mode JSarray of alternating positions
@@ -7703,8 +7555,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewArgumentsFast) {
isolate->heap()->non_strict_arguments_elements_map());
Handle<Map> old_map(result->map());
- Handle<Map> new_map =
- isolate->factory()->CopyMapDropTransitions(old_map);
+ Handle<Map> new_map = isolate->factory()->CopyMap(old_map);
new_map->set_elements_kind(NON_STRICT_ARGUMENTS_ELEMENTS);
result->set_map(*new_map);
@@ -8061,17 +7912,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
// instead of a new JSFunction object. This way, errors are
// reported the same way whether or not 'Function' is called
// using 'new'.
- return isolate->context()->global();
+ return isolate->context()->global_object();
}
}
// The function should be compiled for the optimization hints to be
- // available. We cannot use EnsureCompiled because that forces a
- // compilation through the shared function info which makes it
- // impossible for us to optimize.
- if (!function->is_compiled()) {
- JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
- }
+ // available.
+ JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION);
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
if (!function->has_initial_map() &&
@@ -8149,7 +7996,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
// If the function is not optimizable or debugger is active continue using the
// code from the full compiler.
- if (!function->shared()->code()->optimizable() ||
+ if (!FLAG_crankshaft ||
+ !function->shared()->code()->optimizable() ||
isolate->DebuggerHasBreakPoints()) {
if (FLAG_trace_opt) {
PrintF("[failed to optimize ");
@@ -8163,7 +8011,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
}
function->shared()->code()->set_profiler_ticks(0);
if (JSFunction::CompileOptimized(function,
- AstNode::kNoNumber,
+ BailoutId::None(),
CLEAR_EXCEPTION)) {
return function->code();
}
@@ -8177,6 +8025,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ParallelRecompile) {
+ HandleScope handle_scope(isolate);
+ ASSERT(FLAG_parallel_recompilation);
+ Compiler::RecompileParallel(args.at<JSFunction>(0));
+ return *isolate->factory()->undefined_value();
+}
+
+
class ActivationsFinder : public ThreadVisitor {
public:
explicit ActivationsFinder(JSFunction* function)
@@ -8202,35 +8058,6 @@ class ActivationsFinder : public ThreadVisitor {
};
-static void MaterializeArgumentsObjectInFrame(Isolate* isolate,
- JavaScriptFrame* frame) {
- Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
- Handle<Object> arguments;
- for (int i = frame->ComputeExpressionsCount() - 1; i >= 0; --i) {
- if (frame->GetExpression(i) == isolate->heap()->arguments_marker()) {
- if (arguments.is_null()) {
- // FunctionGetArguments can't throw an exception, so cast away the
- // doubt with an assert.
- arguments = Handle<Object>(
- Accessors::FunctionGetArguments(*function,
- NULL)->ToObjectUnchecked());
- ASSERT(*arguments != isolate->heap()->null_value());
- ASSERT(*arguments != isolate->heap()->undefined_value());
- }
- frame->SetExpression(i, *arguments);
- if (FLAG_trace_deopt) {
- PrintF("Materializing arguments object for frame %p - %p: %p ",
- reinterpret_cast<void*>(frame->sp()),
- reinterpret_cast<void*>(frame->fp()),
- reinterpret_cast<void*>(*arguments));
- arguments->ShortPrint();
- PrintF("\n");
- }
- }
- }
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
@@ -8239,25 +8066,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
static_cast<Deoptimizer::BailoutType>(args.smi_at(0));
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
ASSERT(isolate->heap()->IsAllocationAllowed());
- int jsframes = deoptimizer->jsframe_count();
+ JavaScriptFrameIterator it(isolate);
- deoptimizer->MaterializeHeapNumbers();
+ // Make sure to materialize objects before causing any allocation.
+ deoptimizer->MaterializeHeapObjects(&it);
delete deoptimizer;
- JavaScriptFrameIterator it(isolate);
- for (int i = 0; i < jsframes - 1; i++) {
- MaterializeArgumentsObjectInFrame(isolate, it.frame());
- it.Advance();
- }
-
JavaScriptFrame* frame = it.frame();
RUNTIME_ASSERT(frame->function()->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
- MaterializeArgumentsObjectInFrame(isolate, frame);
-
- if (type == Deoptimizer::EAGER) {
- RUNTIME_ASSERT(function->IsOptimized());
- }
+ RUNTIME_ASSERT(type != Deoptimizer::EAGER || function->IsOptimized());
// Avoid doing too much work when running with --always-opt and keep
// the optimized code around.
@@ -8265,11 +8083,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
return isolate->heap()->undefined_value();
}
- // Find other optimized activations of the function.
+ // Find other optimized activations of the function or functions that
+ // share the same optimized code.
bool has_other_activations = false;
while (!it.done()) {
JavaScriptFrame* frame = it.frame();
- if (frame->is_optimized() && frame->function() == *function) {
+ JSFunction* other_function = JSFunction::cast(frame->function());
+ if (frame->is_optimized() && other_function->code() == function->code()) {
has_other_activations = true;
break;
}
@@ -8292,6 +8112,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
} else {
Deoptimizer::DeoptimizeFunction(*function);
}
+ // Flush optimized code cache for this function.
+ function->shared()->ClearOptimizedCodeMap();
+
return isolate->heap()->undefined_value();
}
@@ -8368,6 +8191,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
return Smi::FromInt(4); // 4 == "never".
}
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+ if (FLAG_parallel_recompilation) {
+ if (function->IsMarkedForLazyRecompilation()) {
+ return Smi::FromInt(5);
+ }
+ }
if (FLAG_always_opt) {
// We may have always opt, but that is more best-effort than a real
// promise, so we still say "no" if it is not optimized.
@@ -8414,7 +8242,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
}
}
- int ast_id = AstNode::kNoNumber;
+ BailoutId ast_id = BailoutId::None();
if (succeeded) {
// The top JS function is this one, the PC is somewhere in the
// unoptimized code.
@@ -8435,14 +8263,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
// Table entries are (AST id, pc offset) pairs.
uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize);
if (pc_offset == target_pc_offset) {
- ast_id = static_cast<int>(Memory::uint32_at(table_cursor));
+ ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor)));
break;
}
table_cursor += 2 * kIntSize;
}
- ASSERT(ast_id != AstNode::kNoNumber);
+ ASSERT(!ast_id.IsNone());
if (FLAG_trace_osr) {
- PrintF("[replacing on-stack at AST id %d in ", ast_id);
+ PrintF("[replacing on-stack at AST id %d in ", ast_id.ToInt());
function->PrintName();
PrintF("]\n");
}
@@ -8459,7 +8287,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
PrintF("[on-stack replacement offset %d in optimized code]\n",
data->OsrPcOffset()->value());
}
- ASSERT(data->OsrAstId()->value() == ast_id);
+ ASSERT(BailoutId(data->OsrAstId()->value()) == ast_id);
} else {
// We may never generate the desired OSR entry if we emit an
// early deoptimize.
@@ -8498,7 +8326,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
// frame to an optimized one.
if (succeeded) {
ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
- return Smi::FromInt(ast_id);
+ return Smi::FromInt(ast_id.ToInt());
} else {
if (function->IsMarkedForLazyRecompilation()) {
function->ReplaceCode(function->shared()->code());
@@ -8607,6 +8435,26 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetConstructorDelegate) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NewGlobalContext) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+
+ CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_CHECKED(ScopeInfo, scope_info, 1);
+ Context* result;
+ MaybeObject* maybe_result =
+ isolate->heap()->AllocateGlobalContext(function, scope_info);
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ ASSERT(function->context() == isolate->context());
+ ASSERT(function->context()->global_object() == result->global_object());
+ isolate->set_context(result);
+ result->global_object()->set_global_context(result);
+
+ return result; // non-failure
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewFunctionContext) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@@ -8615,14 +8463,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewFunctionContext) {
SharedFunctionInfo* shared = function->shared();
// TODO: The QML mode should be checked in the ContextLength function.
int length = shared->scope_info()->ContextLength(shared->qml_mode());
+ Context* result;
+ MaybeObject* maybe_result =
+ isolate->heap()->AllocateFunctionContext(length, function);
+ if (!maybe_result->To(&result)) return maybe_result;
- Object* result;
- { MaybeObject* maybe_result =
- isolate->heap()->AllocateFunctionContext(length, function);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
-
- isolate->set_context(Context::cast(result));
+ isolate->set_context(result);
return result; // non-failure
}
@@ -8655,8 +8501,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushWithContext) {
if (args[1]->IsSmi()) {
// A smi sentinel indicates a context nested inside global code rather
// than some function. There is a canonical empty function that can be
- // gotten from the global context.
- function = isolate->context()->global_context()->closure();
+ // gotten from the native context.
+ function = isolate->context()->native_context()->closure();
} else {
function = JSFunction::cast(args[1]);
}
@@ -8681,8 +8527,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushCatchContext) {
if (args[2]->IsSmi()) {
// A smi sentinel indicates a context nested inside global code rather
// than some function. There is a canonical empty function that can be
- // gotten from the global context.
- function = isolate->context()->global_context()->closure();
+ // gotten from the native context.
+ function = isolate->context()->native_context()->closure();
} else {
function = JSFunction::cast(args[2]);
}
@@ -8706,8 +8552,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushBlockContext) {
if (args[1]->IsSmi()) {
// A smi sentinel indicates a context nested inside global code rather
// than some function. There is a canonical empty function that can be
- // gotten from the global context.
- function = isolate->context()->global_context()->closure();
+ // gotten from the native context.
+ function = isolate->context()->native_context()->closure();
} else {
function = JSFunction::cast(args[1]);
}
@@ -8722,19 +8568,25 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushBlockContext) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSModule) {
+ ASSERT(args.length() == 1);
+ Object* obj = args[0];
+ return isolate->heap()->ToBoolean(obj->IsJSModule());
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_PushModuleContext) {
NoHandleAllocation ha;
- ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(ScopeInfo, scope_info, 0);
- CONVERT_ARG_HANDLE_CHECKED(JSModule, instance, 1);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSModule, instance, 0);
- Context* context;
- MaybeObject* maybe_context =
- isolate->heap()->AllocateModuleContext(isolate->context(),
- scope_info);
- if (!maybe_context->To(&context)) return maybe_context;
- // Also initialize the context slot of the instance object.
- instance->set_context(context);
+ Context* context = Context::cast(instance->context());
+ Context* previous = isolate->context();
+ ASSERT(context->IsModuleContext());
+ // Initialize the context links.
+ context->set_previous(previous);
+ context->set_closure(previous->closure());
+ context->set_global_object(previous->global_object());
isolate->set_context(context);
return context;
@@ -8820,7 +8672,7 @@ static Object* ComputeReceiverForNonGlobal(Isolate* isolate,
Context* top = isolate->context();
// Get the context extension function.
JSFunction* context_extension_function =
- top->global_context()->context_extension_function();
+ top->native_context()->context_extension_function();
// If the holder isn't a context extension object, we just return it
// as the receiver. This allows arguments objects to be used as
// receivers, but only if they are put in the context scope chain
@@ -8999,7 +8851,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreContextSlot) {
}
// In non-strict mode, the property is added to the global object.
attributes = NONE;
- object = Handle<JSObject>(isolate->context()->global());
+ object = Handle<JSObject>(isolate->context()->global_object());
}
// Set the property if it's not read only or doesn't yet exist.
@@ -9053,6 +8905,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ThrowReferenceError) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ThrowNotDateError) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 0);
+ return isolate->Throw(*isolate->factory()->NewTypeError(
+ "not_date_object", HandleVector<Object>(NULL, 0)));
+}
+
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
ASSERT(args.length() == 0);
@@ -9180,7 +9041,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateParseString) {
MaybeObject* maybe_result_array =
output->EnsureCanContainHeapObjectElements();
if (maybe_result_array->IsFailure()) return maybe_result_array;
- RUNTIME_ASSERT(output->HasFastElements());
+ RUNTIME_ASSERT(output->HasFastObjectElements());
AssertNoAllocation no_allocation;
@@ -9242,13 +9103,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) {
ASSERT_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
+ Zone* zone = isolate->runtime_zone();
source = Handle<String>(source->TryFlattenGetString());
// Optimized fast case where we only have ASCII characters.
Handle<Object> result;
if (source->IsSeqAsciiString()) {
- result = JsonParser<true>::Parse(source);
+ result = JsonParser<true>::Parse(source, zone);
} else {
- result = JsonParser<false>::Parse(source);
+ result = JsonParser<false>::Parse(source, zone);
}
if (result.is_null()) {
// Syntax error or stack overflow in scanner.
@@ -9281,18 +9143,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) {
ASSERT_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
- // Extract global context.
- Handle<Context> context(isolate->context()->global_context());
+ // Extract native context.
+ Handle<Context> context(isolate->context()->native_context());
- // Check if global context allows code generation from
+ // Check if native context allows code generation from
// strings. Throw an exception if it doesn't.
if (context->allow_code_gen_from_strings()->IsFalse() &&
!CodeGenerationFromStringsAllowed(isolate, context)) {
- return isolate->Throw(*isolate->factory()->NewError(
- "code_gen_from_strings", HandleVector<Object>(NULL, 0)));
+ Handle<Object> error_message =
+ context->ErrorMessageForCodeGenerationFromStrings();
+ return isolate->Throw(*isolate->factory()->NewEvalError(
+ "code_gen_from_strings", HandleVector<Object>(&error_message, 1)));
}
- // Compile source string in the global context.
+ // Compile source string in the native context.
Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
source, context, true, CLASSIC_MODE, RelocInfo::kNoPosition, false);
if (shared.is_null()) return Failure::Exception();
@@ -9311,14 +9175,16 @@ static ObjectPair CompileGlobalEval(Isolate* isolate,
int scope_position,
bool qml_mode) {
Handle<Context> context = Handle<Context>(isolate->context());
- Handle<Context> global_context = Handle<Context>(context->global_context());
+ Handle<Context> native_context = Handle<Context>(context->native_context());
- // Check if global context allows code generation from
+ // Check if native context allows code generation from
// strings. Throw an exception if it doesn't.
- if (global_context->allow_code_gen_from_strings()->IsFalse() &&
- !CodeGenerationFromStringsAllowed(isolate, global_context)) {
- isolate->Throw(*isolate->factory()->NewError(
- "code_gen_from_strings", HandleVector<Object>(NULL, 0)));
+ if (native_context->allow_code_gen_from_strings()->IsFalse() &&
+ !CodeGenerationFromStringsAllowed(isolate, native_context)) {
+ Handle<Object> error_message =
+ native_context->ErrorMessageForCodeGenerationFromStrings();
+ isolate->Throw(*isolate->factory()->NewEvalError(
+ "code_gen_from_strings", HandleVector<Object>(&error_message, 1)));
return MakePair(Failure::Exception(), NULL);
}
@@ -9327,7 +9193,7 @@ static ObjectPair CompileGlobalEval(Isolate* isolate,
Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
source,
Handle<Context>(isolate->context()),
- context->IsGlobalContext(),
+ context->IsNativeContext(),
language_mode,
scope_position,
qml_mode);
@@ -9350,7 +9216,7 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEval) {
// (And even if it is, but the first argument isn't a string, just let
// execution default to an indirect call to eval, which will also return
// the first argument without doing anything).
- if (*callee != isolate->global_context()->global_eval_fun() ||
+ if (*callee != isolate->native_context()->global_eval_fun() ||
!args[1]->IsString()) {
return MakePair(*callee, isolate->heap()->the_hole_value());
}
@@ -9414,8 +9280,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) {
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(JSArray, array, 0);
- CONVERT_ARG_CHECKED(JSObject, element, 1);
- RUNTIME_ASSERT(array->HasFastElements() || array->HasFastSmiOnlyElements());
+ CONVERT_ARG_CHECKED(JSReceiver, element, 1);
+ RUNTIME_ASSERT(array->HasFastSmiOrObjectElements());
int length = Smi::cast(array->length())->value();
FixedArray* elements = FixedArray::cast(array->elements());
for (int i = 0; i < length; i++) {
@@ -9500,7 +9366,7 @@ class ArrayConcatVisitor {
Handle<Map> map;
if (fast_elements_) {
map = isolate_->factory()->GetElementsTransitionMap(array,
- FAST_ELEMENTS);
+ FAST_HOLEY_ELEMENTS);
} else {
map = isolate_->factory()->GetElementsTransitionMap(array,
DICTIONARY_ELEMENTS);
@@ -9559,8 +9425,10 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
uint32_t length = static_cast<uint32_t>(array->length()->Number());
int element_count = 0;
switch (array->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
// Fast elements can't have lengths that are not representable by
// a 32-bit signed integer.
ASSERT(static_cast<int32_t>(FixedArray::kMaxLength) >= 0);
@@ -9572,6 +9440,7 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
break;
}
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
break;
@@ -9662,8 +9531,10 @@ static void CollectElementIndices(Handle<JSObject> object,
List<uint32_t>* indices) {
ElementsKind kind = object->GetElementsKind();
switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
Handle<FixedArray> elements(FixedArray::cast(object->elements()));
uint32_t length = static_cast<uint32_t>(elements->length());
if (range < length) length = range;
@@ -9674,6 +9545,7 @@ static void CollectElementIndices(Handle<JSObject> object,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
@@ -9788,8 +9660,10 @@ static bool IterateElements(Isolate* isolate,
ArrayConcatVisitor* visitor) {
uint32_t length = static_cast<uint32_t>(receiver->length()->Number());
switch (receiver->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
// Run through the elements FixedArray and use HasElement and GetElement
// to check the prototype for missing elements.
Handle<FixedArray> elements(FixedArray::cast(receiver->elements()));
@@ -9810,6 +9684,7 @@ static bool IterateElements(Isolate* isolate,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
@@ -9907,7 +9782,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 0);
int argument_count = static_cast<int>(arguments->length()->Number());
- RUNTIME_ASSERT(arguments->HasFastElements());
+ RUNTIME_ASSERT(arguments->HasFastObjectElements());
Handle<FixedArray> elements(FixedArray::cast(arguments->elements()));
// Pass 1: estimate the length and number of elements of the result.
@@ -9927,10 +9802,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
Handle<JSArray> array(Handle<JSArray>::cast(obj));
// TODO(1810): Find out if it's worthwhile to properly support
// arbitrary ElementsKinds. For now, pessimistically transition to
- // FAST_ELEMENTS.
+ // FAST_*_ELEMENTS.
if (array->HasFastDoubleElements()) {
+ ElementsKind to_kind = FAST_ELEMENTS;
+ if (array->HasFastHoleyElements()) {
+ to_kind = FAST_HOLEY_ELEMENTS;
+ }
array = Handle<JSArray>::cast(
- JSObject::TransitionElementsKind(array, FAST_ELEMENTS));
+ JSObject::TransitionElementsKind(array, to_kind));
}
length_estimate =
static_cast<uint32_t>(array->length()->Number());
@@ -10027,29 +9906,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(JSArray, from, 0);
CONVERT_ARG_CHECKED(JSArray, to, 1);
+ from->ValidateElements();
+ to->ValidateElements();
FixedArrayBase* new_elements = from->elements();
+ ElementsKind from_kind = from->GetElementsKind();
MaybeObject* maybe_new_map;
- ElementsKind elements_kind;
- if (new_elements->map() == isolate->heap()->fixed_array_map() ||
- new_elements->map() == isolate->heap()->fixed_cow_array_map()) {
- elements_kind = FAST_ELEMENTS;
- } else if (new_elements->map() ==
- isolate->heap()->fixed_double_array_map()) {
- elements_kind = FAST_DOUBLE_ELEMENTS;
- } else {
- elements_kind = DICTIONARY_ELEMENTS;
- }
- maybe_new_map = to->GetElementsTransitionMap(isolate, elements_kind);
+ maybe_new_map = to->GetElementsTransitionMap(isolate, from_kind);
Object* new_map;
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- to->set_map(Map::cast(new_map));
- to->set_elements(new_elements);
+ to->set_map_and_elements(Map::cast(new_map), new_elements);
to->set_length(from->length());
Object* obj;
{ MaybeObject* maybe_obj = from->ResetElements();
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
from->set_length(Smi::FromInt(0));
+ to->ValidateElements();
return to;
}
@@ -10099,8 +9971,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
}
return *isolate->factory()->NewJSArrayWithElements(keys);
} else {
- ASSERT(array->HasFastElements() ||
- array->HasFastSmiOnlyElements() ||
+ ASSERT(array->HasFastSmiOrObjectElements() ||
array->HasFastDoubleElements());
Handle<FixedArray> single_interval = isolate->factory()->NewFixedArray(2);
// -1 means start of array.
@@ -10119,11 +9990,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LookupAccessor) {
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_CHECKED(JSReceiver, receiver, 0);
CONVERT_ARG_CHECKED(String, name, 1);
CONVERT_SMI_ARG_CHECKED(flag, 2);
AccessorComponent component = flag == 0 ? ACCESSOR_GETTER : ACCESSOR_SETTER;
- return obj->LookupAccessor(name, component);
+ if (!receiver->IsJSObject()) return isolate->heap()->undefined_value();
+ return JSObject::cast(receiver)->LookupAccessor(name, component);
}
@@ -10214,12 +10086,10 @@ static MaybeObject* DebugLookupResultValue(Heap* heap,
}
}
case INTERCEPTOR:
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
+ case TRANSITION:
return heap->undefined_value();
case HANDLER:
+ case NONEXISTENT:
UNREACHABLE();
return heap->undefined_value();
}
@@ -10252,7 +10122,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
// entered (if the debugger is entered). The reason for switching context here
// is that for some property lookups (accessors and interceptors) callbacks
// into the embedding application can occour, and the embedding application
- // could have the assumption that its own global context is the current
+ // could have the assumption that its own native context is the current
// context and not some internal debugger context.
SaveContext save(isolate);
if (isolate->debug()->InDebugger()) {
@@ -10291,13 +10161,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
for (int i = 0; i < length; i++) {
LookupResult result(isolate);
jsproto->LocalLookup(*name, &result);
- if (result.IsProperty()) {
+ if (result.IsFound()) {
// LookupResult is not GC safe as it holds raw object pointers.
// GC can happen later in this code so put the required fields into
// local variables using handles when required for later use.
- PropertyType result_type = result.type();
Handle<Object> result_callback_obj;
- if (result_type == CALLBACKS) {
+ if (result.IsPropertyCallbacks()) {
result_callback_obj = Handle<Object>(result.GetCallbackObject(),
isolate);
}
@@ -10315,7 +10184,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
// If the callback object is a fixed array then it contains JavaScript
// getter and/or setter.
- bool hasJavaScriptAccessors = result_type == CALLBACKS &&
+ bool hasJavaScriptAccessors = result.IsPropertyCallbacks() &&
result_callback_obj->IsAccessorPair();
Handle<FixedArray> details =
isolate->factory()->NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
@@ -10349,7 +10218,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) {
LookupResult result(isolate);
obj->Lookup(*name, &result);
- if (result.IsProperty()) {
+ if (result.IsFound()) {
return DebugLookupResultValue(isolate->heap(), *obj, *name, &result, NULL);
}
return isolate->heap()->undefined_value();
@@ -10379,7 +10248,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyAttributesFromDetails) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyIndexFromDetails) {
ASSERT(args.length() == 1);
CONVERT_PROPERTY_DETAILS_CHECKED(details, 0);
- return Smi::FromInt(details.index());
+ // TODO(verwaest): Depends on the type of details.
+ return Smi::FromInt(details.dictionary_index());
}
@@ -10798,12 +10668,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// value object is not converted into a wrapped JS objects. To
// hide this optimization from the debugger, we wrap the receiver
// by creating correct wrapper object based on the calling frame's
- // global context.
+ // native context.
it.Advance();
- Handle<Context> calling_frames_global_context(
- Context::cast(Context::cast(it.frame()->context())->global_context()));
+ Handle<Context> calling_frames_native_context(
+ Context::cast(Context::cast(it.frame()->context())->native_context()));
receiver =
- isolate->factory()->ToObject(receiver, calling_frames_global_context);
+ isolate->factory()->ToObject(receiver, calling_frames_native_context);
}
details->set(kFrameDetailsReceiverIndex, *receiver);
@@ -10895,7 +10765,7 @@ static Handle<JSObject> MaterializeLocalScopeWithFrameInspector(
// These will be variables introduced by eval.
if (function_context->closure() == *function) {
if (function_context->has_extension() &&
- !function_context->IsGlobalContext()) {
+ !function_context->IsNativeContext()) {
Handle<JSObject> ext(JSObject::cast(function_context->extension()));
bool threw = false;
Handle<FixedArray> keys =
@@ -11069,7 +10939,8 @@ class ScopeIterator {
inlined_jsframe_index_(inlined_jsframe_index),
function_(JSFunction::cast(frame->function())),
context_(Context::cast(frame->context())),
- nested_scope_chain_(4) {
+ nested_scope_chain_(4),
+ failed_(false) {
// Catch the case when the debugger stops in an internal function.
Handle<SharedFunctionInfo> shared_info(function_->shared());
@@ -11082,7 +10953,7 @@ class ScopeIterator {
}
// Get the debug info (create it if it does not exist).
- if (!isolate->debug()->EnsureDebugInfo(shared_info)) {
+ if (!isolate->debug()->EnsureDebugInfo(shared_info, function_)) {
// Return if ensuring debug info failed.
return;
}
@@ -11107,7 +10978,6 @@ class ScopeIterator {
if (scope_info->Type() != EVAL_SCOPE) nested_scope_chain_.Add(scope_info);
} else {
// Reparse the code and analyze the scopes.
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
Handle<Script> script(Script::cast(shared_info->script()));
Scope* scope = NULL;
@@ -11115,36 +10985,25 @@ class ScopeIterator {
Handle<ScopeInfo> scope_info(shared_info->scope_info());
if (scope_info->Type() != FUNCTION_SCOPE) {
// Global or eval code.
- CompilationInfo info(script);
+ CompilationInfoWithZone info(script);
if (scope_info->Type() == GLOBAL_SCOPE) {
info.MarkAsGlobal();
} else {
ASSERT(scope_info->Type() == EVAL_SCOPE);
info.MarkAsEval();
- info.SetCallingContext(Handle<Context>(function_->context()));
+ info.SetContext(Handle<Context>(function_->context()));
}
if (ParserApi::Parse(&info, kNoParsingFlags) && Scope::Analyze(&info)) {
scope = info.function()->scope();
}
+ RetrieveScopeChain(scope, shared_info);
} else {
// Function code
- CompilationInfo info(shared_info);
+ CompilationInfoWithZone info(shared_info);
if (ParserApi::Parse(&info, kNoParsingFlags) && Scope::Analyze(&info)) {
scope = info.function()->scope();
}
- }
-
- // Retrieve the scope chain for the current position.
- if (scope != NULL) {
- int source_position = shared_info->code()->SourcePosition(frame_->pc());
- scope->GetNestedScopeChain(&nested_scope_chain_, source_position);
- } else {
- // A failed reparse indicates that the preparser has diverged from the
- // parser or that the preparse data given to the initial parse has been
- // faulty. We fail in debug mode but in release mode we only provide the
- // information we get from the context chain but nothing about
- // completely stack allocated scopes or stack allocated locals.
- UNREACHABLE();
+ RetrieveScopeChain(scope, shared_info);
}
}
}
@@ -11155,21 +11014,28 @@ class ScopeIterator {
frame_(NULL),
inlined_jsframe_index_(0),
function_(function),
- context_(function->context()) {
+ context_(function->context()),
+ failed_(false) {
if (function->IsBuiltin()) {
context_ = Handle<Context>();
}
}
// More scopes?
- bool Done() { return context_.is_null(); }
+ bool Done() {
+ ASSERT(!failed_);
+ return context_.is_null();
+ }
+
+ bool Failed() { return failed_; }
// Move to the next scope.
void Next() {
+ ASSERT(!failed_);
ScopeType scope_type = Type();
if (scope_type == ScopeTypeGlobal) {
// The global scope is always the last in the chain.
- ASSERT(context_->IsGlobalContext());
+ ASSERT(context_->IsNativeContext());
context_ = Handle<Context>();
return;
}
@@ -11186,6 +11052,7 @@ class ScopeIterator {
// Return the type of the current scope.
ScopeType Type() {
+ ASSERT(!failed_);
if (!nested_scope_chain_.is_empty()) {
Handle<ScopeInfo> scope_info = nested_scope_chain_.last();
switch (scope_info->Type()) {
@@ -11197,7 +11064,7 @@ class ScopeIterator {
ASSERT(context_->IsModuleContext());
return ScopeTypeModule;
case GLOBAL_SCOPE:
- ASSERT(context_->IsGlobalContext());
+ ASSERT(context_->IsNativeContext());
return ScopeTypeGlobal;
case WITH_SCOPE:
ASSERT(context_->IsWithContext());
@@ -11213,8 +11080,8 @@ class ScopeIterator {
UNREACHABLE();
}
}
- if (context_->IsGlobalContext()) {
- ASSERT(context_->global()->IsGlobalObject());
+ if (context_->IsNativeContext()) {
+ ASSERT(context_->global_object()->IsGlobalObject());
return ScopeTypeGlobal;
}
if (context_->IsFunctionContext()) {
@@ -11235,9 +11102,10 @@ class ScopeIterator {
// Return the JavaScript object with the content of the current scope.
Handle<JSObject> ScopeObject() {
+ ASSERT(!failed_);
switch (Type()) {
case ScopeIterator::ScopeTypeGlobal:
- return Handle<JSObject>(CurrentContext()->global());
+ return Handle<JSObject>(CurrentContext()->global_object());
case ScopeIterator::ScopeTypeLocal:
// Materialize the content of the local scope into a JSObject.
ASSERT(nested_scope_chain_.length() == 1);
@@ -11260,6 +11128,7 @@ class ScopeIterator {
}
Handle<ScopeInfo> CurrentScopeInfo() {
+ ASSERT(!failed_);
if (!nested_scope_chain_.is_empty()) {
return nested_scope_chain_.last();
} else if (context_->IsBlockContext()) {
@@ -11273,6 +11142,7 @@ class ScopeIterator {
// Return the context for this scope. For the local context there might not
// be an actual context.
Handle<Context> CurrentContext() {
+ ASSERT(!failed_);
if (Type() == ScopeTypeGlobal ||
nested_scope_chain_.is_empty()) {
return context_;
@@ -11286,6 +11156,7 @@ class ScopeIterator {
#ifdef DEBUG
// Debug print of the content of the current scope.
void DebugPrint() {
+ ASSERT(!failed_);
switch (Type()) {
case ScopeIterator::ScopeTypeGlobal:
PrintF("Global:\n");
@@ -11343,6 +11214,24 @@ class ScopeIterator {
Handle<JSFunction> function_;
Handle<Context> context_;
List<Handle<ScopeInfo> > nested_scope_chain_;
+ bool failed_;
+
+ void RetrieveScopeChain(Scope* scope,
+ Handle<SharedFunctionInfo> shared_info) {
+ if (scope != NULL) {
+ int source_position = shared_info->code()->SourcePosition(frame_->pc());
+ scope->GetNestedScopeChain(&nested_scope_chain_, source_position);
+ } else {
+ // A failed reparse indicates that the preparser has diverged from the
+ // parser or that the preparse data given to the initial parse has been
+ // faulty. We fail in debug mode but in release mode we only provide the
+ // information we get from the context chain but nothing about
+ // completely stack allocated scopes or stack allocated locals.
+ // Or it could be due to stack overflow.
+ ASSERT(isolate_->has_pending_exception());
+ failed_ = true;
+ }
+ }
DISALLOW_IMPLICIT_CONSTRUCTORS(ScopeIterator);
};
@@ -11611,110 +11500,26 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllowBreakPointRelocation) {
}
-// Set a break point in a function
+// Set a break point in a function.
// args[0]: function
// args[1]: number: break source position (within the function source)
// args[2]: number: break point object
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFunctionBreakPoint) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_HANDLE_CHECKED(JSFunction, fun, 0);
- Handle<SharedFunctionInfo> shared(fun->shared());
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
RUNTIME_ASSERT(source_position >= 0);
Handle<Object> break_point_object_arg = args.at<Object>(2);
// Set break point.
- isolate->debug()->SetBreakPoint(shared, break_point_object_arg,
+ isolate->debug()->SetBreakPoint(function, break_point_object_arg,
&source_position);
return Smi::FromInt(source_position);
}
-Object* Runtime::FindSharedFunctionInfoInScript(Isolate* isolate,
- Handle<Script> script,
- int position) {
- // Iterate the heap looking for SharedFunctionInfo generated from the
- // script. The inner most SharedFunctionInfo containing the source position
- // for the requested break point is found.
- // NOTE: This might require several heap iterations. If the SharedFunctionInfo
- // which is found is not compiled it is compiled and the heap is iterated
- // again as the compilation might create inner functions from the newly
- // compiled function and the actual requested break point might be in one of
- // these functions.
- bool done = false;
- // The current candidate for the source position:
- int target_start_position = RelocInfo::kNoPosition;
- Handle<SharedFunctionInfo> target;
- while (!done) {
- { // Extra scope for iterator and no-allocation.
- isolate->heap()->EnsureHeapIsIterable();
- AssertNoAllocation no_alloc_during_heap_iteration;
- HeapIterator iterator;
- for (HeapObject* obj = iterator.next();
- obj != NULL; obj = iterator.next()) {
- if (obj->IsSharedFunctionInfo()) {
- Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(obj));
- if (shared->script() == *script) {
- // If the SharedFunctionInfo found has the requested script data and
- // contains the source position it is a candidate.
- int start_position = shared->function_token_position();
- if (start_position == RelocInfo::kNoPosition) {
- start_position = shared->start_position();
- }
- if (start_position <= position &&
- position <= shared->end_position()) {
- // If there is no candidate or this function is within the current
- // candidate this is the new candidate.
- if (target.is_null()) {
- target_start_position = start_position;
- target = shared;
- } else {
- if (target_start_position == start_position &&
- shared->end_position() == target->end_position()) {
- // If a top-level function contain only one function
- // declartion the source for the top-level and the
- // function is the same. In that case prefer the non
- // top-level function.
- if (!shared->is_toplevel()) {
- target_start_position = start_position;
- target = shared;
- }
- } else if (target_start_position <= start_position &&
- shared->end_position() <= target->end_position()) {
- // This containment check includes equality as a function
- // inside a top-level function can share either start or end
- // position with the top-level function.
- target_start_position = start_position;
- target = shared;
- }
- }
- }
- }
- }
- } // End for loop.
- } // End No allocation scope.
-
- if (target.is_null()) {
- return isolate->heap()->undefined_value();
- }
-
- // If the candidate found is compiled we are done. NOTE: when lazy
- // compilation of inner functions is introduced some additional checking
- // needs to be done here to compile inner functions.
- done = target->is_compiled();
- if (!done) {
- // If the candidate is not compiled compile it to reveal any inner
- // functions which might contain the requested source position.
- SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION);
- }
- } // End while loop.
-
- return *target;
-}
-
-
// Changes the state of a break point in a script and returns source position
// where break point was set. NOTE: Regarding performance see the NOTE for
// GetScriptFromScriptData.
@@ -11733,23 +11538,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) {
RUNTIME_ASSERT(wrapper->value()->IsScript());
Handle<Script> script(Script::cast(wrapper->value()));
- Object* result = Runtime::FindSharedFunctionInfoInScript(
- isolate, script, source_position);
- if (!result->IsUndefined()) {
- Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result));
- // Find position within function. The script position might be before the
- // source position of the first function.
- int position;
- if (shared->start_position() > source_position) {
- position = 0;
- } else {
- position = source_position - shared->start_position();
- }
- isolate->debug()->SetBreakPoint(shared, break_point_object_arg, &position);
- position += shared->start_position();
- return Smi::FromInt(position);
+ // Set break point.
+ if (!isolate->debug()->SetBreakPointForScript(script, break_point_object_arg,
+ &source_position)) {
+ return isolate->heap()->undefined_value();
}
- return isolate->heap()->undefined_value();
+
+ return Smi::FromInt(source_position);
}
@@ -11865,6 +11660,8 @@ static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
List<Handle<Context> > context_chain;
ScopeIterator it(isolate, frame, inlined_jsframe_index);
+ if (it.Failed()) return Handle<Context>::null();
+
for (; it.Type() != ScopeIterator::ScopeTypeGlobal &&
it.Type() != ScopeIterator::ScopeTypeLocal ; it.Next()) {
ASSERT(!it.Done());
@@ -11893,9 +11690,7 @@ static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
// Materialize the contents of the block scope into a JSObject.
Handle<JSObject> block_scope_object =
MaterializeBlockScope(isolate, current);
- if (block_scope_object.is_null()) {
- return Handle<Context>::null();
- }
+ CHECK(!block_scope_object.is_null());
// Allocate a new function context for the debug evaluation and set the
// extension object.
Handle<Context> new_context =
@@ -12040,7 +11835,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Handle<Context> context =
isolate->factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS,
go_between);
- context->set_extension(*local_scope);
+
+ // Use the materialized local scope in a with context.
+ context =
+ isolate->factory()->NewWithContext(go_between, context, local_scope);
+
// Copy any with contexts present and chain them in front of this context.
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context;
@@ -12053,6 +11852,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
context,
frame,
inlined_jsframe_index);
+ if (context.is_null()) {
+ ASSERT(isolate->has_pending_exception());
+ MaybeObject* exception = isolate->pending_exception();
+ isolate->clear_pending_exception();
+ return exception;
+ }
if (additional_context->IsJSObject()) {
Handle<JSObject> extension = Handle<JSObject>::cast(additional_context);
@@ -12075,7 +11880,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Handle<SharedFunctionInfo> shared =
Compiler::CompileEval(function_source,
context,
- context->IsGlobalContext(),
+ context->IsNativeContext(),
CLASSIC_MODE,
RelocInfo::kNoPosition,
qml_mode);
@@ -12089,7 +11894,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Handle<Object> evaluation_function =
Execution::Call(compiled_function, receiver, 0, NULL,
&has_pending_exception, false,
- Handle<Object>(function->context()->qml_global()));
+ Handle<Object>(function->context()->qml_global_object()));
if (has_pending_exception) return Failure::Exception();
Handle<Object> arguments = GetArgumentsObject(isolate,
@@ -12098,6 +11903,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
scope_info,
function_context);
+ // Check if eval is blocked in the context and temporarily allow it
+ // for debugger.
+ Handle<Context> native_context = Handle<Context>(context->native_context());
+ bool eval_disabled =
+ native_context->allow_code_gen_from_strings()->IsFalse();
+ if (eval_disabled) {
+ native_context->set_allow_code_gen_from_strings(
+ isolate->heap()->true_value());
+ }
// Invoke the evaluation function and return the result.
Handle<Object> argv[] = { arguments, source };
Handle<Object> result =
@@ -12106,6 +11920,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
ARRAY_SIZE(argv),
argv,
&has_pending_exception);
+ if (eval_disabled) {
+ native_context->set_allow_code_gen_from_strings(
+ isolate->heap()->false_value());
+ }
if (has_pending_exception) return Failure::Exception();
// Skip the global proxy as it has no properties and always delegates to the
@@ -12148,9 +11966,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) {
isolate->set_context(*top->context());
}
- // Get the global context now set to the top context from before the
+ // Get the native context now set to the top context from before the
// debugger was invoked.
- Handle<Context> context = isolate->global_context();
+ Handle<Context> context = isolate->native_context();
bool is_global = true;
@@ -12182,7 +12000,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) {
// Invoke the result of the compilation to get the evaluation function.
bool has_pending_exception;
- Handle<Object> receiver = isolate->global();
+ Handle<Object> receiver = isolate->global_object();
Handle<Object> result =
Execution::Call(compiled_function, receiver, 0, NULL,
&has_pending_exception);
@@ -12316,7 +12134,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugReferencedBy) {
// Get the constructor function for context extension and arguments array.
JSObject* arguments_boilerplate =
- isolate->context()->global_context()->arguments_boilerplate();
+ isolate->context()->native_context()->arguments_boilerplate();
JSFunction* arguments_function =
JSFunction::cast(arguments_boilerplate->map()->constructor());
@@ -12345,7 +12163,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugReferencedBy) {
// Return result as JS array.
Object* result;
MaybeObject* maybe_result = isolate->heap()->AllocateJSObject(
- isolate->context()->global_context()->array_function());
+ isolate->context()->native_context()->array_function());
if (!maybe_result->ToObject(&result)) return maybe_result;
return JSArray::cast(result)->SetContent(instances);
}
@@ -12426,7 +12244,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugConstructedBy) {
// Return result as JS array.
Object* result;
{ MaybeObject* maybe_result = isolate->heap()->AllocateJSObject(
- isolate->context()->global_context()->array_function());
+ isolate->context()->native_context()->array_function());
if (!maybe_result->ToObject(&result)) return maybe_result;
}
return JSArray::cast(result)->SetContent(instances);
@@ -12451,7 +12269,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugSetScriptSource) {
ASSERT(args.length() == 2);
CONVERT_ARG_HANDLE_CHECKED(JSValue, script_wrapper, 0);
- Handle<String> source(String::cast(args[1]));
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 1);
RUNTIME_ASSERT(script_wrapper->value()->IsScript());
Handle<Script> script(Script::cast(script_wrapper->value()));
@@ -12477,8 +12295,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
- Handle<SharedFunctionInfo> shared(func->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
+ if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
func->code()->PrintLn();
@@ -12493,11 +12310,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
- Handle<SharedFunctionInfo> shared(func->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
+ if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
- shared->construct_stub()->PrintLn();
+ func->shared()->construct_stub()->PrintLn();
#endif // DEBUG
return isolate->heap()->undefined_value();
}
@@ -12542,11 +12358,12 @@ static int FindSharedFunctionInfosForScript(HeapIterator* iterator,
// in OpaqueReferences.
RUNTIME_FUNCTION(MaybeObject*,
Runtime_LiveEditFindSharedFunctionInfosForScript) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 1);
HandleScope scope(isolate);
CONVERT_ARG_CHECKED(JSValue, script_value, 0);
-
+ RUNTIME_ASSERT(script_value->value()->IsScript());
Handle<Script> script = Handle<Script>(Script::cast(script_value->value()));
const int kBufferSize = 32;
@@ -12588,10 +12405,13 @@ RUNTIME_FUNCTION(MaybeObject*,
// each function with all its descendant is always stored in a continues range
// with the function itself going first. The root function is a script function.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_CHECKED(JSValue, script, 0);
CONVERT_ARG_HANDLE_CHECKED(String, source, 1);
+
+ RUNTIME_ASSERT(script->value()->IsScript());
Handle<Script> script_handle = Handle<Script>(Script::cast(script->value()));
JSArray* result = LiveEdit::GatherCompileInfo(script_handle, source);
@@ -12607,6 +12427,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) {
// If old_script_name is provided (i.e. is a String), also creates a copy of
// the script with its original source and sends notification to debugger.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceScript) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 3);
HandleScope scope(isolate);
CONVERT_ARG_CHECKED(JSValue, original_script_value, 0);
@@ -12630,6 +12451,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceScript) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSourceUpdated) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 1);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_info, 0);
@@ -12639,6 +12461,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSourceUpdated) {
// Replaces code of SharedFunctionInfo with a new one.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceFunctionCode) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSArray, new_compile_info, 0);
@@ -12649,6 +12472,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceFunctionCode) {
// Connects SharedFunctionInfo to another script.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSetScript) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
Handle<Object> function_object(args[0], isolate);
@@ -12675,6 +12499,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSetScript) {
// In a code of a parent function replaces original function as embedded object
// with a substitution one.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 3);
HandleScope scope(isolate);
@@ -12695,6 +12520,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) {
// (change_begin, change_end, change_end_new_position).
// Each group describes a change in text; groups are sorted by change_begin.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0);
@@ -12709,18 +12535,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) {
// Returns array of the same length with corresponding results of
// LiveEdit::FunctionPatchabilityStatus type.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0);
CONVERT_BOOLEAN_ARG_CHECKED(do_drop, 1);
- return *LiveEdit::CheckAndDropActivations(shared_array, do_drop);
+ return *LiveEdit::CheckAndDropActivations(shared_array, do_drop,
+ isolate->runtime_zone());
}
// Compares 2 strings line-by-line, then token-wise and returns diff in form
// of JSArray of triplets (pos1, pos1_end, pos2_end) describing list
// of diff chunks.
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(String, s1, 0);
@@ -12730,9 +12559,50 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) {
}
+// Restarts a call frame and completely drops all frames above.
+// Returns true if successful. Otherwise returns undefined or an error message.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditRestartFrame) {
+ CHECK(isolate->debugger()->live_edit_enabled());
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+
+ // Check arguments.
+ Object* check;
+ { MaybeObject* maybe_check = Runtime_CheckExecutionState(
+ RUNTIME_ARGUMENTS(isolate, args));
+ if (!maybe_check->ToObject(&check)) return maybe_check;
+ }
+ CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]);
+ Heap* heap = isolate->heap();
+
+ // Find the relevant frame with the requested index.
+ StackFrame::Id id = isolate->debug()->break_frame_id();
+ if (id == StackFrame::NO_ID) {
+ // If there are no JavaScript stack frames return undefined.
+ return heap->undefined_value();
+ }
+
+ int count = 0;
+ JavaScriptFrameIterator it(isolate, id);
+ for (; !it.done(); it.Advance()) {
+ if (index < count + it.frame()->GetInlineCount()) break;
+ count += it.frame()->GetInlineCount();
+ }
+ if (it.done()) return heap->undefined_value();
+
+ const char* error_message =
+ LiveEdit::RestartFrame(it.frame(), isolate->runtime_zone());
+ if (error_message) {
+ return *(isolate->factory()->LookupAsciiSymbol(error_message));
+ }
+ return heap->true_value();
+}
+
+
// A testing entry. Returns statement position which is the closest to
// source_position.
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionCodePositionFromSource) {
+ CHECK(isolate->debugger()->live_edit_enabled());
ASSERT(args.length() == 2);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
@@ -12779,11 +12649,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ExecuteInDebugContext) {
bool pending_exception;
{
if (without_debugger) {
- result = Execution::Call(function, isolate->global(), 0, NULL,
+ result = Execution::Call(function, isolate->global_object(), 0, NULL,
&pending_exception);
} else {
EnterDebugger enter_debugger;
- result = Execution::Call(function, isolate->global(), 0, NULL,
+ result = Execution::Call(function, isolate->global_object(), 0, NULL,
&pending_exception);
}
}
@@ -13262,7 +13132,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
Handle<JSFunction> factory(JSFunction::cast(
cache_handle->get(JSFunctionResultCache::kFactoryIndex)));
// TODO(antonm): consider passing a receiver when constructing a cache.
- Handle<Object> receiver(isolate->global_context()->global());
+ Handle<Object> receiver(isolate->native_context()->global_object());
// This handle is nor shared, nor used later, so it's safe.
Handle<Object> argv[] = { key_handle };
bool pending_exception;
@@ -13274,7 +13144,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
if (pending_exception) return Failure::Exception();
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
@@ -13305,7 +13175,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
cache_handle->set(index + 1, *value);
cache_handle->set_finger_index(index);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
cache_handle->JSFunctionResultCacheVerify();
}
@@ -13315,33 +13185,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NewMessageObject) {
- HandleScope scope(isolate);
- CONVERT_ARG_HANDLE_CHECKED(String, type, 0);
- CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 1);
- return *isolate->factory()->NewJSMessageObject(
- type,
- arguments,
- 0,
- 0,
- isolate->factory()->undefined_value(),
- isolate->factory()->undefined_value(),
- isolate->factory()->undefined_value());
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetType) {
- CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
- return message->type();
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetArguments) {
- CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
- return message->arguments();
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetStartPosition) {
CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
return Smi::FromInt(message->start_position());
@@ -13425,9 +13268,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IS_VAR) {
return isolate->heap()->ToBoolean(obj->Has##Name()); \
}
-ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiOnlyElements)
-ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastObjectElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiOrObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastDoubleElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastHoleyElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(DictionaryElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalPixelElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalArrayElements)
@@ -13439,6 +13284,8 @@ ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalIntElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalUnsignedIntElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalFloatElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalDoubleElements)
+// Properties test sitting with elements tests - not fooling anyone.
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastProperties)
#undef ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION
@@ -13450,6 +13297,78 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HaveSameMap) {
return isolate->heap()->ToBoolean(obj1->map() == obj2->map());
}
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_IsObserved) {
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_CHECKED(JSReceiver, obj, 0);
+ return isolate->heap()->ToBoolean(obj->map()->is_observed());
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetIsObserved) {
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_CHECKED(JSReceiver, obj, 0);
+ CONVERT_BOOLEAN_ARG_CHECKED(is_observed, 1);
+ if (obj->map()->is_observed() != is_observed) {
+ MaybeObject* maybe = obj->map()->Copy();
+ Map* map;
+ if (!maybe->To(&map)) return maybe;
+ map->set_is_observed(is_observed);
+ obj->set_map(map);
+ }
+ return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_SetObserverDeliveryPending) {
+ ASSERT(args.length() == 0);
+ isolate->set_observer_delivery_pending(true);
+ return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetObservationState) {
+ ASSERT(args.length() == 0);
+ return isolate->heap()->observation_state();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectHashTable) {
+ ASSERT(args.length() == 0);
+ return ObjectHashTable::Allocate(0);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableGet) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_CHECKED(ObjectHashTable, table, 0);
+ Object* key = args[1];
+ Object* lookup = table->Lookup(key);
+ return lookup->IsTheHole() ? isolate->heap()->undefined_value() : lookup;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableSet) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 3);
+ CONVERT_ARG_HANDLE_CHECKED(ObjectHashTable, table, 0);
+ Handle<Object> key = args.at<Object>(1);
+ Handle<Object> value = args.at<Object>(2);
+ return *PutIntoObjectHashTable(table, key, value);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ObjectHashTableHas) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_CHECKED(ObjectHashTable, table, 0);
+ Object* key = args[1];
+ Object* lookup = table->Lookup(key);
+ return isolate->heap()->ToBoolean(!lookup->IsTheHole());
+}
+
+
// ----------------------------------------------------------------------------
// Implementation of Runtime
diff --git a/src/3rdparty/v8/src/runtime.h b/src/3rdparty/v8/src/runtime.h
index cfd8b1e..6428f89 100644
--- a/src/3rdparty/v8/src/runtime.h
+++ b/src/3rdparty/v8/src/runtime.h
@@ -62,7 +62,6 @@ namespace internal {
F(GetIndexedInterceptorElementNames, 1, 1) \
F(GetArgumentsProperty, 1, 1) \
F(ToFastProperties, 1, 1) \
- F(ToSlowProperties, 1, 1) \
F(FinishArrayPrototypeSetup, 1, 1) \
F(SpecialArrayFunctions, 1, 1) \
F(GetDefaultReceiver, 1, 1) \
@@ -86,6 +85,7 @@ namespace internal {
F(NewStrictArgumentsFast, 3, 1) \
F(LazyCompile, 1, 1) \
F(LazyRecompile, 1, 1) \
+ F(ParallelRecompile, 1, 1) \
F(NotifyDeoptimized, 1, 1) \
F(NotifyOSR, 0, 1) \
F(DeoptimizeFunction, 1, 1) \
@@ -120,6 +120,7 @@ namespace internal {
F(CharFromCode, 1, 1) \
F(URIEscape, 1, 1) \
F(URIUnescape, 1, 1) \
+ F(BasicJSONStringify, 1, 1) \
F(QuoteJSONString, 1, 1) \
F(QuoteJSONStringComma, 1, 1) \
F(QuoteJSONStringArray, 1, 1) \
@@ -267,6 +268,7 @@ namespace internal {
F(DefineOrRedefineDataProperty, 4, 1) \
F(DefineOrRedefineAccessorProperty, 5, 1) \
F(IgnoreAttributesAndSetProperty, -1 /* 3 or 4 */, 1) \
+ F(GetDataProperty, 2, 1) \
\
/* Arrays */ \
F(RemoveArrayHoles, 2, 1) \
@@ -284,6 +286,9 @@ namespace internal {
F(CreateArrayLiteral, 3, 1) \
F(CreateArrayLiteralShallow, 3, 1) \
\
+ /* Harmony modules */ \
+ F(IsJSModule, 1, 1) \
+ \
/* Harmony proxies */ \
F(CreateJSProxy, 2, 1) \
F(CreateJSFunctionProxy, 4, 1) \
@@ -299,17 +304,33 @@ namespace internal {
F(SetAdd, 2, 1) \
F(SetHas, 2, 1) \
F(SetDelete, 2, 1) \
+ F(SetGetSize, 1, 1) \
\
/* Harmony maps */ \
F(MapInitialize, 1, 1) \
F(MapGet, 2, 1) \
+ F(MapHas, 2, 1) \
+ F(MapDelete, 2, 1) \
F(MapSet, 3, 1) \
+ F(MapGetSize, 1, 1) \
\
/* Harmony weakmaps */ \
F(WeakMapInitialize, 1, 1) \
F(WeakMapGet, 2, 1) \
+ F(WeakMapHas, 2, 1) \
+ F(WeakMapDelete, 2, 1) \
F(WeakMapSet, 3, 1) \
\
+ /* Harmony observe */ \
+ F(IsObserved, 1, 1) \
+ F(SetIsObserved, 2, 1) \
+ F(SetObserverDeliveryPending, 0, 1) \
+ F(GetObservationState, 0, 1) \
+ F(CreateObjectHashTable, 0, 1) \
+ F(ObjectHashTableGet, 2, 1) \
+ F(ObjectHashTableSet, 3, 1) \
+ F(ObjectHashTableHas, 2, 1) \
+ \
/* Statements */ \
F(NewClosure, 3, 1) \
F(NewObject, 1, 1) \
@@ -318,16 +339,18 @@ namespace internal {
F(Throw, 1, 1) \
F(ReThrow, 1, 1) \
F(ThrowReferenceError, 1, 1) \
+ F(ThrowNotDateError, 0, 1) \
F(StackGuard, 0, 1) \
F(Interrupt, 0, 1) \
F(PromoteScheduledException, 0, 1) \
\
/* Contexts */ \
+ F(NewGlobalContext, 2, 1) \
F(NewFunctionContext, 1, 1) \
F(PushWithContext, 2, 1) \
F(PushCatchContext, 3, 1) \
F(PushBlockContext, 2, 1) \
- F(PushModuleContext, 2, 1) \
+ F(PushModuleContext, 1, 1) \
F(DeleteContextSlot, 2, 1) \
F(LoadContextSlot, 2, 2) \
F(LoadContextSlotNoReferenceError, 2, 2) \
@@ -355,9 +378,6 @@ namespace internal {
F(GetFromCache, 2, 1) \
\
/* Message objects */ \
- F(NewMessageObject, 2, 1) \
- F(MessageGetType, 1, 1) \
- F(MessageGetArguments, 1, 1) \
F(MessageGetStartPosition, 1, 1) \
F(MessageGetScript, 1, 1) \
\
@@ -365,9 +385,11 @@ namespace internal {
F(IS_VAR, 1, 1) \
\
/* expose boolean functions from objects-inl.h */ \
- F(HasFastSmiOnlyElements, 1, 1) \
- F(HasFastElements, 1, 1) \
+ F(HasFastSmiElements, 1, 1) \
+ F(HasFastSmiOrObjectElements, 1, 1) \
+ F(HasFastObjectElements, 1, 1) \
F(HasFastDoubleElements, 1, 1) \
+ F(HasFastHoleyElements, 1, 1) \
F(HasDictionaryElements, 1, 1) \
F(HasExternalPixelElements, 1, 1) \
F(HasExternalArrayElements, 1, 1) \
@@ -379,6 +401,7 @@ namespace internal {
F(HasExternalUnsignedIntElements, 1, 1) \
F(HasExternalFloatElements, 1, 1) \
F(HasExternalDoubleElements, 1, 1) \
+ F(HasFastProperties, 1, 1) \
F(TransitionElementsSmiToDouble, 1, 1) \
F(TransitionElementsDoubleToObject, 1, 1) \
F(HaveSameMap, 2, 1) \
@@ -441,6 +464,7 @@ namespace internal {
F(LiveEditPatchFunctionPositions, 2, 1) \
F(LiveEditCheckAndDropActivations, 2, 1) \
F(LiveEditCompareStrings, 2, 1) \
+ F(LiveEditRestartFrame, 2, 1) \
F(GetFunctionCodePositionFromSource, 2, 1) \
F(ExecuteInDebugContext, 2, 1) \
\
@@ -636,13 +660,6 @@ class Runtime : public AllStatic {
// Get the intrinsic function with the given FunctionId.
static const Function* FunctionForId(FunctionId id);
- static Handle<String> StringReplaceOneCharWithString(Isolate* isolate,
- Handle<String> subject,
- Handle<String> search,
- Handle<String> replace,
- bool* found,
- int recursion_limit);
-
// General-purpose helper functions for runtime system.
static int StringMatch(Isolate* isolate,
Handle<String> sub,
@@ -685,11 +702,6 @@ class Runtime : public AllStatic {
Handle<Object> object,
Handle<Object> key);
- // This function is used in FunctionNameUsing* tests.
- static Object* FindSharedFunctionInfoInScript(Isolate* isolate,
- Handle<Script> script,
- int position);
-
// Helper functions used stubs.
static void PerformGC(Object* result);
diff --git a/src/3rdparty/v8/src/safepoint-table.cc b/src/3rdparty/v8/src/safepoint-table.cc
index 89ad8af..714e5c3 100644
--- a/src/3rdparty/v8/src/safepoint-table.cc
+++ b/src/3rdparty/v8/src/safepoint-table.cc
@@ -116,8 +116,8 @@ void SafepointTable::PrintBits(uint8_t byte, int digits) {
}
-void Safepoint::DefinePointerRegister(Register reg) {
- registers_->Add(reg.code());
+void Safepoint::DefinePointerRegister(Register reg, Zone* zone) {
+ registers_->Add(reg.code(), zone);
}
@@ -131,15 +131,16 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
info.pc = assembler->pc_offset();
info.arguments = arguments;
info.has_doubles = (kind & Safepoint::kWithDoubles);
- deoptimization_info_.Add(info);
- deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex);
+ deoptimization_info_.Add(info, zone_);
+ deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex, zone_);
if (deopt_mode == Safepoint::kNoLazyDeopt) {
last_lazy_safepoint_ = deopt_index_list_.length();
}
- indexes_.Add(new ZoneList<int>(8));
+ indexes_.Add(new(zone_) ZoneList<int>(8, zone_), zone_);
registers_.Add((kind & Safepoint::kWithRegisters)
- ? new ZoneList<int>(4)
- : NULL);
+ ? new(zone_) ZoneList<int>(4, zone_)
+ : NULL,
+ zone_);
return Safepoint(indexes_.last(), registers_.last());
}
@@ -190,12 +191,12 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
}
// Emit table of bitmaps.
- ZoneList<uint8_t> bits(bytes_per_entry);
+ ZoneList<uint8_t> bits(bytes_per_entry, zone_);
for (int i = 0; i < length; i++) {
ZoneList<int>* indexes = indexes_[i];
ZoneList<int>* registers = registers_[i];
bits.Clear();
- bits.AddBlock(0, bytes_per_entry);
+ bits.AddBlock(0, bytes_per_entry, zone_);
// Run through the registers (if any).
ASSERT(IsAligned(kNumSafepointRegisters, kBitsPerByte));
diff --git a/src/3rdparty/v8/src/safepoint-table.h b/src/3rdparty/v8/src/safepoint-table.h
index 57fceec..307d948 100644
--- a/src/3rdparty/v8/src/safepoint-table.h
+++ b/src/3rdparty/v8/src/safepoint-table.h
@@ -183,8 +183,8 @@ class Safepoint BASE_EMBEDDED {
static const int kNoDeoptimizationIndex =
(1 << (SafepointEntry::kDeoptIndexBits)) - 1;
- void DefinePointerSlot(int index) { indexes_->Add(index); }
- void DefinePointerRegister(Register reg);
+ void DefinePointerSlot(int index, Zone* zone) { indexes_->Add(index, zone); }
+ void DefinePointerRegister(Register reg, Zone* zone);
private:
Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers) :
@@ -198,13 +198,14 @@ class Safepoint BASE_EMBEDDED {
class SafepointTableBuilder BASE_EMBEDDED {
public:
- SafepointTableBuilder()
- : deoptimization_info_(32),
- deopt_index_list_(32),
- indexes_(32),
- registers_(32),
+ explicit SafepointTableBuilder(Zone* zone)
+ : deoptimization_info_(32, zone),
+ deopt_index_list_(32, zone),
+ indexes_(32, zone),
+ registers_(32, zone),
emitted_(false),
- last_lazy_safepoint_(0) { }
+ last_lazy_safepoint_(0),
+ zone_(zone) { }
// Get the offset of the emitted safepoint table in the code.
unsigned GetCodeOffset() const;
@@ -242,6 +243,8 @@ class SafepointTableBuilder BASE_EMBEDDED {
bool emitted_;
int last_lazy_safepoint_;
+ Zone* zone_;
+
DISALLOW_COPY_AND_ASSIGN(SafepointTableBuilder);
};
diff --git a/src/3rdparty/v8/src/scanner.cc b/src/3rdparty/v8/src/scanner.cc
index f24af2e..61ee1a4 100755
--- a/src/3rdparty/v8/src/scanner.cc
+++ b/src/3rdparty/v8/src/scanner.cc
@@ -32,6 +32,11 @@
#include "../include/v8stdint.h"
#include "char-predicates-inl.h"
+#undef CONST
+#undef DELETE
+#undef IN
+#undef VOID
+
namespace v8 {
namespace internal {
@@ -1077,6 +1082,7 @@ bool Scanner::ScanRegExpFlags() {
if (!ScanLiteralUnicodeEscape()) {
break;
}
+ Advance();
}
}
literal.Complete();
diff --git a/src/3rdparty/v8/src/scopeinfo.cc b/src/3rdparty/v8/src/scopeinfo.cc
index a1f8339..66e2013 100644
--- a/src/3rdparty/v8/src/scopeinfo.cc
+++ b/src/3rdparty/v8/src/scopeinfo.cc
@@ -38,10 +38,10 @@ namespace v8 {
namespace internal {
-Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
+Handle<ScopeInfo> ScopeInfo::Create(Scope* scope, Zone* zone) {
// Collect stack and context locals.
- ZoneList<Variable*> stack_locals(scope->StackLocalCount());
- ZoneList<Variable*> context_locals(scope->ContextLocalCount());
+ ZoneList<Variable*> stack_locals(scope->StackLocalCount(), zone);
+ ZoneList<Variable*> context_locals(scope->ContextLocalCount(), zone);
scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
const int stack_local_count = stack_locals.length();
const int context_local_count = context_locals.length();
@@ -199,8 +199,8 @@ int ScopeInfo::ContextLength(bool qml_function) {
bool has_context = context_locals > 0 ||
function_name_context_slot ||
Type() == WITH_SCOPE ||
- (Type() == FUNCTION_SCOPE && CallsEval());
-
+ (Type() == FUNCTION_SCOPE && CallsEval()) ||
+ Type() == MODULE_SCOPE;
// TODO: The QML mode should be checked in the has_context expression.
if (has_context || qml_function) {
return Context::MIN_CONTEXT_SLOTS + context_locals +
@@ -230,11 +230,7 @@ bool ScopeInfo::HasHeapAllocatedLocals() {
bool ScopeInfo::HasContext() {
- if (length() > 0) {
- return ContextLength() > 0;
- } else {
- return false;
- }
+ return ContextLength() > 0;
}
diff --git a/src/3rdparty/v8/src/scopes.cc b/src/3rdparty/v8/src/scopes.cc
index 6580378..d2a919a 100644
--- a/src/3rdparty/v8/src/scopes.cc
+++ b/src/3rdparty/v8/src/scopes.cc
@@ -29,6 +29,7 @@
#include "scopes.h"
+#include "accessors.h"
#include "bootstrapper.h"
#include "compiler.h"
#include "messages.h"
@@ -59,7 +60,9 @@ static bool Match(void* key1, void* key2) {
}
-VariableMap::VariableMap() : ZoneHashMap(Match, 8) {}
+VariableMap::VariableMap(Zone* zone)
+ : ZoneHashMap(Match, 8, ZoneAllocationPolicy(zone)),
+ zone_(zone) {}
VariableMap::~VariableMap() {}
@@ -71,24 +74,26 @@ Variable* VariableMap::Declare(
Variable::Kind kind,
InitializationFlag initialization_flag,
Interface* interface) {
- Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true);
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true,
+ ZoneAllocationPolicy(zone()));
if (p->value == NULL) {
// The variable has not been declared yet -> insert it.
ASSERT(p->key == name.location());
- p->value = new Variable(scope,
- name,
- mode,
- is_valid_lhs,
- kind,
- initialization_flag,
- interface);
+ p->value = new(zone()) Variable(scope,
+ name,
+ mode,
+ is_valid_lhs,
+ kind,
+ initialization_flag,
+ interface);
}
return reinterpret_cast<Variable*>(p->value);
}
Variable* VariableMap::Lookup(Handle<String> name) {
- Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), false);
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), false,
+ ZoneAllocationPolicy(NULL));
if (p != NULL) {
ASSERT(*reinterpret_cast<String**>(p->key) == *name);
ASSERT(p->value != NULL);
@@ -101,39 +106,40 @@ Variable* VariableMap::Lookup(Handle<String> name) {
// ----------------------------------------------------------------------------
// Implementation of Scope
-Scope::Scope(Scope* outer_scope, ScopeType type)
+Scope::Scope(Scope* outer_scope, ScopeType type, Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(4),
- variables_(),
- temps_(4),
- params_(4),
- unresolved_(16),
- decls_(4),
+ inner_scopes_(4, zone),
+ variables_(zone),
+ temps_(4, zone),
+ params_(4, zone),
+ unresolved_(16, zone),
+ decls_(4, zone),
interface_(FLAG_harmony_modules &&
(type == MODULE_SCOPE || type == GLOBAL_SCOPE)
- ? Interface::NewModule() : NULL),
- already_resolved_(false) {
+ ? Interface::NewModule(zone) : NULL),
+ already_resolved_(false),
+ zone_(zone) {
SetDefaults(type, outer_scope, Handle<ScopeInfo>::null());
- // At some point we might want to provide outer scopes to
- // eval scopes (by walking the stack and reading the scope info).
- // In that case, the ASSERT below needs to be adjusted.
- ASSERT_EQ(type == GLOBAL_SCOPE, outer_scope == NULL);
+ // The outermost scope must be a global scope.
+ ASSERT(type == GLOBAL_SCOPE || outer_scope != NULL);
ASSERT(!HasIllegalRedeclaration());
}
Scope::Scope(Scope* inner_scope,
ScopeType type,
- Handle<ScopeInfo> scope_info)
+ Handle<ScopeInfo> scope_info,
+ Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(4),
- variables_(),
- temps_(4),
- params_(4),
- unresolved_(16),
- decls_(4),
+ inner_scopes_(4, zone),
+ variables_(zone),
+ temps_(4, zone),
+ params_(4, zone),
+ unresolved_(16, zone),
+ decls_(4, zone),
interface_(NULL),
- already_resolved_(true) {
+ already_resolved_(true),
+ zone_(zone) {
SetDefaults(type, NULL, scope_info);
if (!scope_info.is_null()) {
num_heap_slots_ = scope_info_->ContextLength();
@@ -145,16 +151,17 @@ Scope::Scope(Scope* inner_scope,
}
-Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name)
+Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name, Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(1),
- variables_(),
- temps_(0),
- params_(0),
- unresolved_(0),
- decls_(0),
+ inner_scopes_(1, zone),
+ variables_(zone),
+ temps_(0, zone),
+ params_(0, zone),
+ unresolved_(0, zone),
+ decls_(0, zone),
interface_(NULL),
- already_resolved_(true) {
+ already_resolved_(true),
+ zone_(zone) {
SetDefaults(CATCH_SCOPE, NULL, Handle<ScopeInfo>::null());
AddInnerScope(inner_scope);
++num_var_or_const_;
@@ -204,36 +211,53 @@ void Scope::SetDefaults(ScopeType type,
}
-Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope) {
+Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope,
+ Zone* zone) {
// Reconstruct the outer scope chain from a closure's context chain.
Scope* current_scope = NULL;
Scope* innermost_scope = NULL;
bool contains_with = false;
- while (!context->IsGlobalContext()) {
+ while (!context->IsNativeContext()) {
if (context->IsWithContext()) {
- Scope* with_scope = new Scope(current_scope,
- WITH_SCOPE,
- Handle<ScopeInfo>::null());
+ Scope* with_scope = new(zone) Scope(current_scope,
+ WITH_SCOPE,
+ Handle<ScopeInfo>::null(),
+ zone);
current_scope = with_scope;
// All the inner scopes are inside a with.
contains_with = true;
for (Scope* s = innermost_scope; s != NULL; s = s->outer_scope()) {
s->scope_inside_with_ = true;
}
+ } else if (context->IsGlobalContext()) {
+ ScopeInfo* scope_info = ScopeInfo::cast(context->extension());
+ current_scope = new(zone) Scope(current_scope,
+ GLOBAL_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
+ } else if (context->IsModuleContext()) {
+ ScopeInfo* scope_info = ScopeInfo::cast(context->module()->scope_info());
+ current_scope = new(zone) Scope(current_scope,
+ MODULE_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
} else if (context->IsFunctionContext()) {
ScopeInfo* scope_info = context->closure()->shared()->scope_info();
- current_scope = new Scope(current_scope,
- FUNCTION_SCOPE,
- Handle<ScopeInfo>(scope_info));
+ current_scope = new(zone) Scope(current_scope,
+ FUNCTION_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
} else if (context->IsBlockContext()) {
ScopeInfo* scope_info = ScopeInfo::cast(context->extension());
- current_scope = new Scope(current_scope,
- BLOCK_SCOPE,
- Handle<ScopeInfo>(scope_info));
+ current_scope = new(zone) Scope(current_scope,
+ BLOCK_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
} else {
ASSERT(context->IsCatchContext());
String* name = String::cast(context->extension());
- current_scope = new Scope(current_scope, Handle<String>(name));
+ current_scope = new(zone) Scope(
+ current_scope, Handle<String>(name), zone);
}
if (contains_with) current_scope->RecordWithStatement();
if (innermost_scope == NULL) innermost_scope = current_scope;
@@ -265,7 +289,8 @@ bool Scope::Analyze(CompilationInfo* info) {
// Allocate the variables.
{
- AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
+ AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate(),
+ info->zone());
if (!top->AllocateVariables(info, &ast_node_factory)) return false;
}
@@ -309,7 +334,7 @@ void Scope::Initialize() {
// Add this scope as a new inner scope of the outer scope.
if (outer_scope_ != NULL) {
- outer_scope_->inner_scopes_.Add(this);
+ outer_scope_->inner_scopes_.Add(this, zone());
scope_inside_with_ = outer_scope_->scope_inside_with_ || is_with_scope();
} else {
scope_inside_with_ = is_with_scope();
@@ -374,7 +399,7 @@ Scope* Scope::FinalizeBlockScope() {
// Move unresolved variables
for (int i = 0; i < unresolved_.length(); i++) {
- outer_scope()->unresolved_.Add(unresolved_[i]);
+ outer_scope()->unresolved_.Add(unresolved_[i], zone());
}
return NULL;
@@ -405,13 +430,8 @@ Variable* Scope::LocalLookup(Handle<String> name) {
init_flag = kCreatedInitialized;
}
- Variable* var =
- variables_.Declare(this,
- name,
- mode,
- true,
- Variable::NORMAL,
- init_flag);
+ Variable* var = variables_.Declare(this, name, mode, true, Variable::NORMAL,
+ init_flag);
var->AllocateTo(location, index);
return var;
}
@@ -426,7 +446,7 @@ Variable* Scope::LookupFunctionVar(Handle<String> name,
VariableMode mode;
int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
if (index < 0) return NULL;
- Variable* var = new Variable(
+ Variable* var = new(zone()) Variable(
this, name, mode, true /* is valid LHS */,
Variable::NORMAL, kCreatedInitialized);
VariableProxy* proxy = factory->NewVariableProxy(var);
@@ -455,9 +475,9 @@ Variable* Scope::Lookup(Handle<String> name) {
void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
ASSERT(!already_resolved());
ASSERT(is_function_scope());
- Variable* var = variables_.Declare(
- this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
- params_.Add(var);
+ Variable* var = variables_.Declare(this, name, mode, true, Variable::NORMAL,
+ kCreatedInitialized);
+ params_.Add(var, zone());
}
@@ -469,17 +489,14 @@ Variable* Scope::DeclareLocal(Handle<String> name,
// This function handles VAR and CONST modes. DYNAMIC variables are
// introduces during variable allocation, INTERNAL variables are allocated
// explicitly, and TEMPORARY variables are allocated via NewTemporary().
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ ASSERT(IsDeclaredVariableMode(mode));
++num_var_or_const_;
return variables_.Declare(
this, name, mode, true, Variable::NORMAL, init_flag, interface);
}
-Variable* Scope::DeclareGlobal(Handle<String> name) {
+Variable* Scope::DeclareDynamicGlobal(Handle<String> name) {
ASSERT(is_global_scope());
return variables_.Declare(this,
name,
@@ -504,19 +521,19 @@ void Scope::RemoveUnresolved(VariableProxy* var) {
Variable* Scope::NewTemporary(Handle<String> name) {
ASSERT(!already_resolved());
- Variable* var = new Variable(this,
- name,
- TEMPORARY,
- true,
- Variable::NORMAL,
- kCreatedInitialized);
- temps_.Add(var);
+ Variable* var = new(zone()) Variable(this,
+ name,
+ TEMPORARY,
+ true,
+ Variable::NORMAL,
+ kCreatedInitialized);
+ temps_.Add(var, zone());
return var;
}
void Scope::AddDeclaration(Declaration* declaration) {
- decls_.Add(declaration);
+ decls_.Add(declaration, zone());
}
@@ -582,6 +599,21 @@ VariableProxy* Scope::CheckAssignmentToConst() {
}
+class VarAndOrder {
+ public:
+ VarAndOrder(Variable* var, int order) : var_(var), order_(order) { }
+ Variable* var() const { return var_; }
+ int order() const { return order_; }
+ static int Compare(const VarAndOrder* a, const VarAndOrder* b) {
+ return a->order_ - b->order_;
+ }
+
+ private:
+ Variable* var_;
+ int order_;
+};
+
+
void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
ZoneList<Variable*>* context_locals) {
ASSERT(stack_locals != NULL);
@@ -592,21 +624,29 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
Variable* var = temps_[i];
if (var->is_used()) {
ASSERT(var->IsStackLocal());
- stack_locals->Add(var);
+ stack_locals->Add(var, zone());
}
}
+ ZoneList<VarAndOrder> vars(variables_.occupancy(), zone());
+
// Collect declared local variables.
for (VariableMap::Entry* p = variables_.Start();
p != NULL;
p = variables_.Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value);
if (var->is_used()) {
- if (var->IsStackLocal()) {
- stack_locals->Add(var);
- } else if (var->IsContextSlot()) {
- context_locals->Add(var);
- }
+ vars.Add(VarAndOrder(var, p->order), zone());
+ }
+ }
+ vars.Sort(VarAndOrder::Compare);
+ int var_count = vars.length();
+ for (int i = 0; i < var_count; i++) {
+ Variable* var = vars[i].var();
+ if (var->IsStackLocal()) {
+ stack_locals->Add(var, zone());
+ } else if (var->IsContextSlot()) {
+ context_locals->Add(var, zone());
}
}
}
@@ -629,12 +669,13 @@ bool Scope::AllocateVariables(CompilationInfo* info,
// 3) Allocate variables.
AllocateVariablesRecursively();
- return true;
-}
-
+ // 4) Allocate and link module instance objects.
+ if (FLAG_harmony_modules && (is_global_scope() || is_module_scope())) {
+ AllocateModules(info);
+ LinkModules(info);
+ }
-bool Scope::AllowsLazyCompilation() const {
- return !force_eager_compilation_ && HasTrivialOuterContext();
+ return true;
}
@@ -662,23 +703,36 @@ bool Scope::HasTrivialOuterContext() const {
}
-bool Scope::AllowsLazyRecompilation() const {
- return !force_eager_compilation_ &&
- !TrivialDeclarationScopesBeforeWithScope();
-}
-
-
-bool Scope::TrivialDeclarationScopesBeforeWithScope() const {
+bool Scope::HasLazyCompilableOuterContext() const {
Scope* outer = outer_scope_;
- if (outer == NULL) return false;
+ if (outer == NULL) return true;
+ // There are several reasons that prevent lazy compilation:
+ // - This scope is inside a with scope and all declaration scopes between
+ // them have empty contexts. Such declaration scopes become invisible
+ // during scope info deserialization.
+ // - This scope is inside a strict eval scope with variables that are
+ // potentially context allocated in an artificial function scope that
+ // is not deserialized correctly.
outer = outer->DeclarationScope();
- while (outer != NULL) {
- if (outer->is_with_scope()) return true;
- if (outer->is_declaration_scope() && outer->num_heap_slots() > 0)
- return false;
- outer = outer->outer_scope_;
+ bool found_non_trivial_declarations = false;
+ for (const Scope* scope = outer; scope != NULL; scope = scope->outer_scope_) {
+ if (scope->is_eval_scope()) return false;
+ if (scope->is_with_scope() && !found_non_trivial_declarations) return false;
+ if (scope->is_declaration_scope() && scope->num_heap_slots() > 0) {
+ found_non_trivial_declarations = true;
+ }
}
- return false;
+ return true;
+}
+
+
+bool Scope::AllowsLazyCompilation() const {
+ return !force_eager_compilation_ && HasLazyCompilableOuterContext();
+}
+
+
+bool Scope::AllowsLazyCompilationWithoutContext() const {
+ return !force_eager_compilation_ && HasTrivialOuterContext();
}
@@ -703,7 +757,7 @@ Scope* Scope::DeclarationScope() {
Handle<ScopeInfo> Scope::GetScopeInfo() {
if (scope_info_.is_null()) {
- scope_info_ = ScopeInfo::Create(this);
+ scope_info_ = ScopeInfo::Create(this, zone());
}
return scope_info_;
}
@@ -889,7 +943,7 @@ void Scope::Print(int n) {
Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {
- if (dynamics_ == NULL) dynamics_ = new DynamicScopePart();
+ if (dynamics_ == NULL) dynamics_ = new(zone()) DynamicScopePart(zone());
VariableMap* map = dynamics_->GetMap(mode);
Variable* var = map->Lookup(name);
if (var == NULL) {
@@ -979,6 +1033,24 @@ bool Scope::ResolveVariable(CompilationInfo* info,
switch (binding_kind) {
case BOUND:
// We found a variable binding.
+ if (is_qml_mode()) {
+ Handle<GlobalObject> global = isolate_->global_object();
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
+ // Get the context before the debugger was entered.
+ SaveContext *save = isolate_->save_context();
+ while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
+ save = save->prev();
+
+ global = Handle<GlobalObject>(save->context()->global_object());
+ }
+#endif
+
+ if (!global->HasProperty(*(proxy->name()))) {
+ var->set_is_qml_global(true);
+ }
+ }
break;
case BOUND_EVAL_SHADOWED:
@@ -986,11 +1058,11 @@ bool Scope::ResolveVariable(CompilationInfo* info,
// gave up on it (e.g. by encountering a local with the same in the outer
// scope which was not promoted to a context, this can happen if we use
// debugger to evaluate arbitrary expressions at a break point).
- if (var->is_global()) {
+ if (var->IsGlobalObjectProperty()) {
var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
if (is_qml_mode()) {
- Handle<GlobalObject> global = isolate_->global();
+ Handle<GlobalObject> global = isolate_->global_object();
#ifdef ENABLE_DEBUGGER_SUPPORT
if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
@@ -999,11 +1071,11 @@ bool Scope::ResolveVariable(CompilationInfo* info,
while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
save = save->prev();
- global = Handle<GlobalObject>(save->context()->global());
+ global = Handle<GlobalObject>(save->context()->global_object());
}
#endif
- if (is_qml_mode() && !global->HasProperty(*(proxy->name()))) {
+ if (!global->HasProperty(*(proxy->name()))) {
var->set_is_qml_global(true);
}
}
@@ -1017,11 +1089,11 @@ bool Scope::ResolveVariable(CompilationInfo* info,
break;
case UNBOUND:
- // No binding has been found. Declare a variable in global scope.
- var = info->global_scope()->DeclareGlobal(proxy->name());
+ // No binding has been found. Declare a variable on the global object.
+ var = info->global_scope()->DeclareDynamicGlobal(proxy->name());
if (is_qml_mode()) {
- Handle<GlobalObject> global = isolate_->global();
+ Handle<GlobalObject> global = isolate_->global_object();
#ifdef ENABLE_DEBUGGER_SUPPORT
if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
@@ -1030,7 +1102,7 @@ bool Scope::ResolveVariable(CompilationInfo* info,
while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
save = save->prev();
- global = Handle<GlobalObject>(save->context()->global());
+ global = Handle<GlobalObject>(save->context()->global_object());
}
#endif
@@ -1047,7 +1119,7 @@ bool Scope::ResolveVariable(CompilationInfo* info,
var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
if (is_qml_mode()) {
- Handle<GlobalObject> global = isolate_->global();
+ Handle<GlobalObject> global = isolate_->global_object();
#ifdef ENABLE_DEBUGGER_SUPPORT
if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
@@ -1056,11 +1128,11 @@ bool Scope::ResolveVariable(CompilationInfo* info,
while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
save = save->prev();
- global = Handle<GlobalObject>(save->context()->global());
+ global = Handle<GlobalObject>(save->context()->global_object());
}
#endif
- if (is_qml_mode() && !global->HasProperty(*(proxy->name()))) {
+ if (!global->HasProperty(*(proxy->name()))) {
var->set_is_qml_global(true);
}
}
@@ -1082,7 +1154,7 @@ bool Scope::ResolveVariable(CompilationInfo* info,
if (FLAG_print_interface_details)
PrintF("# Resolve %s:\n", var->name()->ToAsciiArray());
#endif
- proxy->interface()->Unify(var->interface(), &ok);
+ proxy->interface()->Unify(var->interface(), zone(), &ok);
if (!ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1165,11 +1237,13 @@ bool Scope::MustAllocate(Variable* var) {
inner_scope_calls_eval_ ||
scope_contains_with_ ||
is_catch_scope() ||
- is_block_scope())) {
+ is_block_scope() ||
+ is_module_scope() ||
+ is_global_scope())) {
var->set_is_used(true);
}
// Global variables do not need to be allocated.
- return !var->is_global() && var->is_used();
+ return !var->IsGlobalObjectProperty() && var->is_used();
}
@@ -1183,11 +1257,11 @@ bool Scope::MustAllocateInContext(Variable* var) {
// catch-bound variables are always allocated in a context.
if (var->mode() == TEMPORARY) return false;
if (is_catch_scope() || is_block_scope() || is_module_scope()) return true;
+ if (is_global_scope() && IsLexicalVariableMode(var->mode())) return true;
return var->has_forced_context_allocation() ||
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
- scope_contains_with_ ||
- var->is_global();
+ scope_contains_with_;
}
@@ -1288,11 +1362,19 @@ void Scope::AllocateNonParameterLocals() {
AllocateNonParameterLocal(temps_[i]);
}
+ ZoneList<VarAndOrder> vars(variables_.occupancy(), zone());
+
for (VariableMap::Entry* p = variables_.Start();
p != NULL;
p = variables_.Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value);
- AllocateNonParameterLocal(var);
+ vars.Add(VarAndOrder(var, p->order), zone());
+ }
+
+ vars.Sort(VarAndOrder::Compare);
+ int var_count = vars.length();
+ for (int i = 0; i < var_count; i++) {
+ AllocateNonParameterLocal(vars[i].var());
}
// For now, function_ must be allocated at the very end. If it gets
@@ -1353,4 +1435,77 @@ int Scope::ContextLocalCount() const {
(function_ != NULL && function_->proxy()->var()->IsContextSlot() ? 1 : 0);
}
+
+void Scope::AllocateModules(CompilationInfo* info) {
+ ASSERT(is_global_scope() || is_module_scope());
+
+ if (is_module_scope()) {
+ ASSERT(interface_->IsFrozen());
+ ASSERT(scope_info_.is_null());
+
+ // TODO(rossberg): This has to be the initial compilation of this code.
+ // We currently do not allow recompiling any module definitions.
+ Handle<ScopeInfo> scope_info = GetScopeInfo();
+ Factory* factory = info->isolate()->factory();
+ Handle<Context> context = factory->NewModuleContext(scope_info);
+ Handle<JSModule> instance = factory->NewJSModule(context, scope_info);
+ context->set_module(*instance);
+
+ bool ok;
+ interface_->MakeSingleton(instance, &ok);
+ ASSERT(ok);
+ }
+
+ // Allocate nested modules.
+ for (int i = 0; i < inner_scopes_.length(); i++) {
+ Scope* inner_scope = inner_scopes_.at(i);
+ if (inner_scope->is_module_scope()) {
+ inner_scope->AllocateModules(info);
+ }
+ }
+}
+
+
+void Scope::LinkModules(CompilationInfo* info) {
+ ASSERT(is_global_scope() || is_module_scope());
+
+ if (is_module_scope()) {
+ Handle<JSModule> instance = interface_->Instance();
+
+ // Populate the module instance object.
+ const PropertyAttributes ro_attr =
+ static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE | DONT_ENUM);
+ const PropertyAttributes rw_attr =
+ static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM);
+ for (Interface::Iterator it = interface_->iterator();
+ !it.done(); it.Advance()) {
+ if (it.interface()->IsModule()) {
+ Handle<Object> value = it.interface()->Instance();
+ ASSERT(!value.is_null());
+ JSReceiver::SetProperty(
+ instance, it.name(), value, ro_attr, kStrictMode);
+ } else {
+ Variable* var = LocalLookup(it.name());
+ ASSERT(var != NULL && var->IsContextSlot());
+ PropertyAttributes attr = var->is_const_mode() ? ro_attr : rw_attr;
+ Handle<AccessorInfo> info =
+ Accessors::MakeModuleExport(it.name(), var->index(), attr);
+ Handle<Object> result = SetAccessor(instance, info);
+ ASSERT(!(result.is_null() || result->IsUndefined()));
+ USE(result);
+ }
+ }
+ USE(JSObject::PreventExtensions(instance));
+ }
+
+ // Link nested modules.
+ for (int i = 0; i < inner_scopes_.length(); i++) {
+ Scope* inner_scope = inner_scopes_.at(i);
+ if (inner_scope->is_module_scope()) {
+ inner_scope->LinkModules(info);
+ }
+ }
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/scopes.h b/src/3rdparty/v8/src/scopes.h
index 31847e2..e9425f0 100644
--- a/src/3rdparty/v8/src/scopes.h
+++ b/src/3rdparty/v8/src/scopes.h
@@ -40,7 +40,7 @@ class CompilationInfo;
// A hash map to support fast variable declaration and lookup.
class VariableMap: public ZoneHashMap {
public:
- VariableMap();
+ explicit VariableMap(Zone* zone);
virtual ~VariableMap();
@@ -53,6 +53,11 @@ class VariableMap: public ZoneHashMap {
Interface* interface = Interface::NewValue());
Variable* Lookup(Handle<String> name);
+
+ Zone* zone() const { return zone_; }
+
+ private:
+ Zone* zone_;
};
@@ -62,14 +67,19 @@ class VariableMap: public ZoneHashMap {
// and setup time for scopes that don't need them.
class DynamicScopePart : public ZoneObject {
public:
+ explicit DynamicScopePart(Zone* zone) {
+ for (int i = 0; i < 3; i++)
+ maps_[i] = new(zone->New(sizeof(VariableMap))) VariableMap(zone);
+ }
+
VariableMap* GetMap(VariableMode mode) {
int index = mode - DYNAMIC;
ASSERT(index >= 0 && index < 3);
- return &maps_[index];
+ return maps_[index];
}
private:
- VariableMap maps_[3];
+ VariableMap *maps_[3];
};
@@ -87,14 +97,15 @@ class Scope: public ZoneObject {
// ---------------------------------------------------------------------------
// Construction
- Scope(Scope* outer_scope, ScopeType type);
+ Scope(Scope* outer_scope, ScopeType type, Zone* zone);
// Compute top scope and allocate variables. For lazy compilation the top
// scope only contains the single lazily compiled function, so this
// doesn't re-allocate variables repeatedly.
static bool Analyze(CompilationInfo* info);
- static Scope* DeserializeScopeChain(Context* context, Scope* global_scope);
+ static Scope* DeserializeScopeChain(Context* context, Scope* global_scope,
+ Zone* zone);
// The scope name is only used for printing/debugging.
void SetScopeName(Handle<String> scope_name) { scope_name_ = scope_name; }
@@ -106,6 +117,8 @@ class Scope: public ZoneObject {
// tree and its children are reparented.
Scope* FinalizeBlockScope();
+ Zone* zone() const { return zone_; }
+
// ---------------------------------------------------------------------------
// Declarations
@@ -147,21 +160,21 @@ class Scope: public ZoneObject {
// global scope. The variable was introduced (possibly from an inner
// scope) by a reference to an unresolved variable with no intervening
// with statements or eval calls.
- Variable* DeclareGlobal(Handle<String> name);
+ Variable* DeclareDynamicGlobal(Handle<String> name);
// Create a new unresolved variable.
template<class Visitor>
VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
Handle<String> name,
- int position = RelocInfo::kNoPosition,
- Interface* interface = Interface::NewValue()) {
+ Interface* interface = Interface::NewValue(),
+ int position = RelocInfo::kNoPosition) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
ASSERT(!already_resolved());
VariableProxy* proxy =
- factory->NewVariableProxy(name, false, position, interface);
- unresolved_.Add(proxy);
+ factory->NewVariableProxy(name, false, interface, position);
+ unresolved_.Add(proxy, zone_);
return proxy;
}
@@ -176,7 +189,7 @@ class Scope: public ZoneObject {
// Creates a new temporary variable in this scope. The name is only used
// for printing and cannot be used to find the variable. In particular,
// the only way to get hold of the temporary is by keeping the Variable*
- // around.
+ // around. The name should not clash with a legitimate variable names.
Variable* NewTemporary(Handle<String> name);
// Adds the specific declaration node to the list of declarations in
@@ -272,7 +285,8 @@ class Scope: public ZoneObject {
bool is_block_scope() const { return type_ == BLOCK_SCOPE; }
bool is_with_scope() const { return type_ == WITH_SCOPE; }
bool is_declaration_scope() const {
- return is_eval_scope() || is_function_scope() || is_global_scope();
+ return is_eval_scope() || is_function_scope() ||
+ is_module_scope() || is_global_scope();
}
bool is_classic_mode() const {
return language_mode() == CLASSIC_MODE;
@@ -370,16 +384,14 @@ class Scope: public ZoneObject {
// Determine if we can use lazy compilation for this scope.
bool AllowsLazyCompilation() const;
- // True if we can lazily recompile functions with this scope.
- bool AllowsLazyRecompilation() const;
+ // Determine if we can use lazy compilation for this scope without a context.
+ bool AllowsLazyCompilationWithoutContext() const;
- // True if the outer context of this scope is always the global context.
+ // True if the outer context of this scope is always the native context.
bool HasTrivialOuterContext() const;
- // True if this scope is inside a with scope and all declaration scopes
- // between them have empty contexts. Such declaration scopes become
- // invisible during scope info deserialization.
- bool TrivialDeclarationScopesBeforeWithScope() const;
+ // True if the outer context allows lazy compilation of this scope.
+ bool HasLazyCompilableOuterContext() const;
// The number of contexts between this and scope; zero if this == scope.
int ContextChainLength(Scope* scope);
@@ -590,16 +602,24 @@ class Scope: public ZoneObject {
bool AllocateVariables(CompilationInfo* info,
AstNodeFactory<AstNullVisitor>* factory);
+ // Instance objects have to be created ahead of time (before code generation)
+ // because of potentially cyclic references between them.
+ // Linking also has to be a separate stage, since populating one object may
+ // potentially require (forward) references to others.
+ void AllocateModules(CompilationInfo* info);
+ void LinkModules(CompilationInfo* info);
+
private:
// Construct a scope based on the scope info.
- Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info);
+ Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info,
+ Zone* zone);
// Construct a catch scope with a binding for the name.
- Scope(Scope* inner_scope, Handle<String> catch_variable_name);
+ Scope(Scope* inner_scope, Handle<String> catch_variable_name, Zone* zone);
void AddInnerScope(Scope* inner_scope) {
if (inner_scope != NULL) {
- inner_scopes_.Add(inner_scope);
+ inner_scopes_.Add(inner_scope, zone_);
inner_scope->outer_scope_ = this;
}
}
@@ -607,6 +627,8 @@ class Scope: public ZoneObject {
void SetDefaults(ScopeType type,
Scope* outer_scope,
Handle<ScopeInfo> scope_info);
+
+ Zone* zone_;
};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/serialize.cc b/src/3rdparty/v8/src/serialize.cc
index cf8e5e1..dfc5574 100644
--- a/src/3rdparty/v8/src/serialize.cc
+++ b/src/3rdparty/v8/src/serialize.cc
@@ -37,6 +37,7 @@
#include "platform.h"
#include "runtime.h"
#include "serialize.h"
+#include "snapshot.h"
#include "stub-cache.h"
#include "v8threads.h"
@@ -510,6 +511,22 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
UNCLASSIFIED,
47,
"date_cache_stamp");
+ Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
+ UNCLASSIFIED,
+ 48,
+ "address_of_pending_message_obj");
+ Add(ExternalReference::address_of_has_pending_message(isolate).address(),
+ UNCLASSIFIED,
+ 49,
+ "address_of_has_pending_message");
+ Add(ExternalReference::address_of_pending_message_script(isolate).address(),
+ UNCLASSIFIED,
+ 50,
+ "pending_message_script");
+ Add(ExternalReference::get_make_code_young_function(isolate).address(),
+ UNCLASSIFIED,
+ 51,
+ "Code::MakeCodeYoung");
}
@@ -586,104 +603,27 @@ Deserializer::Deserializer(SnapshotByteSource* source)
: isolate_(NULL),
source_(source),
external_reference_decoder_(NULL) {
-}
-
-
-// This routine both allocates a new object, and also keeps
-// track of where objects have been allocated so that we can
-// fix back references when deserializing.
-Address Deserializer::Allocate(int space_index, Space* space, int size) {
- Address address;
- if (!SpaceIsLarge(space_index)) {
- ASSERT(!SpaceIsPaged(space_index) ||
- size <= Page::kPageSize - Page::kObjectStartOffset);
- MaybeObject* maybe_new_allocation;
- if (space_index == NEW_SPACE) {
- maybe_new_allocation =
- reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
- } else {
- maybe_new_allocation =
- reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
- }
- ASSERT(!maybe_new_allocation->IsFailure());
- Object* new_allocation = maybe_new_allocation->ToObjectUnchecked();
- HeapObject* new_object = HeapObject::cast(new_allocation);
- address = new_object->address();
- high_water_[space_index] = address + size;
- } else {
- ASSERT(SpaceIsLarge(space_index));
- LargeObjectSpace* lo_space = reinterpret_cast<LargeObjectSpace*>(space);
- Object* new_allocation;
- if (space_index == kLargeData || space_index == kLargeFixedArray) {
- new_allocation =
- lo_space->AllocateRaw(size, NOT_EXECUTABLE)->ToObjectUnchecked();
- } else {
- ASSERT_EQ(kLargeCode, space_index);
- new_allocation =
- lo_space->AllocateRaw(size, EXECUTABLE)->ToObjectUnchecked();
- }
- HeapObject* new_object = HeapObject::cast(new_allocation);
- // Record all large objects in the same space.
- address = new_object->address();
- pages_[LO_SPACE].Add(address);
+ for (int i = 0; i < LAST_SPACE + 1; i++) {
+ reservations_[i] = kUninitializedReservation;
}
- last_object_address_ = address;
- return address;
-}
-
-
-// This returns the address of an object that has been described in the
-// snapshot as being offset bytes back in a particular space.
-HeapObject* Deserializer::GetAddressFromEnd(int space) {
- int offset = source_->GetInt();
- ASSERT(!SpaceIsLarge(space));
- offset <<= kObjectAlignmentBits;
- return HeapObject::FromAddress(high_water_[space] - offset);
-}
-
-
-// This returns the address of an object that has been described in the
-// snapshot as being offset bytes into a particular space.
-HeapObject* Deserializer::GetAddressFromStart(int space) {
- int offset = source_->GetInt();
- if (SpaceIsLarge(space)) {
- // Large spaces have one object per 'page'.
- return HeapObject::FromAddress(pages_[LO_SPACE][offset]);
- }
- offset <<= kObjectAlignmentBits;
- if (space == NEW_SPACE) {
- // New space has only one space - numbered 0.
- return HeapObject::FromAddress(pages_[space][0] + offset);
- }
- ASSERT(SpaceIsPaged(space));
- int page_of_pointee = offset >> kPageSizeBits;
- Address object_address = pages_[space][page_of_pointee] +
- (offset & Page::kPageAlignmentMask);
- return HeapObject::FromAddress(object_address);
}
void Deserializer::Deserialize() {
isolate_ = Isolate::Current();
ASSERT(isolate_ != NULL);
- // Don't GC while deserializing - just expand the heap.
- AlwaysAllocateScope always_allocate;
- // Don't use the free lists while deserializing.
- LinearAllocationScope allocate_linearly;
+ isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
// No active threads.
ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
// No active handles.
ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
- // Make sure the entire partial snapshot cache is traversed, filling it with
- // valid object pointers.
- isolate_->set_serialize_partial_snapshot_cache_length(
- Isolate::kPartialSnapshotCacheCapacity);
ASSERT_EQ(NULL, external_reference_decoder_);
external_reference_decoder_ = new ExternalReferenceDecoder();
isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
+ isolate_->heap()->RepairFreeListsAfterBoot();
isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
- isolate_->heap()->set_global_contexts_list(
+ isolate_->heap()->set_native_contexts_list(
isolate_->heap()->undefined_value());
// Update data pointers to the external strings containing natives sources.
@@ -693,19 +633,33 @@ void Deserializer::Deserialize() {
ExternalAsciiString::cast(source)->update_data_cache();
}
}
+
+ // Issue code events for newly deserialized code objects.
+ LOG_CODE_EVENT(isolate_, LogCodeObjects());
+ LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
}
void Deserializer::DeserializePartial(Object** root) {
isolate_ = Isolate::Current();
- // Don't GC while deserializing - just expand the heap.
- AlwaysAllocateScope always_allocate;
- // Don't use the free lists while deserializing.
- LinearAllocationScope allocate_linearly;
+ for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) {
+ ASSERT(reservations_[i] != kUninitializedReservation);
+ }
+ isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
if (external_reference_decoder_ == NULL) {
external_reference_decoder_ = new ExternalReferenceDecoder();
}
+
+ // Keep track of the code space start and end pointers in case new
+ // code objects were unserialized
+ OldSpace* code_space = isolate_->heap()->code_space();
+ Address start_address = code_space->top();
VisitPointer(root);
+
+ // There's no code deserialized here. If this assert fires
+ // then that's changed and logging should be added to notify
+ // the profiler et al of the new code.
+ CHECK_EQ(start_address, code_space->top());
}
@@ -733,10 +687,9 @@ void Deserializer::VisitPointers(Object** start, Object** end) {
// written very late, which means the FreeSpace map is not set up by the
// time we need to use it to mark the space at the end of a page free.
void Deserializer::ReadObject(int space_number,
- Space* space,
Object** write_back) {
int size = source_->GetInt() << kObjectAlignmentBits;
- Address address = Allocate(space_number, space, size);
+ Address address = Allocate(space_number, size);
*write_back = HeapObject::FromAddress(address);
Object** current = reinterpret_cast<Object**>(address);
Object** limit = current + (size >> kPointerSizeLog2);
@@ -745,44 +698,19 @@ void Deserializer::ReadObject(int space_number,
}
ReadChunk(current, limit, space_number, address);
#ifdef DEBUG
- bool is_codespace = (space == HEAP->code_space()) ||
- ((space == HEAP->lo_space()) && (space_number == kLargeCode));
+ bool is_codespace = (space_number == CODE_SPACE);
ASSERT(HeapObject::FromAddress(address)->IsCode() == is_codespace);
#endif
}
-
-// This macro is always used with a constant argument so it should all fold
-// away to almost nothing in the generated code. It might be nicer to do this
-// with the ternary operator but there are type issues with that.
-#define ASSIGN_DEST_SPACE(space_number) \
- Space* dest_space; \
- if (space_number == NEW_SPACE) { \
- dest_space = isolate->heap()->new_space(); \
- } else if (space_number == OLD_POINTER_SPACE) { \
- dest_space = isolate->heap()->old_pointer_space(); \
- } else if (space_number == OLD_DATA_SPACE) { \
- dest_space = isolate->heap()->old_data_space(); \
- } else if (space_number == CODE_SPACE) { \
- dest_space = isolate->heap()->code_space(); \
- } else if (space_number == MAP_SPACE) { \
- dest_space = isolate->heap()->map_space(); \
- } else if (space_number == CELL_SPACE) { \
- dest_space = isolate->heap()->cell_space(); \
- } else { \
- ASSERT(space_number >= LO_SPACE); \
- dest_space = isolate->heap()->lo_space(); \
- }
-
-
-static const int kUnknownOffsetFromStart = -1;
-
-
void Deserializer::ReadChunk(Object** current,
Object** limit,
int source_space,
Address current_object_address) {
Isolate* const isolate = isolate_;
+ // Write barrier support costs around 1% in startup time. In fact there
+ // are no new space objects in current boot snapshots, so it's not needed,
+ // but that may change.
bool write_barrier_needed = (current_object_address != NULL &&
source_space != NEW_SPACE &&
source_space != CELL_SPACE &&
@@ -798,21 +726,19 @@ void Deserializer::ReadChunk(Object** current,
ASSERT((within & ~kWhereToPointMask) == 0); \
ASSERT((space_number & ~kSpaceMask) == 0);
-#define CASE_BODY(where, how, within, space_number_if_any, offset_from_start) \
+#define CASE_BODY(where, how, within, space_number_if_any) \
{ \
bool emit_write_barrier = false; \
bool current_was_incremented = false; \
int space_number = space_number_if_any == kAnyOldSpace ? \
(data & kSpaceMask) : space_number_if_any; \
if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
- ASSIGN_DEST_SPACE(space_number) \
- ReadObject(space_number, dest_space, current); \
+ ReadObject(space_number, current); \
emit_write_barrier = (space_number == NEW_SPACE); \
} else { \
Object* new_object = NULL; /* May not be a real Object pointer. */ \
if (where == kNewObject) { \
- ASSIGN_DEST_SPACE(space_number) \
- ReadObject(space_number, dest_space, &new_object); \
+ ReadObject(space_number, &new_object); \
} else if (where == kRootArray) { \
int root_id = source_->GetInt(); \
new_object = isolate->heap()->roots_array_start()[root_id]; \
@@ -823,6 +749,9 @@ void Deserializer::ReadChunk(Object** current,
[cache_index]; \
emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
} else if (where == kExternalReference) { \
+ int skip = source_->GetInt(); \
+ current = reinterpret_cast<Object**>(reinterpret_cast<Address>( \
+ current) + skip); \
int reference_id = source_->GetInt(); \
Address address = external_reference_decoder_-> \
Decode(reference_id); \
@@ -831,21 +760,26 @@ void Deserializer::ReadChunk(Object** current,
emit_write_barrier = (space_number == NEW_SPACE); \
new_object = GetAddressFromEnd(data & kSpaceMask); \
} else { \
- ASSERT(where == kFromStart); \
- if (offset_from_start == kUnknownOffsetFromStart) { \
- emit_write_barrier = (space_number == NEW_SPACE); \
- new_object = GetAddressFromStart(data & kSpaceMask); \
+ ASSERT(where == kBackrefWithSkip); \
+ int skip = source_->GetInt(); \
+ current = reinterpret_cast<Object**>( \
+ reinterpret_cast<Address>(current) + skip); \
+ emit_write_barrier = (space_number == NEW_SPACE); \
+ new_object = GetAddressFromEnd(data & kSpaceMask); \
+ } \
+ if (within == kInnerPointer) { \
+ if (space_number != CODE_SPACE || new_object->IsCode()) { \
+ Code* new_code_object = reinterpret_cast<Code*>(new_object); \
+ new_object = reinterpret_cast<Object*>( \
+ new_code_object->instruction_start()); \
} else { \
- Address object_address = pages_[space_number][0] + \
- (offset_from_start << kObjectAlignmentBits); \
- new_object = HeapObject::FromAddress(object_address); \
+ ASSERT(space_number == CODE_SPACE); \
+ JSGlobalPropertyCell* cell = \
+ JSGlobalPropertyCell::cast(new_object); \
+ new_object = reinterpret_cast<Object*>( \
+ cell->ValueAddress()); \
} \
} \
- if (within == kFirstInstruction) { \
- Code* new_code_object = reinterpret_cast<Code*>(new_object); \
- new_object = reinterpret_cast<Object*>( \
- new_code_object->instruction_start()); \
- } \
if (how == kFromCode) { \
Address location_of_branch_data = \
reinterpret_cast<Address>(current); \
@@ -871,47 +805,18 @@ void Deserializer::ReadChunk(Object** current,
break; \
} \
-// This generates a case and a body for each space. The large object spaces are
-// very rare in snapshots so they are grouped in one body.
-#define ONE_PER_SPACE(where, how, within) \
- CASE_STATEMENT(where, how, within, NEW_SPACE) \
- CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
- CASE_BODY(where, how, within, OLD_DATA_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
- CASE_BODY(where, how, within, OLD_POINTER_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, CODE_SPACE) \
- CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, CELL_SPACE) \
- CASE_BODY(where, how, within, CELL_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, MAP_SPACE) \
- CASE_BODY(where, how, within, MAP_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, kLargeData) \
- CASE_STATEMENT(where, how, within, kLargeCode) \
- CASE_STATEMENT(where, how, within, kLargeFixedArray) \
- CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
-
// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with 8 fall-through
// cases and one body.
#define ALL_SPACES(where, how, within) \
CASE_STATEMENT(where, how, within, NEW_SPACE) \
- CASE_BODY(where, how, within, NEW_SPACE, kUnknownOffsetFromStart) \
+ CASE_BODY(where, how, within, NEW_SPACE) \
CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
CASE_STATEMENT(where, how, within, CODE_SPACE) \
CASE_STATEMENT(where, how, within, CELL_SPACE) \
CASE_STATEMENT(where, how, within, MAP_SPACE) \
- CASE_STATEMENT(where, how, within, kLargeData) \
- CASE_STATEMENT(where, how, within, kLargeCode) \
- CASE_STATEMENT(where, how, within, kLargeFixedArray) \
- CASE_BODY(where, how, within, kAnyOldSpace, kUnknownOffsetFromStart)
-
-#define ONE_PER_CODE_SPACE(where, how, within) \
- CASE_STATEMENT(where, how, within, CODE_SPACE) \
- CASE_BODY(where, how, within, CODE_SPACE, kUnknownOffsetFromStart) \
- CASE_STATEMENT(where, how, within, kLargeCode) \
- CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart)
+ CASE_BODY(where, how, within, kAnyOldSpace)
#define FOUR_CASES(byte_code) \
case byte_code: \
@@ -925,14 +830,48 @@ void Deserializer::ReadChunk(Object** current,
FOUR_CASES(byte_code + 8) \
FOUR_CASES(byte_code + 12)
+#define COMMON_RAW_LENGTHS(f) \
+ f(1) \
+ f(2) \
+ f(3) \
+ f(4) \
+ f(5) \
+ f(6) \
+ f(7) \
+ f(8) \
+ f(9) \
+ f(10) \
+ f(11) \
+ f(12) \
+ f(13) \
+ f(14) \
+ f(15) \
+ f(16) \
+ f(17) \
+ f(18) \
+ f(19) \
+ f(20) \
+ f(21) \
+ f(22) \
+ f(23) \
+ f(24) \
+ f(25) \
+ f(26) \
+ f(27) \
+ f(28) \
+ f(29) \
+ f(30) \
+ f(31)
+
// We generate 15 cases and bodies that process special tags that combine
// the raw data tag and the length into one byte.
-#define RAW_CASE(index, size) \
- case kRawData + index: { \
- byte* raw_data_out = reinterpret_cast<byte*>(current); \
- source_->CopyRaw(raw_data_out, size); \
- current = reinterpret_cast<Object**>(raw_data_out + size); \
- break; \
+#define RAW_CASE(index) \
+ case kRawData + index: { \
+ byte* raw_data_out = reinterpret_cast<byte*>(current); \
+ source_->CopyRaw(raw_data_out, index * kPointerSize); \
+ current = \
+ reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \
+ break; \
}
COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
@@ -943,12 +882,11 @@ void Deserializer::ReadChunk(Object** current,
int size = source_->GetInt();
byte* raw_data_out = reinterpret_cast<byte*>(current);
source_->CopyRaw(raw_data_out, size);
- current = reinterpret_cast<Object**>(raw_data_out + size);
break;
}
- SIXTEEN_CASES(kRootArrayLowConstants)
- SIXTEEN_CASES(kRootArrayHighConstants) {
+ SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance)
+ SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance + 16) {
int root_id = RootArrayConstantFromByteCode(data);
Object* object = isolate->heap()->roots_array_start()[root_id];
ASSERT(!isolate->heap()->InNewSpace(object));
@@ -956,6 +894,18 @@ void Deserializer::ReadChunk(Object** current,
break;
}
+ SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance)
+ SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance + 16) {
+ int root_id = RootArrayConstantFromByteCode(data);
+ int skip = source_->GetInt();
+ current = reinterpret_cast<Object**>(
+ reinterpret_cast<intptr_t>(current) + skip);
+ Object* object = isolate->heap()->roots_array_start()[root_id];
+ ASSERT(!isolate->heap()->InNewSpace(object));
+ *current++ = object;
+ break;
+ }
+
case kRepeat: {
int repeats = source_->GetInt();
Object* object = current[-1];
@@ -967,10 +917,11 @@ void Deserializer::ReadChunk(Object** current,
STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
Heap::kOldSpaceRoots);
- STATIC_ASSERT(kMaxRepeats == 12);
- FOUR_CASES(kConstantRepeat)
- FOUR_CASES(kConstantRepeat + 4)
- FOUR_CASES(kConstantRepeat + 8) {
+ STATIC_ASSERT(kMaxRepeats == 13);
+ case kConstantRepeat:
+ FOUR_CASES(kConstantRepeat + 1)
+ FOUR_CASES(kConstantRepeat + 5)
+ FOUR_CASES(kConstantRepeat + 9) {
int repeats = RepeatsForCode(data);
Object* object = current[-1];
ASSERT(!isolate->heap()->InNewSpace(object));
@@ -981,98 +932,80 @@ void Deserializer::ReadChunk(Object** current,
// Deserialize a new object and write a pointer to it to the current
// object.
- ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject)
- // Support for direct instruction pointers in functions
- ONE_PER_CODE_SPACE(kNewObject, kPlain, kFirstInstruction)
+ ALL_SPACES(kNewObject, kPlain, kStartOfObject)
+ // Support for direct instruction pointers in functions. It's an inner
+ // pointer because it points at the entry point, not at the start of the
+ // code object.
+ CASE_STATEMENT(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
+ CASE_BODY(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
// Deserialize a new code object and write a pointer to its first
// instruction to the current code object.
- ONE_PER_SPACE(kNewObject, kFromCode, kFirstInstruction)
+ ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
ALL_SPACES(kBackref, kPlain, kStartOfObject)
+ ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if V8_TARGET_ARCH_MIPS
// Deserialize a new object from pointer found in code and write
// a pointer to it to the current object. Required only for MIPS, and
// omitted on the other architectures because it is fully unrolled and
// would cause bloat.
- ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject)
+ ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
// Find a recently deserialized code object using its offset from the
// current allocation point and write a pointer to it to the current
// object. Required only for MIPS.
ALL_SPACES(kBackref, kFromCode, kStartOfObject)
- // Find an already deserialized code object using its offset from
- // the start and write a pointer to it to the current object.
- // Required only for MIPS.
- ALL_SPACES(kFromStart, kFromCode, kStartOfObject)
+ ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
// Find a recently deserialized code object using its offset from the
// current allocation point and write a pointer to its first instruction
// to the current code object or the instruction pointer in a function
// object.
- ALL_SPACES(kBackref, kFromCode, kFirstInstruction)
- ALL_SPACES(kBackref, kPlain, kFirstInstruction)
- // Find an already deserialized object using its offset from the start
- // and write a pointer to it to the current object.
- ALL_SPACES(kFromStart, kPlain, kStartOfObject)
- ALL_SPACES(kFromStart, kPlain, kFirstInstruction)
- // Find an already deserialized code object using its offset from the
- // start and write a pointer to its first instruction to the current code
- // object.
- ALL_SPACES(kFromStart, kFromCode, kFirstInstruction)
+ ALL_SPACES(kBackref, kFromCode, kInnerPointer)
+ ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
+ ALL_SPACES(kBackref, kPlain, kInnerPointer)
+ ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer)
// Find an object in the roots array and write a pointer to it to the
// current object.
CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
- CASE_BODY(kRootArray, kPlain, kStartOfObject, 0, kUnknownOffsetFromStart)
+ CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
// Find an object in the partial snapshots cache and write a pointer to it
// to the current object.
CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
CASE_BODY(kPartialSnapshotCache,
kPlain,
kStartOfObject,
- 0,
- kUnknownOffsetFromStart)
+ 0)
// Find an code entry in the partial snapshots cache and
// write a pointer to it to the current object.
- CASE_STATEMENT(kPartialSnapshotCache, kPlain, kFirstInstruction, 0)
+ CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
CASE_BODY(kPartialSnapshotCache,
kPlain,
- kFirstInstruction,
- 0,
- kUnknownOffsetFromStart)
+ kInnerPointer,
+ 0)
// Find an external reference and write a pointer to it to the current
// object.
CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
CASE_BODY(kExternalReference,
kPlain,
kStartOfObject,
- 0,
- kUnknownOffsetFromStart)
+ 0)
// Find an external reference and write a pointer to it in the current
// code object.
CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
CASE_BODY(kExternalReference,
kFromCode,
kStartOfObject,
- 0,
- kUnknownOffsetFromStart)
+ 0)
#undef CASE_STATEMENT
#undef CASE_BODY
-#undef ONE_PER_SPACE
#undef ALL_SPACES
-#undef ASSIGN_DEST_SPACE
-
- case kNewPage: {
- int space = source_->Get();
- pages_[space].Add(last_object_address_);
- if (space == CODE_SPACE) {
- CPU::FlushICache(last_object_address_, Page::kPageSize);
- }
- break;
- }
case kSkip: {
- current++;
+ int size = source_->GetInt();
+ current = reinterpret_cast<Object**>(
+ reinterpret_cast<intptr_t>(current) + size);
break;
}
@@ -1097,18 +1030,20 @@ void Deserializer::ReadChunk(Object** current,
UNREACHABLE();
}
}
- ASSERT_EQ(current, limit);
+ ASSERT_EQ(limit, current);
}
void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
- const int max_shift = ((kPointerSize * kBitsPerByte) / 7) * 7;
- for (int shift = max_shift; shift > 0; shift -= 7) {
- if (integer >= static_cast<uintptr_t>(1u) << shift) {
- Put((static_cast<int>((integer >> shift)) & 0x7f) | 0x80, "IntPart");
- }
- }
- PutSection(static_cast<int>(integer & 0x7f), "IntLastPart");
+ ASSERT(integer < 1 << 22);
+ integer <<= 2;
+ int bytes = 1;
+ if (integer > 0xff) bytes = 2;
+ if (integer > 0xffff) bytes = 3;
+ integer |= bytes;
+ Put(static_cast<int>(integer & 0xff), "IntPart1");
+ if (bytes > 1) Put(static_cast<int>((integer >> 8) & 0xff), "IntPart2");
+ if (bytes > 2) Put(static_cast<int>((integer >> 16) & 0xff), "IntPart3");
}
@@ -1116,7 +1051,6 @@ Serializer::Serializer(SnapshotByteSink* sink)
: sink_(sink),
current_root_index_(0),
external_reference_encoder_(new ExternalReferenceEncoder),
- large_object_total_(0),
root_index_wave_front_(0) {
isolate_ = Isolate::Current();
// The serializer is meant to be used only to generate initial heap images
@@ -1149,22 +1083,7 @@ void StartupSerializer::SerializeStrongReferences() {
void PartialSerializer::Serialize(Object** object) {
this->VisitPointer(object);
- Isolate* isolate = Isolate::Current();
-
- // After we have done the partial serialization the partial snapshot cache
- // will contain some references needed to decode the partial snapshot. We
- // fill it up with undefineds so it has a predictable length so the
- // deserialization code doesn't need to know the length.
- for (int index = isolate->serialize_partial_snapshot_cache_length();
- index < Isolate::kPartialSnapshotCacheCapacity;
- index++) {
- isolate->serialize_partial_snapshot_cache()[index] =
- isolate->heap()->undefined_value();
- startup_serializer_->VisitPointer(
- &isolate->serialize_partial_snapshot_cache()[index]);
- }
- isolate->set_serialize_partial_snapshot_cache_length(
- Isolate::kPartialSnapshotCacheCapacity);
+ Pad();
}
@@ -1179,14 +1098,14 @@ void Serializer::VisitPointers(Object** start, Object** end) {
if (reinterpret_cast<Address>(current) ==
isolate->heap()->store_buffer()->TopAddress()) {
sink_->Put(kSkip, "Skip");
+ sink_->PutInt(kPointerSize, "SkipOneWord");
} else if ((*current)->IsSmi()) {
- sink_->Put(kRawData, "RawData");
- sink_->PutInt(kPointerSize, "length");
+ sink_->Put(kRawData + 1, "Smi");
for (int i = 0; i < kPointerSize; i++) {
sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
}
} else {
- SerializeObject(*current, kPlain, kStartOfObject);
+ SerializeObject(*current, kPlain, kStartOfObject, 0);
}
}
}
@@ -1194,26 +1113,29 @@ void Serializer::VisitPointers(Object** start, Object** end) {
// This ensures that the partial snapshot cache keeps things alive during GC and
// tracks their movement. When it is called during serialization of the startup
-// snapshot the partial snapshot is empty, so nothing happens. When the partial
-// (context) snapshot is created, this array is populated with the pointers that
-// the partial snapshot will need. As that happens we emit serialized objects to
-// the startup snapshot that correspond to the elements of this cache array. On
-// deserialization we therefore need to visit the cache array. This fills it up
-// with pointers to deserialized objects.
+// snapshot nothing happens. When the partial (context) snapshot is created,
+// this array is populated with the pointers that the partial snapshot will
+// need. As that happens we emit serialized objects to the startup snapshot
+// that correspond to the elements of this cache array. On deserialization we
+// therefore need to visit the cache array. This fills it up with pointers to
+// deserialized objects.
void SerializerDeserializer::Iterate(ObjectVisitor* visitor) {
+ if (Serializer::enabled()) return;
Isolate* isolate = Isolate::Current();
- visitor->VisitPointers(
- isolate->serialize_partial_snapshot_cache(),
- &isolate->serialize_partial_snapshot_cache()[
- isolate->serialize_partial_snapshot_cache_length()]);
-}
-
-
-// When deserializing we need to set the size of the snapshot cache. This means
-// the root iteration code (above) will iterate over array elements, writing the
-// references to deserialized objects in them.
-void SerializerDeserializer::SetSnapshotCacheSize(int size) {
- Isolate::Current()->set_serialize_partial_snapshot_cache_length(size);
+ for (int i = 0; ; i++) {
+ if (isolate->serialize_partial_snapshot_cache_length() <= i) {
+ // Extend the array ready to get a value from the visitor when
+ // deserializing.
+ isolate->PushToPartialSnapshotCache(Smi::FromInt(0));
+ }
+ Object** cache = isolate->serialize_partial_snapshot_cache();
+ visitor->VisitPointers(&cache[i], &cache[i + 1]);
+ // Sentinel is the undefined object, which is a root so it will not normally
+ // be found in the cache.
+ if (cache[i] == isolate->heap()->undefined_value()) {
+ break;
+ }
+ }
}
@@ -1231,14 +1153,11 @@ int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
// then visit the pointer so that it becomes part of the startup snapshot
// and we can refer to it from the partial snapshot.
int length = isolate->serialize_partial_snapshot_cache_length();
- CHECK(length < Isolate::kPartialSnapshotCacheCapacity);
- isolate->serialize_partial_snapshot_cache()[length] = heap_object;
- startup_serializer_->VisitPointer(
- &isolate->serialize_partial_snapshot_cache()[length]);
+ isolate->PushToPartialSnapshotCache(heap_object);
+ startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
// We don't recurse from the startup snapshot generator into the partial
// snapshot generator.
- ASSERT(length == isolate->serialize_partial_snapshot_cache_length());
- isolate->set_serialize_partial_snapshot_cache_length(length + 1);
+ ASSERT(length == isolate->serialize_partial_snapshot_cache_length() - 1);
return length;
}
@@ -1273,58 +1192,50 @@ void Serializer::SerializeReferenceToPreviousObject(
int space,
int address,
HowToCode how_to_code,
- WhereToPoint where_to_point) {
+ WhereToPoint where_to_point,
+ int skip) {
int offset = CurrentAllocationAddress(space) - address;
- bool from_start = true;
- if (SpaceIsPaged(space)) {
- // For paged space it is simple to encode back from current allocation if
- // the object is on the same page as the current allocation pointer.
- if ((CurrentAllocationAddress(space) >> kPageSizeBits) ==
- (address >> kPageSizeBits)) {
- from_start = false;
- address = offset;
- }
- } else if (space == NEW_SPACE) {
- // For new space it is always simple to encode back from current allocation.
- if (offset < address) {
- from_start = false;
- address = offset;
- }
- }
- // If we are actually dealing with real offsets (and not a numbering of
- // all objects) then we should shift out the bits that are always 0.
- if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits;
- if (from_start) {
- sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
- sink_->PutInt(address, "address");
- } else {
+ // Shift out the bits that are always 0.
+ offset >>= kObjectAlignmentBits;
+ if (skip == 0) {
sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
- sink_->PutInt(address, "address");
+ } else {
+ sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
+ "BackRefSerWithSkip");
+ sink_->PutInt(skip, "BackRefSkipDistance");
}
+ sink_->PutInt(offset, "offset");
}
void StartupSerializer::SerializeObject(
Object* o,
HowToCode how_to_code,
- WhereToPoint where_to_point) {
+ WhereToPoint where_to_point,
+ int skip) {
CHECK(o->IsHeapObject());
HeapObject* heap_object = HeapObject::cast(o);
int root_index;
if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
- PutRoot(root_index, heap_object, how_to_code, where_to_point);
+ PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
return;
}
if (address_mapper_.IsMapped(heap_object)) {
- int space = SpaceOfAlreadySerializedObject(heap_object);
+ int space = SpaceOfObject(heap_object);
int address = address_mapper_.MappedTo(heap_object);
SerializeReferenceToPreviousObject(space,
address,
how_to_code,
- where_to_point);
+ where_to_point,
+ skip);
} else {
+ if (skip != 0) {
+ sink_->Put(kSkip, "FlushPendingSkip");
+ sink_->PutInt(skip, "SkipDistance");
+ }
+
// Object has not yet been serialized. Serialize it here.
ObjectSerializer object_serializer(this,
heap_object,
@@ -1337,32 +1248,41 @@ void StartupSerializer::SerializeObject(
void StartupSerializer::SerializeWeakReferences() {
- for (int i = Isolate::Current()->serialize_partial_snapshot_cache_length();
- i < Isolate::kPartialSnapshotCacheCapacity;
- i++) {
- sink_->Put(kRootArray + kPlain + kStartOfObject, "RootSerialization");
- sink_->PutInt(Heap::kUndefinedValueRootIndex, "root_index");
- }
+ // This phase comes right after the partial serialization (of the snapshot).
+ // After we have done the partial serialization the partial snapshot cache
+ // will contain some references needed to decode the partial snapshot. We
+ // add one entry with 'undefined' which is the sentinel that the deserializer
+ // uses to know it is done deserializing the array.
+ Isolate* isolate = Isolate::Current();
+ Object* undefined = isolate->heap()->undefined_value();
+ VisitPointer(&undefined);
HEAP->IterateWeakRoots(this, VISIT_ALL);
+ Pad();
}
void Serializer::PutRoot(int root_index,
HeapObject* object,
SerializerDeserializer::HowToCode how_to_code,
- SerializerDeserializer::WhereToPoint where_to_point) {
+ SerializerDeserializer::WhereToPoint where_to_point,
+ int skip) {
if (how_to_code == kPlain &&
where_to_point == kStartOfObject &&
root_index < kRootArrayNumberOfConstantEncodings &&
!HEAP->InNewSpace(object)) {
- if (root_index < kRootArrayNumberOfLowConstantEncodings) {
- sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant");
+ if (skip == 0) {
+ sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index,
+ "RootConstant");
} else {
- sink_->Put(kRootArrayHighConstants + root_index -
- kRootArrayNumberOfLowConstantEncodings,
- "RootHiConstant");
+ sink_->Put(kRootArrayConstants + kHasSkipDistance + root_index,
+ "RootConstant");
+ sink_->PutInt(skip, "SkipInPutRoot");
}
} else {
+ if (skip != 0) {
+ sink_->Put(kSkip, "SkipFromPutRoot");
+ sink_->PutInt(skip, "SkipFromPutRootDistance");
+ }
sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
sink_->PutInt(root_index, "root_index");
}
@@ -1372,7 +1292,8 @@ void Serializer::PutRoot(int root_index,
void PartialSerializer::SerializeObject(
Object* o,
HowToCode how_to_code,
- WhereToPoint where_to_point) {
+ WhereToPoint where_to_point,
+ int skip) {
CHECK(o->IsHeapObject());
HeapObject* heap_object = HeapObject::cast(o);
@@ -1380,16 +1301,21 @@ void PartialSerializer::SerializeObject(
// The code-caches link to context-specific code objects, which
// the startup and context serializes cannot currently handle.
ASSERT(Map::cast(heap_object)->code_cache() ==
- heap_object->GetHeap()->raw_unchecked_empty_fixed_array());
+ heap_object->GetHeap()->empty_fixed_array());
}
int root_index;
if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
- PutRoot(root_index, heap_object, how_to_code, where_to_point);
+ PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
return;
}
if (ShouldBeInThePartialSnapshotCache(heap_object)) {
+ if (skip != 0) {
+ sink_->Put(kSkip, "SkipFromSerializeObject");
+ sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
+ }
+
int cache_index = PartialSnapshotCacheIndex(heap_object);
sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
"PartialSnapshotCache");
@@ -1406,13 +1332,18 @@ void PartialSerializer::SerializeObject(
ASSERT(!heap_object->IsSymbol());
if (address_mapper_.IsMapped(heap_object)) {
- int space = SpaceOfAlreadySerializedObject(heap_object);
+ int space = SpaceOfObject(heap_object);
int address = address_mapper_.MappedTo(heap_object);
SerializeReferenceToPreviousObject(space,
address,
how_to_code,
- where_to_point);
+ where_to_point,
+ skip);
} else {
+ if (skip != 0) {
+ sink_->Put(kSkip, "SkipFromSerializeObject");
+ sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
+ }
// Object has not yet been serialized. Serialize it here.
ObjectSerializer serializer(this,
heap_object,
@@ -1436,16 +1367,11 @@ void Serializer::ObjectSerializer::Serialize() {
SnapshotPositionEvent(object_->address(), sink_->Position()));
// Mark this object as already serialized.
- bool start_new_page;
- int offset = serializer_->Allocate(space, size, &start_new_page);
+ int offset = serializer_->Allocate(space, size);
serializer_->address_mapper()->AddMapping(object_, offset);
- if (start_new_page) {
- sink_->Put(kNewPage, "NewPage");
- sink_->PutSection(space, "NewPageSpace");
- }
// Serialize the map (first word of the object).
- serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject);
+ serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject, 0);
// Serialize the rest of the object.
CHECK_EQ(0, bytes_processed_so_far_);
@@ -1486,7 +1412,8 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
}
} else {
- serializer_->SerializeObject(current_contents, kPlain, kStartOfObject);
+ serializer_->SerializeObject(
+ current_contents, kPlain, kStartOfObject, 0);
bytes_processed_so_far_ += kPointerSize;
current++;
}
@@ -1498,9 +1425,10 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
Object** current = rinfo->target_object_address();
- OutputRawData(rinfo->target_address_address());
+ int skip = OutputRawData(rinfo->target_address_address(),
+ kCanReturnSkipInsteadOfSkipping);
HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
- serializer_->SerializeObject(*current, representation, kStartOfObject);
+ serializer_->SerializeObject(*current, representation, kStartOfObject, skip);
bytes_processed_so_far_ += rinfo->target_address_size();
}
@@ -1508,10 +1436,12 @@ void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
Address* end) {
Address references_start = reinterpret_cast<Address>(start);
- OutputRawData(references_start);
+ int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping);
for (Address* current = start; current < end; current++) {
sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
+ sink_->PutInt(skip, "SkipB4ExternalRef");
+ skip = 0;
int reference_id = serializer_->EncodeExternalReference(*current);
sink_->PutInt(reference_id, "reference id");
}
@@ -1521,12 +1451,13 @@ void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
Address references_start = rinfo->target_address_address();
- OutputRawData(references_start);
+ int skip = OutputRawData(references_start, kCanReturnSkipInsteadOfSkipping);
Address* current = rinfo->target_reference_address();
int representation = rinfo->IsCodedSpecially() ?
kFromCode + kStartOfObject : kPlain + kStartOfObject;
sink_->Put(kExternalReference + representation, "ExternalRef");
+ sink_->PutInt(skip, "SkipB4ExternalRef");
int reference_id = serializer_->EncodeExternalReference(*current);
sink_->PutInt(reference_id, "reference id");
bytes_processed_so_far_ += rinfo->target_address_size();
@@ -1535,7 +1466,7 @@ void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
Address target_start = rinfo->target_address_address();
- OutputRawData(target_start);
+ int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping);
Address target = rinfo->target_address();
uint32_t encoding = serializer_->EncodeExternalReference(target);
CHECK(target == NULL ? encoding == 0 : encoding != 0);
@@ -1547,6 +1478,7 @@ void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
representation = kStartOfObject + kPlain;
}
sink_->Put(kExternalReference + representation, "ExternalReference");
+ sink_->PutInt(skip, "SkipB4ExternalRef");
sink_->PutInt(encoding, "reference id");
bytes_processed_so_far_ += rinfo->target_address_size();
}
@@ -1555,25 +1487,27 @@ void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
CHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Address target_start = rinfo->target_address_address();
- OutputRawData(target_start);
+ int skip = OutputRawData(target_start, kCanReturnSkipInsteadOfSkipping);
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- serializer_->SerializeObject(target, kFromCode, kFirstInstruction);
+ serializer_->SerializeObject(target, kFromCode, kInnerPointer, skip);
bytes_processed_so_far_ += rinfo->target_address_size();
}
void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
Code* target = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
- OutputRawData(entry_address);
- serializer_->SerializeObject(target, kPlain, kFirstInstruction);
+ int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
+ serializer_->SerializeObject(target, kPlain, kInnerPointer, skip);
bytes_processed_so_far_ += kPointerSize;
}
void Serializer::ObjectSerializer::VisitGlobalPropertyCell(RelocInfo* rinfo) {
- // We shouldn't have any global property cell references in code
- // objects in the snapshot.
- UNREACHABLE();
+ ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(rinfo->target_cell());
+ int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
+ serializer_->SerializeObject(cell, kPlain, kInnerPointer, skip);
}
@@ -1601,59 +1535,58 @@ void Serializer::ObjectSerializer::VisitExternalAsciiString(
}
-void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
+int Serializer::ObjectSerializer::OutputRawData(
+ Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
Address object_start = object_->address();
+ Address base = object_start + bytes_processed_so_far_;
int up_to_offset = static_cast<int>(up_to - object_start);
- int skipped = up_to_offset - bytes_processed_so_far_;
+ int to_skip = up_to_offset - bytes_processed_so_far_;
+ int bytes_to_output = to_skip;
+ bytes_processed_so_far_ += to_skip;
// This assert will fail if the reloc info gives us the target_address_address
// locations in a non-ascending order. Luckily that doesn't happen.
- ASSERT(skipped >= 0);
- if (skipped != 0) {
- Address base = object_start + bytes_processed_so_far_;
-#define RAW_CASE(index, length) \
- if (skipped == length) { \
+ ASSERT(to_skip >= 0);
+ bool outputting_code = false;
+ if (to_skip != 0 && code_object_ && !code_has_been_output_) {
+ // Output the code all at once and fix later.
+ bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
+ outputting_code = true;
+ code_has_been_output_ = true;
+ }
+ if (bytes_to_output != 0 &&
+ (!code_object_ || outputting_code)) {
+#define RAW_CASE(index) \
+ if (!outputting_code && bytes_to_output == index * kPointerSize && \
+ index * kPointerSize == to_skip) { \
sink_->PutSection(kRawData + index, "RawDataFixed"); \
+ to_skip = 0; /* This insn already skips. */ \
} else /* NOLINT */
COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
{ /* NOLINT */
+ // We always end up here if we are outputting the code of a code object.
sink_->Put(kRawData, "RawData");
- sink_->PutInt(skipped, "length");
+ sink_->PutInt(bytes_to_output, "length");
}
- for (int i = 0; i < skipped; i++) {
+ for (int i = 0; i < bytes_to_output; i++) {
unsigned int data = base[i];
sink_->PutSection(data, "Byte");
}
- bytes_processed_so_far_ += skipped;
}
-}
-
-
-int Serializer::SpaceOfObject(HeapObject* object) {
- for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
- AllocationSpace s = static_cast<AllocationSpace>(i);
- if (HEAP->InSpace(object, s)) {
- if (i == LO_SPACE) {
- if (object->IsCode()) {
- return kLargeCode;
- } else if (object->IsFixedArray()) {
- return kLargeFixedArray;
- } else {
- return kLargeData;
- }
- }
- return i;
- }
+ if (to_skip != 0 && return_skip == kIgnoringReturn) {
+ sink_->Put(kSkip, "Skip");
+ sink_->PutInt(to_skip, "SkipDistance");
+ to_skip = 0;
}
- UNREACHABLE();
- return 0;
+ return to_skip;
}
-int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
+int Serializer::SpaceOfObject(HeapObject* object) {
for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
AllocationSpace s = static_cast<AllocationSpace>(i);
if (HEAP->InSpace(object, s)) {
+ ASSERT(i < kNumberOfSpaces);
return i;
}
}
@@ -1662,34 +1595,8 @@ int Serializer::SpaceOfAlreadySerializedObject(HeapObject* object) {
}
-int Serializer::Allocate(int space, int size, bool* new_page) {
+int Serializer::Allocate(int space, int size) {
CHECK(space >= 0 && space < kNumberOfSpaces);
- if (SpaceIsLarge(space)) {
- // In large object space we merely number the objects instead of trying to
- // determine some sort of address.
- *new_page = true;
- large_object_total_ += size;
- return fullness_[LO_SPACE]++;
- }
- *new_page = false;
- if (fullness_[space] == 0) {
- *new_page = true;
- }
- if (SpaceIsPaged(space)) {
- // Paged spaces are a little special. We encode their addresses as if the
- // pages were all contiguous and each page were filled up in the range
- // 0 - Page::kObjectAreaSize. In practice the pages may not be contiguous
- // and allocation does not start at offset 0 in the page, but this scheme
- // means the deserializer can get the page number quickly by shifting the
- // serialized address.
- CHECK(IsPowerOf2(Page::kPageSize));
- int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1));
- CHECK(size <= SpaceAreaSize(space));
- if (used_in_this_page + size > SpaceAreaSize(space)) {
- *new_page = true;
- fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
- }
- }
int allocation_address = fullness_[space];
fullness_[space] = allocation_address + size;
return allocation_address;
@@ -1705,4 +1612,21 @@ int Serializer::SpaceAreaSize(int space) {
}
+void Serializer::Pad() {
+ // The non-branching GetInt will read up to 3 bytes too far, so we need
+ // to pad the snapshot to make sure we don't read over the end.
+ for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
+ sink_->Put(kNop, "Padding");
+ }
+}
+
+
+bool SnapshotByteSource::AtEOF() {
+ if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
+ for (int x = position_; x < length_; x++) {
+ if (data_[x] != SerializerDeserializer::nop()) return false;
+ }
+ return true;
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/serialize.h b/src/3rdparty/v8/src/serialize.h
index f50e23e..2041792 100644
--- a/src/3rdparty/v8/src/serialize.h
+++ b/src/3rdparty/v8/src/serialize.h
@@ -170,13 +170,27 @@ class SnapshotByteSource {
return data_[position_++];
}
+ int32_t GetUnalignedInt() {
+#if defined(V8_HOST_CAN_READ_UNALIGNED) && __BYTE_ORDER == __LITTLE_ENDIAN
+ int32_t answer;
+ ASSERT(position_ + sizeof(answer) <= length_ + 0u);
+ answer = *reinterpret_cast<const int32_t*>(data_ + position_);
+#else
+ int32_t answer = data_[position_];
+ answer |= data_[position_ + 1] << 8;
+ answer |= data_[position_ + 2] << 16;
+ answer |= data_[position_ + 3] << 24;
+#endif
+ return answer;
+ }
+
+ void Advance(int by) { position_ += by; }
+
inline void CopyRaw(byte* to, int number_of_bytes);
inline int GetInt();
- bool AtEOF() {
- return position_ == length_;
- }
+ bool AtEOF();
int position() { return position_; }
@@ -187,48 +201,31 @@ class SnapshotByteSource {
};
-#define COMMON_RAW_LENGTHS(f) \
- f(1, 1) \
- f(2, 2) \
- f(3, 3) \
- f(4, 4) \
- f(5, 5) \
- f(6, 6) \
- f(7, 7) \
- f(8, 8) \
- f(9, 12) \
- f(10, 16) \
- f(11, 20) \
- f(12, 24) \
- f(13, 28) \
- f(14, 32) \
- f(15, 36)
-
// The Serializer/Deserializer class is a common superclass for Serializer and
// Deserializer which is used to store common constants and methods used by
// both.
class SerializerDeserializer: public ObjectVisitor {
public:
static void Iterate(ObjectVisitor* visitor);
- static void SetSnapshotCacheSize(int size);
+
+ static int nop() { return kNop; }
protected:
// Where the pointed-to object can be found:
enum Where {
kNewObject = 0, // Object is next in snapshot.
- // 1-8 One per space.
+ // 1-6 One per space.
kRootArray = 0x9, // Object is found in root array.
kPartialSnapshotCache = 0xa, // Object is in the cache.
kExternalReference = 0xb, // Pointer to an external reference.
- kSkip = 0xc, // Skip a pointer sized cell.
- // 0xd-0xf Free.
- kBackref = 0x10, // Object is described relative to end.
- // 0x11-0x18 One per space.
- // 0x19-0x1f Free.
- kFromStart = 0x20, // Object is described relative to start.
- // 0x21-0x28 One per space.
- // 0x29-0x2f Free.
- // 0x30-0x3f Used by misc. tags below.
+ kSkip = 0xc, // Skip n bytes.
+ kNop = 0xd, // Does nothing, used to pad.
+ // 0xe-0xf Free.
+ kBackref = 0x10, // Object is described relative to end.
+ // 0x11-0x16 One per space.
+ kBackrefWithSkip = 0x18, // Object is described relative to end.
+ // 0x19-0x1e One per space.
+ // 0x20-0x3f Used by misc. tags below.
kPointedToMask = 0x3f
};
@@ -240,17 +237,27 @@ class SerializerDeserializer: public ObjectVisitor {
kHowToCodeMask = 0x40
};
+ // For kRootArrayConstants
+ enum WithSkip {
+ kNoSkipDistance = 0,
+ kHasSkipDistance = 0x40,
+ kWithSkipMask = 0x40
+ };
+
// Where to point within the object.
enum WhereToPoint {
kStartOfObject = 0,
- kFirstInstruction = 0x80,
+ kInnerPointer = 0x80, // First insn in code object or payload of cell.
kWhereToPointMask = 0x80
};
// Misc.
- // Raw data to be copied from the snapshot.
- static const int kRawData = 0x30;
- // Some common raw lengths: 0x31-0x3f
+ // Raw data to be copied from the snapshot. This byte code does not advance
+ // the current pointer, which is used for code objects, where we write the
+ // entire code in one memcpy, then fix up stuff with kSkip and other byte
+ // codes that overwrite data.
+ static const int kRawData = 0x20;
+ // Some common raw lengths: 0x21-0x3f. These autoadvance the current pointer.
// A tag emitted at strategic points in the snapshot to delineate sections.
// If the deserializer does not find these at the expected moments then it
// is an indication that the snapshot and the VM do not fit together.
@@ -260,64 +267,44 @@ class SerializerDeserializer: public ObjectVisitor {
// Used for the source code of the natives, which is in the executable, but
// is referred to from external strings in the snapshot.
static const int kNativesStringResource = 0x71;
- static const int kNewPage = 0x72;
- static const int kRepeat = 0x73;
- static const int kConstantRepeat = 0x74;
- // 0x74-0x7f Repeat last word (subtract 0x73 to get the count).
- static const int kMaxRepeats = 0x7f - 0x73;
+ static const int kRepeat = 0x72;
+ static const int kConstantRepeat = 0x73;
+ // 0x73-0x7f Repeat last word (subtract 0x72 to get the count).
+ static const int kMaxRepeats = 0x7f - 0x72;
static int CodeForRepeats(int repeats) {
ASSERT(repeats >= 1 && repeats <= kMaxRepeats);
- return 0x73 + repeats;
+ return 0x72 + repeats;
}
static int RepeatsForCode(int byte_code) {
ASSERT(byte_code >= kConstantRepeat && byte_code <= 0x7f);
- return byte_code - 0x73;
+ return byte_code - 0x72;
}
- static const int kRootArrayLowConstants = 0xb0;
- // 0xb0-0xbf Things from the first 16 elements of the root array.
- static const int kRootArrayHighConstants = 0xf0;
- // 0xf0-0xff Things from the next 16 elements of the root array.
+ static const int kRootArrayConstants = 0xa0;
+ // 0xa0-0xbf Things from the first 32 elements of the root array.
static const int kRootArrayNumberOfConstantEncodings = 0x20;
- static const int kRootArrayNumberOfLowConstantEncodings = 0x10;
static int RootArrayConstantFromByteCode(int byte_code) {
- int constant = (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
- ASSERT(constant >= 0 && constant < kRootArrayNumberOfConstantEncodings);
- return constant;
+ return byte_code & 0x1f;
}
-
- static const int kLargeData = LAST_SPACE;
- static const int kLargeCode = kLargeData + 1;
- static const int kLargeFixedArray = kLargeCode + 1;
- static const int kNumberOfSpaces = kLargeFixedArray + 1;
+ static const int kNumberOfSpaces = LO_SPACE;
static const int kAnyOldSpace = -1;
// A bitmask for getting the space out of an instruction.
- static const int kSpaceMask = 15;
-
- static inline bool SpaceIsLarge(int space) { return space >= kLargeData; }
- static inline bool SpaceIsPaged(int space) {
- return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE;
- }
+ static const int kSpaceMask = 7;
};
int SnapshotByteSource::GetInt() {
- // A little unwind to catch the really small ints.
- int snapshot_byte = Get();
- if ((snapshot_byte & 0x80) == 0) {
- return snapshot_byte;
- }
- int accumulator = (snapshot_byte & 0x7f) << 7;
- while (true) {
- snapshot_byte = Get();
- if ((snapshot_byte & 0x80) == 0) {
- return accumulator | snapshot_byte;
- }
- accumulator = (accumulator | (snapshot_byte & 0x7f)) << 7;
- }
- UNREACHABLE();
- return accumulator;
+ // This way of variable-length encoding integers does not suffer from branch
+ // mispredictions.
+ uint32_t answer = GetUnalignedInt();
+ int bytes = answer & 3;
+ Advance(bytes);
+ uint32_t mask = 0xffffffffu;
+ mask >>= 32 - (bytes << 3);
+ answer &= mask;
+ answer >>= 2;
+ return answer;
}
@@ -341,6 +328,12 @@ class Deserializer: public SerializerDeserializer {
// Deserialize a single object and the objects reachable from it.
void DeserializePartial(Object** root);
+ void set_reservation(int space_number, int reservation) {
+ ASSERT(space_number >= 0);
+ ASSERT(space_number <= LAST_SPACE);
+ reservations_[space_number] = reservation;
+ }
+
private:
virtual void VisitPointers(Object** start, Object** end);
@@ -359,28 +352,36 @@ class Deserializer: public SerializerDeserializer {
// the heap.
void ReadChunk(
Object** start, Object** end, int space, Address object_address);
- HeapObject* GetAddressFromStart(int space);
- inline HeapObject* GetAddressFromEnd(int space);
- Address Allocate(int space_number, Space* space, int size);
- void ReadObject(int space_number, Space* space, Object** write_back);
+ void ReadObject(int space_number, Object** write_back);
+
+ // This routine both allocates a new object, and also keeps
+ // track of where objects have been allocated so that we can
+ // fix back references when deserializing.
+ Address Allocate(int space_index, int size) {
+ Address address = high_water_[space_index];
+ high_water_[space_index] = address + size;
+ return address;
+ }
+
+ // This returns the address of an object that has been described in the
+ // snapshot as being offset bytes back in a particular space.
+ HeapObject* GetAddressFromEnd(int space) {
+ int offset = source_->GetInt();
+ offset <<= kObjectAlignmentBits;
+ return HeapObject::FromAddress(high_water_[space] - offset);
+ }
+
// Cached current isolate.
Isolate* isolate_;
- // Keep track of the pages in the paged spaces.
- // (In large object space we are keeping track of individual objects
- // rather than pages.) In new space we just need the address of the
- // first object and the others will flow from that.
- List<Address> pages_[SerializerDeserializer::kNumberOfSpaces];
-
SnapshotByteSource* source_;
// This is the address of the next object that will be allocated in each
// space. It is used to calculate the addresses of back-references.
Address high_water_[LAST_SPACE + 1];
- // This is the address of the most recent object that was allocated. It
- // is used to set the location of the new page when we encounter a
- // START_NEW_PAGE_SERIALIZATION tag.
- Address last_object_address_;
+
+ int reservations_[LAST_SPACE + 1];
+ static const intptr_t kUninitializedReservation = -1;
ExternalReferenceDecoder* external_reference_decoder_;
@@ -462,7 +463,7 @@ class Serializer : public SerializerDeserializer {
// You can call this after serialization to find out how much space was used
// in each space.
int CurrentAllocationAddress(int space) {
- if (SpaceIsLarge(space)) return large_object_total_;
+ ASSERT(space < kNumberOfSpaces);
return fullness_[space];
}
@@ -479,8 +480,11 @@ class Serializer : public SerializerDeserializer {
static void TooLateToEnableNow() { too_late_to_enable_now_ = true; }
static bool enabled() { return serialization_enabled_; }
SerializationAddressMapper* address_mapper() { return &address_mapper_; }
- void PutRoot(
- int index, HeapObject* object, HowToCode how, WhereToPoint where);
+ void PutRoot(int index,
+ HeapObject* object,
+ HowToCode how,
+ WhereToPoint where,
+ int skip);
protected:
static const int kInvalidRootIndex = -1;
@@ -504,7 +508,9 @@ class Serializer : public SerializerDeserializer {
object_(HeapObject::cast(o)),
sink_(sink),
reference_representation_(how_to_code + where_to_point),
- bytes_processed_so_far_(0) { }
+ bytes_processed_so_far_(0),
+ code_object_(o->IsCode()),
+ code_has_been_output_(false) { }
void Serialize();
void VisitPointers(Object** start, Object** end);
void VisitEmbeddedPointer(RelocInfo* target);
@@ -524,34 +530,36 @@ class Serializer : public SerializerDeserializer {
}
private:
- void OutputRawData(Address up_to);
+ enum ReturnSkip { kCanReturnSkipInsteadOfSkipping, kIgnoringReturn };
+ // This function outputs or skips the raw data between the last pointer and
+ // up to the current position. It optionally can just return the number of
+ // bytes to skip instead of performing a skip instruction, in case the skip
+ // can be merged into the next instruction.
+ int OutputRawData(Address up_to, ReturnSkip return_skip = kIgnoringReturn);
Serializer* serializer_;
HeapObject* object_;
SnapshotByteSink* sink_;
int reference_representation_;
int bytes_processed_so_far_;
+ bool code_object_;
+ bool code_has_been_output_;
};
virtual void SerializeObject(Object* o,
HowToCode how_to_code,
- WhereToPoint where_to_point) = 0;
+ WhereToPoint where_to_point,
+ int skip) = 0;
void SerializeReferenceToPreviousObject(
int space,
int address,
HowToCode how_to_code,
- WhereToPoint where_to_point);
+ WhereToPoint where_to_point,
+ int skip);
void InitializeAllocators();
- // This will return the space for an object. If the object is in large
- // object space it may return kLargeCode or kLargeFixedArray in order
- // to indicate to the deserializer what kind of large object allocation
- // to make.
+ // This will return the space for an object.
static int SpaceOfObject(HeapObject* object);
- // This just returns the space of the object. It will return LO_SPACE
- // for all large objects since you can't check the type of the object
- // once the map has been used for the serialization address.
- static int SpaceOfAlreadySerializedObject(HeapObject* object);
- int Allocate(int space, int size, bool* new_page_started);
+ int Allocate(int space, int size);
int EncodeExternalReference(Address addr) {
return external_reference_encoder_->Encode(addr);
}
@@ -560,9 +568,7 @@ class Serializer : public SerializerDeserializer {
Isolate* isolate_;
// Keep track of the fullness of each space in order to generate
- // relative addresses for back references. Large objects are
- // just numbered sequentially since relative addresses make no
- // sense in large object space.
+ // relative addresses for back references.
int fullness_[LAST_SPACE + 1];
SnapshotByteSink* sink_;
int current_root_index_;
@@ -570,9 +576,9 @@ class Serializer : public SerializerDeserializer {
static bool serialization_enabled_;
// Did we already make use of the fact that serialization was not enabled?
static bool too_late_to_enable_now_;
- int large_object_total_;
SerializationAddressMapper address_mapper_;
intptr_t root_index_wave_front_;
+ void Pad();
friend class ObjectSerializer;
friend class Deserializer;
@@ -595,7 +601,8 @@ class PartialSerializer : public Serializer {
virtual void Serialize(Object** o);
virtual void SerializeObject(Object* o,
HowToCode how_to_code,
- WhereToPoint where_to_point);
+ WhereToPoint where_to_point,
+ int skip);
protected:
virtual int PartialSnapshotCacheIndex(HeapObject* o);
@@ -633,11 +640,13 @@ class StartupSerializer : public Serializer {
virtual void SerializeStrongReferences();
virtual void SerializeObject(Object* o,
HowToCode how_to_code,
- WhereToPoint where_to_point);
+ WhereToPoint where_to_point,
+ int skip);
void SerializeWeakReferences();
void Serialize() {
SerializeStrongReferences();
SerializeWeakReferences();
+ Pad();
}
private:
diff --git a/src/3rdparty/v8/src/small-pointer-list.h b/src/3rdparty/v8/src/small-pointer-list.h
index 75fea06..295a06f 100644
--- a/src/3rdparty/v8/src/small-pointer-list.h
+++ b/src/3rdparty/v8/src/small-pointer-list.h
@@ -44,22 +44,22 @@ class SmallPointerList {
public:
SmallPointerList() : data_(kEmptyTag) {}
- explicit SmallPointerList(int capacity) : data_(kEmptyTag) {
- Reserve(capacity);
+ SmallPointerList(int capacity, Zone* zone) : data_(kEmptyTag) {
+ Reserve(capacity, zone);
}
- void Reserve(int capacity) {
+ void Reserve(int capacity, Zone* zone) {
if (capacity < 2) return;
if ((data_ & kTagMask) == kListTag) {
if (list()->capacity() >= capacity) return;
int old_length = list()->length();
- list()->AddBlock(NULL, capacity - list()->capacity());
+ list()->AddBlock(NULL, capacity - list()->capacity(), zone);
list()->Rewind(old_length);
return;
}
- PointerList* list = new PointerList(capacity);
+ PointerList* list = new(zone) PointerList(capacity, zone);
if ((data_ & kTagMask) == kSingletonTag) {
- list->Add(single_value());
+ list->Add(single_value(), zone);
}
ASSERT(IsAligned(reinterpret_cast<intptr_t>(list), kPointerAlignment));
data_ = reinterpret_cast<intptr_t>(list) | kListTag;
@@ -83,21 +83,21 @@ class SmallPointerList {
return list()->length();
}
- void Add(T* pointer) {
+ void Add(T* pointer, Zone* zone) {
ASSERT(IsAligned(reinterpret_cast<intptr_t>(pointer), kPointerAlignment));
if ((data_ & kTagMask) == kEmptyTag) {
data_ = reinterpret_cast<intptr_t>(pointer) | kSingletonTag;
return;
}
if ((data_ & kTagMask) == kSingletonTag) {
- PointerList* list = new PointerList(2);
- list->Add(single_value());
- list->Add(pointer);
+ PointerList* list = new(zone) PointerList(2, zone);
+ list->Add(single_value(), zone);
+ list->Add(pointer, zone);
ASSERT(IsAligned(reinterpret_cast<intptr_t>(list), kPointerAlignment));
data_ = reinterpret_cast<intptr_t>(list) | kListTag;
return;
}
- list()->Add(pointer);
+ list()->Add(pointer, zone);
}
// Note: returns T* and not T*& (unlike List from list.h).
diff --git a/src/3rdparty/v8/src/smart-array-pointer.h b/src/3rdparty/v8/src/smart-pointers.h
index 00721c1..345c4d4 100644
--- a/src/3rdparty/v8/src/smart-array-pointer.h
+++ b/src/3rdparty/v8/src/smart-pointers.h
@@ -25,34 +25,33 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_SMART_ARRAY_POINTER_H_
-#define V8_SMART_ARRAY_POINTER_H_
+#ifndef V8_SMART_POINTERS_H_
+#define V8_SMART_POINTERS_H_
namespace v8 {
namespace internal {
-// A 'scoped array pointer' that calls DeleteArray on its pointer when the
-// destructor is called.
-template<typename T>
-class SmartArrayPointer {
+template<typename Deallocator, typename T>
+class SmartPointerBase {
public:
// Default constructor. Constructs an empty scoped pointer.
- inline SmartArrayPointer() : p_(NULL) {}
+ inline SmartPointerBase() : p_(NULL) {}
// Constructs a scoped pointer from a plain one.
- explicit inline SmartArrayPointer(T* ptr) : p_(ptr) {}
+ explicit inline SmartPointerBase(T* ptr) : p_(ptr) {}
// Copy constructor removes the pointer from the original to avoid double
// freeing.
- inline SmartArrayPointer(const SmartArrayPointer<T>& rhs) : p_(rhs.p_) {
- const_cast<SmartArrayPointer<T>&>(rhs).p_ = NULL;
+ inline SmartPointerBase(const SmartPointerBase<Deallocator, T>& rhs)
+ : p_(rhs.p_) {
+ const_cast<SmartPointerBase<Deallocator, T>&>(rhs).p_ = NULL;
}
// When the destructor of the scoped pointer is executed the plain pointer
// is deleted using DeleteArray. This implies that you must allocate with
// NewArray.
- inline ~SmartArrayPointer() { if (p_) DeleteArray(p_); }
+ inline ~SmartPointerBase() { if (p_) Deallocator::Delete(p_); }
inline T* operator->() const { return p_; }
@@ -81,10 +80,11 @@ class SmartArrayPointer {
// Assignment requires an empty (NULL) SmartArrayPointer as the receiver. Like
// the copy constructor it removes the pointer in the original to avoid
// double freeing.
- inline SmartArrayPointer& operator=(const SmartArrayPointer<T>& rhs) {
+ inline SmartPointerBase<Deallocator, T>& operator=(
+ const SmartPointerBase<Deallocator, T>& rhs) {
ASSERT(is_empty());
T* tmp = rhs.p_; // swap to handle self-assignment
- const_cast<SmartArrayPointer<T>&>(rhs).p_ = NULL;
+ const_cast<SmartPointerBase<Deallocator, T>&>(rhs).p_ = NULL;
p_ = tmp;
return *this;
}
@@ -95,6 +95,45 @@ class SmartArrayPointer {
T* p_;
};
+// A 'scoped array pointer' that calls DeleteArray on its pointer when the
+// destructor is called.
+
+template<typename T>
+struct ArrayDeallocator {
+ static void Delete(T* array) {
+ DeleteArray(array);
+ }
+};
+
+
+template<typename T>
+class SmartArrayPointer: public SmartPointerBase<ArrayDeallocator<T>, T> {
+ public:
+ inline SmartArrayPointer() { }
+ explicit inline SmartArrayPointer(T* ptr)
+ : SmartPointerBase<ArrayDeallocator<T>, T>(ptr) { }
+ inline SmartArrayPointer(const SmartArrayPointer<T>& rhs)
+ : SmartPointerBase<ArrayDeallocator<T>, T>(rhs) { }
+};
+
+
+template<typename T>
+struct ObjectDeallocator {
+ static void Delete(T* array) {
+ Malloced::Delete(array);
+ }
+};
+
+template<typename T>
+class SmartPointer: public SmartPointerBase<ObjectDeallocator<T>, T> {
+ public:
+ inline SmartPointer() { }
+ explicit inline SmartPointer(T* ptr)
+ : SmartPointerBase<ObjectDeallocator<T>, T>(ptr) { }
+ inline SmartPointer(const SmartPointer<T>& rhs)
+ : SmartPointerBase<ObjectDeallocator<T>, T>(rhs) { }
+};
+
} } // namespace v8::internal
-#endif // V8_SMART_ARRAY_POINTER_H_
+#endif // V8_SMART_POINTERS_H_
diff --git a/src/3rdparty/v8/src/snapshot-common.cc b/src/3rdparty/v8/src/snapshot-common.cc
index ef89a5e..a8806f0 100644
--- a/src/3rdparty/v8/src/snapshot-common.cc
+++ b/src/3rdparty/v8/src/snapshot-common.cc
@@ -37,10 +37,47 @@
namespace v8 {
namespace internal {
-bool Snapshot::Deserialize(const byte* content, int len) {
- SnapshotByteSource source(content, len);
- Deserializer deserializer(&source);
- return V8::Initialize(&deserializer);
+
+static void ReserveSpaceForSnapshot(Deserializer* deserializer,
+ const char* file_name) {
+ int file_name_length = StrLength(file_name) + 10;
+ Vector<char> name = Vector<char>::New(file_name_length + 1);
+ OS::SNPrintF(name, "%s.size", file_name);
+ FILE* fp = OS::FOpen(name.start(), "r");
+ CHECK_NE(NULL, fp);
+ int new_size, pointer_size, data_size, code_size, map_size, cell_size;
+#ifdef _MSC_VER
+ // Avoid warning about unsafe fscanf from MSVC.
+ // Please note that this is only fine if %c and %s are not being used.
+#define fscanf fscanf_s
+#endif
+ CHECK_EQ(1, fscanf(fp, "new %d\n", &new_size));
+ CHECK_EQ(1, fscanf(fp, "pointer %d\n", &pointer_size));
+ CHECK_EQ(1, fscanf(fp, "data %d\n", &data_size));
+ CHECK_EQ(1, fscanf(fp, "code %d\n", &code_size));
+ CHECK_EQ(1, fscanf(fp, "map %d\n", &map_size));
+ CHECK_EQ(1, fscanf(fp, "cell %d\n", &cell_size));
+#ifdef _MSC_VER
+#undef fscanf
+#endif
+ fclose(fp);
+ deserializer->set_reservation(NEW_SPACE, new_size);
+ deserializer->set_reservation(OLD_POINTER_SPACE, pointer_size);
+ deserializer->set_reservation(OLD_DATA_SPACE, data_size);
+ deserializer->set_reservation(CODE_SPACE, code_size);
+ deserializer->set_reservation(MAP_SPACE, map_size);
+ deserializer->set_reservation(CELL_SPACE, cell_size);
+ name.Dispose();
+}
+
+
+void Snapshot::ReserveSpaceForLinkedInSnapshot(Deserializer* deserializer) {
+ deserializer->set_reservation(NEW_SPACE, new_space_used_);
+ deserializer->set_reservation(OLD_POINTER_SPACE, pointer_space_used_);
+ deserializer->set_reservation(OLD_DATA_SPACE, data_space_used_);
+ deserializer->set_reservation(CODE_SPACE, code_space_used_);
+ deserializer->set_reservation(MAP_SPACE, map_space_used_);
+ deserializer->set_reservation(CELL_SPACE, cell_space_used_);
}
@@ -49,32 +86,44 @@ bool Snapshot::Initialize(const char* snapshot_file) {
int len;
byte* str = ReadBytes(snapshot_file, &len);
if (!str) return false;
- Deserialize(str, len);
+ bool success;
+ {
+ SnapshotByteSource source(str, len);
+ Deserializer deserializer(&source);
+ ReserveSpaceForSnapshot(&deserializer, snapshot_file);
+ success = V8::Initialize(&deserializer);
+ }
DeleteArray(str);
- return true;
+ return success;
} else if (size_ > 0) {
- Deserialize(raw_data_, raw_size_);
- return true;
+ SnapshotByteSource source(raw_data_, raw_size_);
+ Deserializer deserializer(&source);
+ ReserveSpaceForLinkedInSnapshot(&deserializer);
+ return V8::Initialize(&deserializer);
}
return false;
}
+bool Snapshot::HaveASnapshotToStartFrom() {
+ return size_ != 0;
+}
+
+
Handle<Context> Snapshot::NewContextFromSnapshot() {
if (context_size_ == 0) {
return Handle<Context>();
}
- HEAP->ReserveSpace(new_space_used_,
- pointer_space_used_,
- data_space_used_,
- code_space_used_,
- map_space_used_,
- cell_space_used_,
- large_space_used_);
SnapshotByteSource source(context_raw_data_,
context_raw_size_);
Deserializer deserializer(&source);
Object* root;
+ deserializer.set_reservation(NEW_SPACE, context_new_space_used_);
+ deserializer.set_reservation(OLD_POINTER_SPACE, context_pointer_space_used_);
+ deserializer.set_reservation(OLD_DATA_SPACE, context_data_space_used_);
+ deserializer.set_reservation(CODE_SPACE, context_code_space_used_);
+ deserializer.set_reservation(MAP_SPACE, context_map_space_used_);
+ deserializer.set_reservation(CELL_SPACE, context_cell_space_used_);
deserializer.DeserializePartial(&root);
CHECK(root->IsContext());
return Handle<Context>(Context::cast(root));
diff --git a/src/3rdparty/v8/src/snapshot-empty.cc b/src/3rdparty/v8/src/snapshot-empty.cc
index 0b35720..70e7ab8 100644
--- a/src/3rdparty/v8/src/snapshot-empty.cc
+++ b/src/3rdparty/v8/src/snapshot-empty.cc
@@ -49,6 +49,12 @@ const int Snapshot::data_space_used_ = 0;
const int Snapshot::code_space_used_ = 0;
const int Snapshot::map_space_used_ = 0;
const int Snapshot::cell_space_used_ = 0;
-const int Snapshot::large_space_used_ = 0;
+
+const int Snapshot::context_new_space_used_ = 0;
+const int Snapshot::context_pointer_space_used_ = 0;
+const int Snapshot::context_data_space_used_ = 0;
+const int Snapshot::context_code_space_used_ = 0;
+const int Snapshot::context_map_space_used_ = 0;
+const int Snapshot::context_cell_space_used_ = 0;
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/snapshot.h b/src/3rdparty/v8/src/snapshot.h
index 4f01a2d..c4ae45e 100644
--- a/src/3rdparty/v8/src/snapshot.h
+++ b/src/3rdparty/v8/src/snapshot.h
@@ -40,6 +40,8 @@ class Snapshot {
// could be found.
static bool Initialize(const char* snapshot_file = NULL);
+ static bool HaveASnapshotToStartFrom();
+
// Create a new context using the internal partial snapshot.
static Handle<Context> NewContextFromSnapshot();
@@ -75,13 +77,18 @@ class Snapshot {
static const int code_space_used_;
static const int map_space_used_;
static const int cell_space_used_;
- static const int large_space_used_;
+ static const int context_new_space_used_;
+ static const int context_pointer_space_used_;
+ static const int context_data_space_used_;
+ static const int context_code_space_used_;
+ static const int context_map_space_used_;
+ static const int context_cell_space_used_;
static const int size_;
static const int raw_size_;
static const int context_size_;
static const int context_raw_size_;
- static bool Deserialize(const byte* content, int len);
+ static void ReserveSpaceForLinkedInSnapshot(Deserializer* deserializer);
DISALLOW_IMPLICIT_CONSTRUCTORS(Snapshot);
};
diff --git a/src/3rdparty/v8/src/spaces-inl.h b/src/3rdparty/v8/src/spaces-inl.h
index ed78fc7..8a576a8 100644
--- a/src/3rdparty/v8/src/spaces-inl.h
+++ b/src/3rdparty/v8/src/spaces-inl.h
@@ -214,6 +214,19 @@ MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
}
+void MemoryChunk::UpdateHighWaterMark(Address mark) {
+ if (mark == NULL) return;
+ // Need to subtract one from the mark because when a chunk is full the
+ // top points to the next address after the chunk, which effectively belongs
+ // to another chunk. See the comment to Page::FromAllocationTop.
+ MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1);
+ int new_mark = static_cast<int>(mark - chunk->address());
+ if (new_mark > chunk->high_water_mark_) {
+ chunk->high_water_mark_ = new_mark;
+ }
+}
+
+
PointerChunkIterator::PointerChunkIterator(Heap* heap)
: state_(kOldPointerState),
old_pointer_iterator_(heap->old_pointer_space()),
@@ -269,6 +282,10 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
return object;
}
+ ASSERT(!heap()->linear_allocation() ||
+ (anchor_.next_chunk() == &anchor_ &&
+ anchor_.prev_chunk() == &anchor_));
+
object = free_list_.Allocate(size_in_bytes);
if (object != NULL) {
if (identity() == CODE_SPACE) {
diff --git a/src/3rdparty/v8/src/spaces.cc b/src/3rdparty/v8/src/spaces.cc
index a0c8f2c..583b2ca 100644
--- a/src/3rdparty/v8/src/spaces.cc
+++ b/src/3rdparty/v8/src/spaces.cc
@@ -447,6 +447,8 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
chunk->InitializeReservedMemory();
chunk->slots_buffer_ = NULL;
chunk->skip_list_ = NULL;
+ chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
+ chunk->high_water_mark_ = static_cast<int>(area_start - base);
chunk->ResetLiveBytes();
Bitmap::Clear(chunk);
chunk->initialize_scan_on_scavenge(false);
@@ -496,6 +498,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
VirtualMemory reservation;
Address area_start = NULL;
Address area_end = NULL;
+
if (executable == EXECUTABLE) {
chunk_size = RoundUp(CodePageAreaStartOffset() + body_size,
OS::CommitPageSize()) + CodePageGuardSize();
@@ -528,10 +531,11 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
size_executable_ += reservation.size();
}
-#ifdef DEBUG
- ZapBlock(base, CodePageGuardStartOffset());
- ZapBlock(base + CodePageAreaStartOffset(), body_size);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(base, CodePageGuardStartOffset());
+ ZapBlock(base + CodePageAreaStartOffset(), body_size);
+ }
+
area_start = base + CodePageAreaStartOffset();
area_end = area_start + body_size;
} else {
@@ -543,9 +547,9 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
if (base == NULL) return NULL;
-#ifdef DEBUG
- ZapBlock(base, chunk_size);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(base, chunk_size);
+ }
area_start = base + Page::kObjectStartOffset;
area_end = base + chunk_size;
@@ -621,9 +625,11 @@ bool MemoryAllocator::CommitBlock(Address start,
size_t size,
Executability executable) {
if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
-#ifdef DEBUG
- ZapBlock(start, size);
-#endif
+
+ if (Heap::ShouldZapGarbage()) {
+ ZapBlock(start, size);
+ }
+
isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
return true;
}
@@ -819,6 +825,18 @@ void PagedSpace::TearDown() {
}
+size_t PagedSpace::CommittedPhysicalMemory() {
+ if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+ MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
+ size_t size = 0;
+ PageIterator it(this);
+ while (it.has_next()) {
+ size += it.next()->CommittedPhysicalMemory();
+ }
+ return size;
+}
+
+
MaybeObject* PagedSpace::FindObject(Address addr) {
// Note: this function can only be called on precisely swept spaces.
ASSERT(!heap()->mark_compact_collector()->in_use());
@@ -881,10 +899,10 @@ intptr_t PagedSpace::SizeOfFirstPage() {
size = 192 * KB;
break;
case MAP_SPACE:
- size = 128 * KB;
+ size = 16 * kPointerSize * KB;
break;
case CELL_SPACE:
- size = 96 * KB;
+ size = 16 * kPointerSize * KB;
break;
case CODE_SPACE:
if (kPointerSize == 8) {
@@ -984,8 +1002,7 @@ void PagedSpace::ReleaseAllUnusedPages() {
void PagedSpace::Print() { }
#endif
-
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void PagedSpace::Verify(ObjectVisitor* visitor) {
// We can only iterate over the pages if they were swept precisely.
if (was_swept_conservatively_) return;
@@ -995,23 +1012,23 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
PageIterator page_iterator(this);
while (page_iterator.has_next()) {
Page* page = page_iterator.next();
- ASSERT(page->owner() == this);
+ CHECK(page->owner() == this);
if (page == Page::FromAllocationTop(allocation_info_.top)) {
allocation_pointer_found_in_space = true;
}
- ASSERT(page->WasSweptPrecisely());
+ CHECK(page->WasSweptPrecisely());
HeapObjectIterator it(page, NULL);
Address end_of_previous_object = page->area_start();
Address top = page->area_end();
int black_size = 0;
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
- ASSERT(end_of_previous_object <= object->address());
+ CHECK(end_of_previous_object <= object->address());
// The first word should be a map, and we expect all map pointers to
// be in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// Perform space-specific object verification.
VerifyObject(object);
@@ -1026,15 +1043,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
black_size += size;
}
- ASSERT(object->address() + size <= top);
+ CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
- ASSERT_LE(black_size, page->LiveBytes());
+ CHECK_LE(black_size, page->LiveBytes());
}
- ASSERT(allocation_pointer_found_in_space);
+ CHECK(allocation_pointer_found_in_space);
}
-#endif
-
+#endif // VERIFY_HEAP
// -----------------------------------------------------------------------------
// NewSpace implementation
@@ -1172,6 +1188,7 @@ void NewSpace::Shrink() {
void NewSpace::UpdateAllocationInfo() {
+ MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
allocation_info_.top = to_space_.page_low();
allocation_info_.limit = to_space_.page_high();
@@ -1258,7 +1275,7 @@ MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// We do not use the SemiSpaceIterator because verification doesn't assume
// that it works (it depends on the invariants we are checking).
void NewSpace::Verify() {
@@ -1307,8 +1324,8 @@ void NewSpace::Verify() {
}
// Check semi-spaces.
- ASSERT_EQ(from_space_.id(), kFromSpace);
- ASSERT_EQ(to_space_.id(), kToSpace);
+ CHECK_EQ(from_space_.id(), kFromSpace);
+ CHECK_EQ(to_space_.id(), kToSpace);
from_space_.Verify();
to_space_.Verify();
}
@@ -1384,6 +1401,17 @@ bool SemiSpace::Uncommit() {
}
+size_t SemiSpace::CommittedPhysicalMemory() {
+ if (!is_committed()) return 0;
+ size_t size = 0;
+ NewSpacePageIterator it(this);
+ while (it.has_next()) {
+ size += it.next()->CommittedPhysicalMemory();
+ }
+ return size;
+}
+
+
bool SemiSpace::GrowTo(int new_capacity) {
if (!is_committed()) {
if (!Commit()) return false;
@@ -1524,8 +1552,9 @@ void SemiSpace::set_age_mark(Address mark) {
#ifdef DEBUG
void SemiSpace::Print() { }
+#endif
-
+#ifdef VERIFY_HEAP
void SemiSpace::Verify() {
bool is_from_space = (id_ == kFromSpace);
NewSpacePage* page = anchor_.next_page();
@@ -1555,8 +1584,9 @@ void SemiSpace::Verify() {
page = page->next_page();
}
}
+#endif
-
+#ifdef DEBUG
void SemiSpace::AssertValidRange(Address start, Address end) {
// Addresses belong to same semi-space
NewSpacePage* page = NewSpacePage::FromLimit(start);
@@ -1816,6 +1846,17 @@ void NewSpace::RecordPromotion(HeapObject* obj) {
promoted_histogram_[type].increment_bytes(obj->Size());
}
+
+size_t NewSpace::CommittedPhysicalMemory() {
+ if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+ MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
+ size_t size = to_space_.CommittedPhysicalMemory();
+ if (from_space_.is_committed()) {
+ size += from_space_.CommittedPhysicalMemory();
+ }
+ return size;
+}
+
// -----------------------------------------------------------------------------
// Free lists for old object spaces implementation
@@ -2027,15 +2068,16 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
// if it is big enough.
owner_->Free(owner_->top(), old_linear_size);
+ owner_->heap()->incremental_marking()->OldSpaceStep(
+ size_in_bytes - old_linear_size);
+
#ifdef DEBUG
for (int i = 0; i < size_in_bytes / kPointerSize; i++) {
- reinterpret_cast<Object**>(new_node->address())[i] = Smi::FromInt(0);
+ reinterpret_cast<Object**>(new_node->address())[i] =
+ Smi::FromInt(kCodeZapValue);
}
#endif
- owner_->heap()->incremental_marking()->OldSpaceStep(
- size_in_bytes - old_linear_size);
-
// The old-space-step might have finished sweeping and restarted marking.
// Verify that it did not turn the page of the new node into an evacuation
// candidate.
@@ -2257,11 +2299,40 @@ bool PagedSpace::ReserveSpace(int size_in_bytes) {
Free(top(), old_linear_size);
SetTop(new_area->address(), new_area->address() + size_in_bytes);
- Allocate(size_in_bytes);
return true;
}
+static void RepairFreeList(Heap* heap, FreeListNode* n) {
+ while (n != NULL) {
+ Map** map_location = reinterpret_cast<Map**>(n->address());
+ if (*map_location == NULL) {
+ *map_location = heap->free_space_map();
+ } else {
+ ASSERT(*map_location == heap->free_space_map());
+ }
+ n = n->next();
+ }
+}
+
+
+void FreeList::RepairLists(Heap* heap) {
+ RepairFreeList(heap, small_list_);
+ RepairFreeList(heap, medium_list_);
+ RepairFreeList(heap, large_list_);
+ RepairFreeList(heap, huge_list_);
+}
+
+
+// After we have booted, we have created a map which represents free space
+// on the heap. If there was already a free list then the elements on it
+// were created with the wrong FreeSpaceMap (normally NULL), so we need to
+// fix them.
+void PagedSpace::RepairFreeListsAfterBoot() {
+ free_list_.RepairLists(heap());
+}
+
+
// You have to call this last, since the implementation from PagedSpace
// doesn't know that memory was 'promised' to large object space.
bool LargeObjectSpace::ReserveSpace(int bytes) {
@@ -2520,25 +2591,27 @@ void FixedSpace::PrepareForMarkCompact() {
// -----------------------------------------------------------------------------
// MapSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
-#ifdef DEBUG
void MapSpace::VerifyObject(HeapObject* object) {
// The object should be a map or a free-list node.
- ASSERT(object->IsMap() || object->IsFreeSpace());
+ CHECK(object->IsMap() || object->IsFreeSpace());
}
-#endif
// -----------------------------------------------------------------------------
// GlobalPropertyCellSpace implementation
+// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
+// there is at least one non-inlined virtual function. I would prefer to hide
+// the VerifyObject definition behind VERIFY_HEAP.
-#ifdef DEBUG
void CellSpace::VerifyObject(HeapObject* object) {
// The object should be a global object property cell or a free-list node.
- ASSERT(object->IsJSGlobalPropertyCell() ||
+ CHECK(object->IsJSGlobalPropertyCell() ||
object->map() == heap()->two_pointer_filler_map());
}
-#endif
// -----------------------------------------------------------------------------
@@ -2648,18 +2721,31 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
HeapObject* object = page->GetObject();
-#ifdef DEBUG
- // Make the object consistent so the heap can be vefified in OldSpaceStep.
- reinterpret_cast<Object**>(object->address())[0] =
- heap()->fixed_array_map();
- reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
-#endif
+ if (Heap::ShouldZapGarbage()) {
+ // Make the object consistent so the heap can be verified in OldSpaceStep.
+ // We only need to do this in debug builds or if verify_heap is on.
+ reinterpret_cast<Object**>(object->address())[0] =
+ heap()->fixed_array_map();
+ reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+ }
heap()->incremental_marking()->OldSpaceStep(object_size);
return object;
}
+size_t LargeObjectSpace::CommittedPhysicalMemory() {
+ if (!VirtualMemory::HasLazyCommits()) return CommittedMemory();
+ size_t size = 0;
+ LargePage* current = first_page_;
+ while (current != NULL) {
+ size += current->CommittedPhysicalMemory();
+ current = current->next_page();
+ }
+ return size;
+}
+
+
// GC support
MaybeObject* LargeObjectSpace::FindObject(Address a) {
LargePage* page = FindPage(a);
@@ -2752,7 +2838,7 @@ bool LargeObjectSpace::Contains(HeapObject* object) {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// We do not assume that the large object iterator works, because it depends
// on the invariants we are checking during verification.
void LargeObjectSpace::Verify() {
@@ -2763,18 +2849,18 @@ void LargeObjectSpace::Verify() {
// object area start.
HeapObject* object = chunk->GetObject();
Page* page = Page::FromAddress(object->address());
- ASSERT(object->address() == page->area_start());
+ CHECK(object->address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be
// in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// We have only code, sequential strings, external strings
// (sequential strings that have been morphed into external
// strings), fixed arrays, and byte arrays in large object space.
- ASSERT(object->IsCode() || object->IsSeqString() ||
+ CHECK(object->IsCode() || object->IsSeqString() ||
object->IsExternalString() || object->IsFixedArray() ||
object->IsFixedDoubleArray() || object->IsByteArray());
@@ -2793,15 +2879,17 @@ void LargeObjectSpace::Verify() {
Object* element = array->get(j);
if (element->IsHeapObject()) {
HeapObject* element_object = HeapObject::cast(element);
- ASSERT(heap()->Contains(element_object));
- ASSERT(element_object->map()->IsMap());
+ CHECK(heap()->Contains(element_object));
+ CHECK(element_object->map()->IsMap());
}
}
}
}
}
+#endif
+#ifdef DEBUG
void LargeObjectSpace::Print() {
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
diff --git a/src/3rdparty/v8/src/spaces.h b/src/3rdparty/v8/src/spaces.h
index b0ecc5d..9121e9c 100644
--- a/src/3rdparty/v8/src/spaces.h
+++ b/src/3rdparty/v8/src/spaces.h
@@ -100,9 +100,6 @@ class Isolate;
#define ASSERT_OBJECT_ALIGNED(address) \
ASSERT((OffsetFrom(address) & kObjectAlignmentMask) == 0)
-#define ASSERT_MAP_ALIGNED(address) \
- ASSERT((OffsetFrom(address) & kMapAlignmentMask) == 0)
-
#define ASSERT_OBJECT_SIZE(size) \
ASSERT((0 < size) && (size <= Page::kMaxNonCodeHeapObjectSize))
@@ -284,7 +281,9 @@ class Bitmap {
bool IsClean() {
for (int i = 0; i < CellsCount(); i++) {
- if (cells()[i] != 0) return false;
+ if (cells()[i] != 0) {
+ return false;
+ }
}
return true;
}
@@ -373,6 +372,11 @@ class MemoryChunk {
return addr >= area_start() && addr <= area_end();
}
+ // Every n write barrier invocations we go to runtime even though
+ // we could have handled it in generated code. This lets us check
+ // whether we have hit the limit and should do some more marking.
+ static const int kWriteBarrierCounterGranularity = 500;
+
enum MemoryChunkFlags {
IS_EXECUTABLE,
ABOUT_TO_BE_FREED,
@@ -468,6 +472,15 @@ class MemoryChunk {
return live_byte_count_;
}
+ int write_barrier_counter() {
+ return static_cast<int>(write_barrier_counter_);
+ }
+
+ void set_write_barrier_counter(int counter) {
+ write_barrier_counter_ = counter;
+ }
+
+
static void IncrementLiveBytesFromGC(Address address, int by) {
MemoryChunk::FromAddress(address)->IncrementLiveBytes(by);
}
@@ -488,11 +501,14 @@ class MemoryChunk {
static const size_t kSlotsBufferOffset = kLiveBytesOffset + kIntSize;
- static const size_t kHeaderSize =
+ static const size_t kWriteBarrierCounterOffset =
kSlotsBufferOffset + kPointerSize + kPointerSize;
+ static const size_t kHeaderSize =
+ kWriteBarrierCounterOffset + kPointerSize + kPointerSize;
+
static const int kBodyOffset =
- CODE_POINTER_ALIGN(MAP_POINTER_ALIGN(kHeaderSize + Bitmap::kSize));
+ CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize);
// The start offset of the object area in a page. Aligned to both maps and
// code alignment to be suitable for both. Also aligned to 32 words because
@@ -601,6 +617,13 @@ class MemoryChunk {
return static_cast<int>(area_end() - area_start());
}
+ // Approximate amount of physical memory committed for this chunk.
+ size_t CommittedPhysicalMemory() {
+ return high_water_mark_;
+ }
+
+ static inline void UpdateHighWaterMark(Address mark);
+
protected:
MemoryChunk* next_chunk_;
MemoryChunk* prev_chunk_;
@@ -625,6 +648,10 @@ class MemoryChunk {
int live_byte_count_;
SlotsBuffer* slots_buffer_;
SkipList* skip_list_;
+ intptr_t write_barrier_counter_;
+ // Assuming the initial allocation on a page is sequential,
+ // count highest number of bytes ever allocated on the page.
+ int high_water_mark_;
static MemoryChunk* Initialize(Heap* heap,
Address base,
@@ -790,14 +817,6 @@ class Space : public Malloced {
virtual void Print() = 0;
#endif
- // After calling this we can allocate a certain number of bytes using only
- // linear allocation (with a LinearAllocationScope and an AlwaysAllocateScope)
- // without using freelists or causing a GC. This is used by partial
- // snapshots. It returns true of space was reserved or false if a GC is
- // needed. For paged spaces the space requested must include the space wasted
- // at the end of each when allocating linearly.
- virtual bool ReserveSpace(int bytes) = 0;
-
private:
Heap* heap_;
AllocationSpace id_;
@@ -1318,6 +1337,11 @@ class FreeListNode: public HeapObject {
inline void Zap();
+ static inline FreeListNode* cast(MaybeObject* maybe) {
+ ASSERT(!maybe->IsFailure());
+ return reinterpret_cast<FreeListNode*>(maybe);
+ }
+
private:
static const int kNextOffset = POINTER_SIZE_ALIGN(FreeSpace::kHeaderSize);
@@ -1380,6 +1404,9 @@ class FreeList BASE_EMBEDDED {
bool IsVeryLong();
#endif
+ // Used after booting the VM.
+ void RepairLists(Heap* heap);
+
struct SizeStats {
intptr_t Total() {
return small_size_ + medium_size_ + large_size_ + huge_size_;
@@ -1460,6 +1487,10 @@ class PagedSpace : public Space {
// linear in the number of objects in the page. It may be slow.
MUST_USE_RESULT MaybeObject* FindObject(Address addr);
+ // During boot the free_space_map is created, and afterwards we may need
+ // to write it into the free list nodes that were already created.
+ virtual void RepairFreeListsAfterBoot();
+
// Prepares for a mark-compact GC.
virtual void PrepareForMarkCompact();
@@ -1470,6 +1501,9 @@ class PagedSpace : public Space {
// spaces this equals the capacity.
intptr_t CommittedMemory() { return Capacity(); }
+ // Approximate amount of physical memory committed for this space.
+ size_t CommittedPhysicalMemory();
+
// Sets the capacity, the available space and the wasted space to zero.
// The stats are rebuilt during sweeping by adding each page to the
// capacity and the size when it is encountered. As free spaces are
@@ -1530,6 +1564,7 @@ class PagedSpace : public Space {
void SetTop(Address top, Address limit) {
ASSERT(top == limit ||
Page::FromAddress(top) == Page::FromAddress(limit - 1));
+ MemoryChunk::UpdateHighWaterMark(allocation_info_.top);
allocation_info_.top = top;
allocation_info_.limit = limit;
}
@@ -1551,19 +1586,21 @@ class PagedSpace : public Space {
// The dummy page that anchors the linked list of pages.
Page* anchor() { return &anchor_; }
-#ifdef DEBUG
- // Print meta info and objects in this space.
- virtual void Print();
-
+#ifdef VERIFY_HEAP
// Verify integrity of this space.
virtual void Verify(ObjectVisitor* visitor);
- // Reports statistics for the space
- void ReportStatistics();
-
// Overridden by subclasses to verify space-specific object
// properties (e.g., only maps or free-list nodes are in map space).
virtual void VerifyObject(HeapObject* obj) {}
+#endif
+
+#ifdef DEBUG
+ // Print meta info and objects in this space.
+ virtual void Print();
+
+ // Reports statistics for the space
+ void ReportStatistics();
// Report code object related statistics
void CollectCodeStatistics();
@@ -1911,9 +1948,12 @@ class SemiSpace : public Space {
NewSpacePage* first_page() { return anchor_.next_page(); }
NewSpacePage* current_page() { return current_page_; }
+#ifdef VERIFY_HEAP
+ virtual void Verify();
+#endif
+
#ifdef DEBUG
virtual void Print();
- virtual void Verify();
// Validate a range of of addresses in a SemiSpace.
// The "from" address must be on a page prior to the "to" address,
// in the linked page order, or it must be earlier on the same page.
@@ -1936,6 +1976,9 @@ class SemiSpace : public Space {
static void Swap(SemiSpace* from, SemiSpace* to);
+ // Approximate amount of physical memory committed for this space.
+ size_t CommittedPhysicalMemory();
+
private:
// Flips the semispace between being from-space and to-space.
// Copies the flags into the masked positions on all pages in the space.
@@ -2133,6 +2176,9 @@ class NewSpace : public Space {
return Capacity();
}
+ // Approximate amount of physical memory committed for this space.
+ size_t CommittedPhysicalMemory();
+
// Return the available bytes without growing.
intptr_t Available() {
return Capacity() - Size();
@@ -2238,9 +2284,12 @@ class NewSpace : public Space {
template <typename StringType>
inline void ShrinkStringAtAllocationBoundary(String* string, int len);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
// Verify the active semispace.
virtual void Verify();
+#endif
+
+#ifdef DEBUG
// Print the active semispace.
virtual void Print() { to_space_.Print(); }
#endif
@@ -2410,9 +2459,7 @@ class MapSpace : public FixedSpace {
}
protected:
-#ifdef DEBUG
virtual void VerifyObject(HeapObject* obj);
-#endif
private:
static const int kMapsPerPage = Page::kNonCodeObjectAreaSize / Map::kSize;
@@ -2448,9 +2495,7 @@ class CellSpace : public FixedSpace {
}
protected:
-#ifdef DEBUG
virtual void VerifyObject(HeapObject* obj);
-#endif
public:
TRACK_MEMORY("CellSpace")
@@ -2496,6 +2541,13 @@ class LargeObjectSpace : public Space {
return objects_size_;
}
+ intptr_t CommittedMemory() {
+ return Size();
+ }
+
+ // Approximate amount of physical memory committed for this space.
+ size_t CommittedPhysicalMemory();
+
int PageCount() {
return page_count_;
}
@@ -2525,8 +2577,11 @@ class LargeObjectSpace : public Space {
LargePage* first_page() { return first_page_; }
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
virtual void Verify();
+#endif
+
+#ifdef DEBUG
virtual void Print();
void ReportStatistics();
void CollectCodeStatistics();
diff --git a/src/3rdparty/v8/src/splay-tree-inl.h b/src/3rdparty/v8/src/splay-tree-inl.h
index 4640ed5..4eca71d 100644
--- a/src/3rdparty/v8/src/splay-tree-inl.h
+++ b/src/3rdparty/v8/src/splay-tree-inl.h
@@ -42,10 +42,11 @@ SplayTree<Config, Allocator>::~SplayTree() {
template<typename Config, class Allocator>
-bool SplayTree<Config, Allocator>::Insert(const Key& key, Locator* locator) {
+bool SplayTree<Config, Allocator>::Insert(const Key& key,
+ Locator* locator) {
if (is_empty()) {
// If the tree is empty, insert the new node.
- root_ = new Node(key, Config::NoValue());
+ root_ = new(allocator_) Node(key, Config::NoValue());
} else {
// Splay on the key to move the last node on the search path
// for the key to the root of the tree.
@@ -57,7 +58,7 @@ bool SplayTree<Config, Allocator>::Insert(const Key& key, Locator* locator) {
return false;
}
// Insert the new node.
- Node* node = new Node(key, Config::NoValue());
+ Node* node = new(allocator_) Node(key, Config::NoValue());
InsertInternal(cmp, node);
}
locator->bind(root_);
@@ -293,13 +294,13 @@ void SplayTree<Config, Allocator>::ForEach(Callback* callback) {
template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEachNode(Callback* callback) {
// Pre-allocate some space for tiny trees.
- List<Node*, Allocator> nodes_to_visit(10);
- if (root_ != NULL) nodes_to_visit.Add(root_);
+ List<Node*, Allocator> nodes_to_visit(10, allocator_);
+ if (root_ != NULL) nodes_to_visit.Add(root_, allocator_);
int pos = 0;
while (pos < nodes_to_visit.length()) {
Node* node = nodes_to_visit[pos++];
- if (node->left() != NULL) nodes_to_visit.Add(node->left());
- if (node->right() != NULL) nodes_to_visit.Add(node->right());
+ if (node->left() != NULL) nodes_to_visit.Add(node->left(), allocator_);
+ if (node->right() != NULL) nodes_to_visit.Add(node->right(), allocator_);
callback->Call(node);
}
}
diff --git a/src/3rdparty/v8/src/splay-tree.h b/src/3rdparty/v8/src/splay-tree.h
index 72231e4..8844d8a 100644
--- a/src/3rdparty/v8/src/splay-tree.h
+++ b/src/3rdparty/v8/src/splay-tree.h
@@ -50,7 +50,7 @@ namespace internal {
// Forward defined as
// template <typename Config, class Allocator = FreeStoreAllocationPolicy>
// class SplayTree;
-template <typename Config, class Allocator>
+template <typename Config, class AllocationPolicy>
class SplayTree {
public:
typedef typename Config::Key Key;
@@ -58,13 +58,21 @@ class SplayTree {
class Locator;
- SplayTree() : root_(NULL) { }
+ SplayTree(AllocationPolicy allocator = AllocationPolicy())
+ : root_(NULL), allocator_(allocator) { }
~SplayTree();
- INLINE(void* operator new(size_t size)) {
- return Allocator::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ return allocator.New(static_cast<int>(size));
+ }
+ INLINE(void operator delete(void* p)) {
+ AllocationPolicy::Delete(p);
+ }
+ // Please the MSVC compiler. We should never have to execute this.
+ INLINE(void operator delete(void* p, AllocationPolicy policy)) {
+ UNREACHABLE();
}
- INLINE(void operator delete(void* p, size_t)) { return Allocator::Delete(p); }
// Inserts the given key in this tree with the given value. Returns
// true if a node was inserted, otherwise false. If found the locator
@@ -112,11 +120,16 @@ class SplayTree {
left_(NULL),
right_(NULL) { }
- INLINE(void* operator new(size_t size)) {
- return Allocator::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size, AllocationPolicy allocator)) {
+ return allocator.New(static_cast<int>(size));
+ }
+ INLINE(void operator delete(void* p)) {
+ return AllocationPolicy::Delete(p);
}
- INLINE(void operator delete(void* p, size_t)) {
- return Allocator::Delete(p);
+ // Please the MSVC compiler. We should never have to execute
+ // this.
+ INLINE(void operator delete(void* p, AllocationPolicy allocator)) {
+ UNREACHABLE();
}
Key key() { return key_; }
@@ -184,7 +197,7 @@ class SplayTree {
class NodeDeleter BASE_EMBEDDED {
public:
NodeDeleter() { }
- void Call(Node* node) { delete node; }
+ void Call(Node* node) { AllocationPolicy::Delete(node); }
private:
DISALLOW_COPY_AND_ASSIGN(NodeDeleter);
@@ -194,6 +207,7 @@ class SplayTree {
void ForEachNode(Callback* callback);
Node* root_;
+ AllocationPolicy allocator_;
DISALLOW_COPY_AND_ASSIGN(SplayTree);
};
diff --git a/src/3rdparty/v8/src/store-buffer.cc b/src/3rdparty/v8/src/store-buffer.cc
index 3852155..66488ae 100644
--- a/src/3rdparty/v8/src/store-buffer.cc
+++ b/src/3rdparty/v8/src/store-buffer.cc
@@ -372,7 +372,7 @@ void StoreBuffer::GCPrologue() {
}
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
// Do nothing.
}
@@ -415,7 +415,7 @@ void StoreBuffer::VerifyPointers(LargeObjectSpace* space) {
void StoreBuffer::Verify() {
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
VerifyPointers(heap_->old_pointer_space(),
&StoreBuffer::FindPointersToNewSpaceInRegion);
VerifyPointers(heap_->map_space(),
@@ -427,9 +427,11 @@ void StoreBuffer::Verify() {
void StoreBuffer::GCEpilogue() {
during_gc_ = false;
+#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
+#endif
}
diff --git a/src/3rdparty/v8/src/store-buffer.h b/src/3rdparty/v8/src/store-buffer.h
index 951a9ca..0ade8ce 100644
--- a/src/3rdparty/v8/src/store-buffer.h
+++ b/src/3rdparty/v8/src/store-buffer.h
@@ -195,7 +195,7 @@ class StoreBuffer {
void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback);
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void VerifyPointers(PagedSpace* space, RegionCallback region_callback);
void VerifyPointers(LargeObjectSpace* space);
#endif
diff --git a/src/3rdparty/v8/src/string-stream.cc b/src/3rdparty/v8/src/string-stream.cc
index 35f7be5..cffd7b0 100644
--- a/src/3rdparty/v8/src/string-stream.cc
+++ b/src/3rdparty/v8/src/string-stream.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -274,10 +274,10 @@ void StringStream::OutputToFile(FILE* out) {
for (unsigned next; (next = position + 2048) < length_; position = next) {
char save = buffer_[next];
buffer_[next] = '\0';
- internal::PrintF(out, "%s", &buffer_[position]);
+ internal::FPrintF(out, "%s", &buffer_[position]);
buffer_[next] = save;
}
- internal::PrintF(out, "%s", &buffer_[position]);
+ internal::FPrintF(out, "%s", &buffer_[position]);
}
@@ -291,7 +291,7 @@ void StringStream::ClearMentionedObjectCache() {
isolate->set_string_stream_current_security_token(NULL);
if (isolate->string_stream_debug_object_cache() == NULL) {
isolate->set_string_stream_debug_object_cache(
- new List<HeapObject*, PreallocatedStorage>(0));
+ new List<HeapObject*, PreallocatedStorageAllocationPolicy>(0));
}
isolate->string_stream_debug_object_cache()->Clear();
}
@@ -348,9 +348,12 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
Add("<Invalid map>\n");
return;
}
+ int real_size = map->NumberOfOwnDescriptors();
DescriptorArray* descs = map->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->GetType(i) == FIELD) {
+ PropertyDetails details = descs->GetDetails(i);
+ if (details.descriptor_index() > real_size) continue;
+ if (details.type() == FIELD) {
Object* key = descs->GetKey(i);
if (key->IsString() || key->IsNumber()) {
int len = 3;
@@ -427,7 +430,7 @@ void StringStream::PrintMentionedObjectCache() {
PrintUsingMap(JSObject::cast(printee));
if (printee->IsJSArray()) {
JSArray* array = JSArray::cast(printee);
- if (array->HasFastElements()) {
+ if (array->HasFastObjectElements()) {
unsigned int limit = FixedArray::cast(array->elements())->length();
unsigned int length =
static_cast<uint32_t>(JSArray::cast(array)->length()->Number());
@@ -469,7 +472,7 @@ void StringStream::PrintSecurityTokenIfChanged(Object* f) {
Add("(Function context is outside heap)\n");
return;
}
- Object* token = context->global_context()->security_token();
+ Object* token = context->native_context()->security_token();
if (token != isolate->string_stream_current_security_token()) {
Add("Security context: %o\n", token);
isolate->set_string_stream_current_security_token(token);
diff --git a/src/3rdparty/v8/src/stub-cache.cc b/src/3rdparty/v8/src/stub-cache.cc
index af41231..3796d2d 100644
--- a/src/3rdparty/v8/src/stub-cache.cc
+++ b/src/3rdparty/v8/src/stub-cache.cc
@@ -43,7 +43,8 @@ namespace internal {
// StubCache implementation.
-StubCache::StubCache(Isolate* isolate) : isolate_(isolate) {
+StubCache::StubCache(Isolate* isolate, Zone* zone)
+ : isolate_(isolate) {
ASSERT(isolate == Isolate::Current());
}
@@ -118,7 +119,7 @@ Handle<Code> StubCache::ComputeLoadNonexistent(Handle<String> name,
// Compile the stub that is either shared for all names or
// name specific if there are global objects involved.
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::LOAD_IC, NONEXISTENT);
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::NONEXISTENT);
Handle<Object> probe(receiver->map()->FindInCodeCache(*cache_name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -137,7 +138,7 @@ Handle<Code> StubCache::ComputeLoadField(Handle<String> name,
Handle<JSObject> holder,
int field_index) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, FIELD);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::FIELD);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -157,7 +158,8 @@ Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name,
Handle<AccessorInfo> callback) {
ASSERT(v8::ToCData<Address>(callback->getter()) != 0);
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::CALLBACKS);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -171,13 +173,33 @@ Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name,
}
+Handle<Code> StubCache::ComputeLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::CALLBACKS);
+ Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+ if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+ LoadStubCompiler compiler(isolate_);
+ Handle<Code> code =
+ compiler.CompileLoadViaGetter(name, receiver, holder, getter);
+ PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+ GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+ JSObject::UpdateMapCodeCache(receiver, name, code);
+ return code;
+}
+
+
Handle<Code> StubCache::ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
Handle<JSFunction> value) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION);
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::CONSTANT_FUNCTION);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -195,7 +217,8 @@ Handle<Code> StubCache::ComputeLoadInterceptor(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, INTERCEPTOR);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::INTERCEPTOR);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -220,7 +243,8 @@ Handle<Code> StubCache::ComputeLoadGlobal(Handle<String> name,
Handle<JSGlobalPropertyCell> cell,
bool is_dont_delete) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::NORMAL);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -239,7 +263,8 @@ Handle<Code> StubCache::ComputeKeyedLoadField(Handle<String> name,
Handle<JSObject> holder,
int field_index) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, FIELD);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::FIELD);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -258,8 +283,8 @@ Handle<Code> StubCache::ComputeKeyedLoadConstant(Handle<String> name,
Handle<JSObject> holder,
Handle<JSFunction> value) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
- Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC,
+ Code::CONSTANT_FUNCTION);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -278,7 +303,7 @@ Handle<Code> StubCache::ComputeKeyedLoadInterceptor(Handle<String> name,
Handle<JSObject> holder) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, INTERCEPTOR);
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::INTERCEPTOR);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -298,7 +323,7 @@ Handle<Code> StubCache::ComputeKeyedLoadCallback(
Handle<AccessorInfo> callback) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::CALLBACKS);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -315,7 +340,7 @@ Handle<Code> StubCache::ComputeKeyedLoadCallback(
Handle<Code> StubCache::ComputeKeyedLoadArrayLength(Handle<String> name,
Handle<JSArray> receiver) {
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::CALLBACKS);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -331,7 +356,7 @@ Handle<Code> StubCache::ComputeKeyedLoadArrayLength(Handle<String> name,
Handle<Code> StubCache::ComputeKeyedLoadStringLength(Handle<String> name,
Handle<String> receiver) {
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::CALLBACKS);
Handle<Map> map(receiver->map());
Handle<Object> probe(map->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -349,7 +374,7 @@ Handle<Code> StubCache::ComputeKeyedLoadFunctionPrototype(
Handle<String> name,
Handle<JSFunction> receiver) {
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, Code::CALLBACKS);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -367,7 +392,8 @@ Handle<Code> StubCache::ComputeStoreField(Handle<String> name,
int field_index,
Handle<Map> transition,
StrictModeFlag strict_mode) {
- PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION;
+ Code::StubType type =
+ (transition.is_null()) ? Code::FIELD : Code::MAP_TRANSITION;
Code::Flags flags = Code::ComputeMonomorphicFlags(
Code::STORE_IC, type, strict_mode);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
@@ -384,7 +410,7 @@ Handle<Code> StubCache::ComputeStoreField(Handle<String> name,
Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
- Handle<JSObject> receiver,
+ Handle<Map> receiver_map,
KeyedIC::StubKind stub_kind,
StrictModeFlag strict_mode) {
KeyedAccessGrowMode grow_mode =
@@ -395,7 +421,7 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
Code::ComputeMonomorphicFlags(
stub_kind == KeyedIC::LOAD ? Code::KEYED_LOAD_IC
: Code::KEYED_STORE_IC,
- NORMAL,
+ Code::NORMAL,
extra_state);
Handle<String> name;
switch (stub_kind) {
@@ -412,7 +438,6 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
UNREACHABLE();
break;
}
- Handle<Map> receiver_map(receiver->map());
Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -447,7 +472,7 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
} else {
PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, 0));
}
- JSObject::UpdateMapCodeCache(receiver, name, code);
+ Map::UpdateCodeCache(receiver_map, name, code);
return code;
}
@@ -464,7 +489,7 @@ Handle<Code> StubCache::ComputeStoreGlobal(Handle<String> name,
Handle<JSGlobalPropertyCell> cell,
StrictModeFlag strict_mode) {
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, NORMAL, strict_mode);
+ Code::STORE_IC, Code::NORMAL, strict_mode);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -479,16 +504,38 @@ Handle<Code> StubCache::ComputeStoreGlobal(Handle<String> name,
Handle<Code> StubCache::ComputeStoreCallback(Handle<String> name,
Handle<JSObject> receiver,
+ Handle<JSObject> holder,
Handle<AccessorInfo> callback,
StrictModeFlag strict_mode) {
ASSERT(v8::ToCData<Address>(callback->setter()) != 0);
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, CALLBACKS, strict_mode);
+ Code::STORE_IC, Code::CALLBACKS, strict_mode);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
StoreStubCompiler compiler(isolate_, strict_mode);
- Handle<Code> code = compiler.CompileStoreCallback(receiver, callback, name);
+ Handle<Code> code =
+ compiler.CompileStoreCallback(name, receiver, holder, callback);
+ PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+ GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+ JSObject::UpdateMapCodeCache(receiver, name, code);
+ return code;
+}
+
+
+Handle<Code> StubCache::ComputeStoreViaSetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter,
+ StrictModeFlag strict_mode) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, Code::CALLBACKS, strict_mode);
+ Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+ if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+ StoreStubCompiler compiler(isolate_, strict_mode);
+ Handle<Code> code =
+ compiler.CompileStoreViaSetter(name, receiver, holder, setter);
PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
JSObject::UpdateMapCodeCache(receiver, name, code);
@@ -500,7 +547,7 @@ Handle<Code> StubCache::ComputeStoreInterceptor(Handle<String> name,
Handle<JSObject> receiver,
StrictModeFlag strict_mode) {
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, INTERCEPTOR, strict_mode);
+ Code::STORE_IC, Code::INTERCEPTOR, strict_mode);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -517,7 +564,8 @@ Handle<Code> StubCache::ComputeKeyedStoreField(Handle<String> name,
int field_index,
Handle<Map> transition,
StrictModeFlag strict_mode) {
- PropertyType type = (transition.is_null()) ? FIELD : MAP_TRANSITION;
+ Code::StubType type =
+ (transition.is_null()) ? Code::FIELD : Code::MAP_TRANSITION;
Code::Flags flags = Code::ComputeMonomorphicFlags(
Code::KEYED_STORE_IC, type, strict_mode);
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
@@ -560,7 +608,7 @@ Handle<Code> StubCache::ComputeCallConstant(int argc,
}
Code::Flags flags =
- Code::ComputeMonomorphicFlags(kind, CONSTANT_FUNCTION, extra_state,
+ Code::ComputeMonomorphicFlags(kind, Code::CONSTANT_FUNCTION, extra_state,
cache_holder, argc);
Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -598,7 +646,7 @@ Handle<Code> StubCache::ComputeCallField(int argc,
}
Code::Flags flags =
- Code::ComputeMonomorphicFlags(kind, FIELD, extra_state,
+ Code::ComputeMonomorphicFlags(kind, Code::FIELD, extra_state,
cache_holder, argc);
Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -635,7 +683,7 @@ Handle<Code> StubCache::ComputeCallInterceptor(int argc,
}
Code::Flags flags =
- Code::ComputeMonomorphicFlags(kind, INTERCEPTOR, extra_state,
+ Code::ComputeMonomorphicFlags(kind, Code::INTERCEPTOR, extra_state,
cache_holder, argc);
Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -665,7 +713,7 @@ Handle<Code> StubCache::ComputeCallGlobal(int argc,
IC::GetCodeCacheForObject(*receiver, *holder);
Handle<JSObject> map_holder(IC::GetCodeCacheHolder(*receiver, cache_holder));
Code::Flags flags =
- Code::ComputeMonomorphicFlags(kind, NORMAL, extra_state,
+ Code::ComputeMonomorphicFlags(kind, Code::NORMAL, extra_state,
cache_holder, argc);
Handle<Object> probe(map_holder->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -698,11 +746,9 @@ Code* StubCache::FindCallInitialize(int argc,
CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
Code::Flags flags =
- Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
-
- // Use raw_unchecked... so we don't get assert failures during GC.
+ Code::ComputeFlags(kind, UNINITIALIZED, extra_state, Code::NORMAL, argc);
UnseededNumberDictionary* dictionary =
- isolate()->heap()->raw_unchecked_non_monomorphic_cache();
+ isolate()->heap()->non_monomorphic_cache();
int entry = dictionary->FindEntry(isolate(), flags);
ASSERT(entry != -1);
Object* code = dictionary->ValueAt(entry);
@@ -719,7 +765,7 @@ Handle<Code> StubCache::ComputeCallInitialize(int argc,
CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
Code::Flags flags =
- Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
+ Code::ComputeFlags(kind, UNINITIALIZED, extra_state, Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -748,7 +794,7 @@ Handle<Code> StubCache::ComputeCallPreMonomorphic(
Code::Kind kind,
Code::ExtraICState extra_state) {
Code::Flags flags =
- Code::ComputeFlags(kind, PREMONOMORPHIC, extra_state, NORMAL, argc);
+ Code::ComputeFlags(kind, PREMONOMORPHIC, extra_state, Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -766,7 +812,7 @@ Handle<Code> StubCache::ComputeCallNormal(int argc,
Code::ExtraICState extra_state,
bool has_qml_global_receiver) {
Code::Flags flags =
- Code::ComputeFlags(kind, MONOMORPHIC, extra_state, NORMAL, argc);
+ Code::ComputeFlags(kind, MONOMORPHIC, extra_state, Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -783,7 +829,7 @@ Handle<Code> StubCache::ComputeCallArguments(int argc, Code::Kind kind) {
ASSERT(kind == Code::KEYED_CALL_IC);
Code::Flags flags =
Code::ComputeFlags(kind, MEGAMORPHIC, Code::kNoExtraICState,
- NORMAL, argc);
+ Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -802,7 +848,7 @@ Handle<Code> StubCache::ComputeCallMegamorphic(
Code::ExtraICState extra_state) {
Code::Flags flags =
Code::ComputeFlags(kind, MEGAMORPHIC, extra_state,
- NORMAL, argc);
+ Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -822,7 +868,7 @@ Handle<Code> StubCache::ComputeCallMiss(int argc,
// and monomorphic stubs are not mixed up together in the stub cache.
Code::Flags flags =
Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state,
- NORMAL, argc, OWN_MAP);
+ Code::NORMAL, argc, OWN_MAP);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -842,7 +888,7 @@ Handle<Code> StubCache::ComputeCallDebugBreak(int argc,
// the actual call ic to carry out the work.
Code::Flags flags =
Code::ComputeFlags(kind, DEBUG_BREAK, Code::kNoExtraICState,
- NORMAL, argc);
+ Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -861,7 +907,7 @@ Handle<Code> StubCache::ComputeCallDebugPrepareStepIn(int argc,
// the actual call ic to carry out the work.
Code::Flags flags =
Code::ComputeFlags(kind, DEBUG_PREPARE_STEP_IN, Code::kNoExtraICState,
- NORMAL, argc);
+ Code::NORMAL, argc);
Handle<UnseededNumberDictionary> cache =
isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
@@ -891,7 +937,8 @@ void StubCache::Clear() {
void StubCache::CollectMatchingMaps(SmallMapList* types,
String* name,
Code::Flags flags,
- Handle<Context> global_context) {
+ Handle<Context> native_context,
+ Zone* zone) {
for (int i = 0; i < kPrimaryTableSize; i++) {
if (primary_[i].key == name) {
Map* map = primary_[i].value->FindFirstMap();
@@ -901,8 +948,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
int offset = PrimaryOffset(name, flags, map);
if (entry(primary_, offset) == &primary_[i] &&
- !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
- types->Add(Handle<Map>(map));
+ !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
+ types->Add(Handle<Map>(map), zone);
}
}
}
@@ -925,8 +972,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
// Lookup in secondary table and add matches.
int offset = SecondaryOffset(name, flags, primary_offset);
if (entry(secondary_, offset) == &secondary_[i] &&
- !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
- types->Add(Handle<Map>(map));
+ !TypeFeedbackOracle::CanRetainOtherContext(map, *native_context)) {
+ types->Add(Handle<Map>(map), zone);
}
}
}
@@ -945,6 +992,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadCallbackProperty) {
Address getter_address = v8::ToCData<Address>(callback->getter());
v8::AccessorGetter fun = FUNCTION_CAST<v8::AccessorGetter>(getter_address);
ASSERT(fun != NULL);
+ ASSERT(callback->IsCompatibleReceiver(args[0]));
v8::AccessorInfo info(&args[0]);
HandleScope scope(isolate);
v8::Handle<v8::Value> result;
@@ -956,7 +1004,9 @@ RUNTIME_FUNCTION(MaybeObject*, LoadCallbackProperty) {
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (result.IsEmpty()) return HEAP->undefined_value();
- return *v8::Utils::OpenHandle(*result);
+ Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
+ result_internal->VerifyApiCallResultType();
+ return *result_internal;
}
@@ -966,6 +1016,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) {
Address setter_address = v8::ToCData<Address>(callback->setter());
v8::AccessorSetter fun = FUNCTION_CAST<v8::AccessorSetter>(setter_address);
ASSERT(fun != NULL);
+ ASSERT(callback->IsCompatibleReceiver(recv));
Handle<String> name = args.at<String>(2);
Handle<Object> value = args.at<Object>(3);
HandleScope scope(isolate);
@@ -1020,6 +1071,8 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) {
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!r.IsEmpty()) {
+ Handle<Object> result = v8::Utils::OpenHandle(*r);
+ result->VerifyApiCallResultType();
return *v8::Utils::OpenHandle(*r);
}
}
@@ -1076,7 +1129,9 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!r.IsEmpty()) {
*attrs = NONE;
- return *v8::Utils::OpenHandle(*r);
+ Handle<Object> result = v8::Utils::OpenHandle(*r);
+ result->VerifyApiCallResultType();
+ return *result;
}
}
@@ -1315,16 +1370,14 @@ void StubCompiler::LookupPostInterceptor(Handle<JSObject> holder,
Handle<String> name,
LookupResult* lookup) {
holder->LocalLookupRealNamedProperty(*name, lookup);
- if (lookup->IsProperty()) return;
-
- lookup->NotFound();
+ if (lookup->IsFound()) return;
if (holder->GetPrototype()->IsNull()) return;
-
holder->GetPrototype()->Lookup(*name, lookup);
}
-Handle<Code> LoadStubCompiler::GetCode(PropertyType type, Handle<String> name) {
+Handle<Code> LoadStubCompiler::GetCode(Code::StubType type,
+ Handle<String> name) {
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type);
Handle<Code> code = GetCodeWithFlags(flags, name);
PROFILE(isolate(), CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
@@ -1333,7 +1386,7 @@ Handle<Code> LoadStubCompiler::GetCode(PropertyType type, Handle<String> name) {
}
-Handle<Code> KeyedLoadStubCompiler::GetCode(PropertyType type,
+Handle<Code> KeyedLoadStubCompiler::GetCode(Code::StubType type,
Handle<String> name,
InlineCacheState state) {
Code::Flags flags = Code::ComputeFlags(
@@ -1345,7 +1398,7 @@ Handle<Code> KeyedLoadStubCompiler::GetCode(PropertyType type,
}
-Handle<Code> StoreStubCompiler::GetCode(PropertyType type,
+Handle<Code> StoreStubCompiler::GetCode(Code::StubType type,
Handle<String> name) {
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::STORE_IC, type, strict_mode_);
@@ -1356,7 +1409,7 @@ Handle<Code> StoreStubCompiler::GetCode(PropertyType type,
}
-Handle<Code> KeyedStoreStubCompiler::GetCode(PropertyType type,
+Handle<Code> KeyedStoreStubCompiler::GetCode(Code::StubType type,
Handle<String> name,
InlineCacheState state) {
Code::ExtraICState extra_state =
@@ -1424,6 +1477,7 @@ Handle<Code> CallStubCompiler::CompileCustomCall(
#undef CALL_GENERATOR_CASE
}
CallOptimization optimization(function);
+#ifndef _WIN32_WCE
ASSERT(optimization.is_simple_api_call());
return CompileFastApiCall(optimization,
object,
@@ -1431,10 +1485,15 @@ Handle<Code> CallStubCompiler::CompileCustomCall(
cell,
function,
fname);
+#else
+ // Disable optimization for wince as the calling convention looks different.
+ return Handle<Code>::null();
+#endif // _WIN32_WCE
}
-Handle<Code> CallStubCompiler::GetCode(PropertyType type, Handle<String> name) {
+Handle<Code> CallStubCompiler::GetCode(Code::StubType type,
+ Handle<String> name) {
int argc = arguments_.immediate();
Code::Flags flags = Code::ComputeMonomorphicFlags(kind_,
type,
@@ -1450,7 +1509,7 @@ Handle<Code> CallStubCompiler::GetCode(Handle<JSFunction> function) {
if (function->shared()->name()->IsString()) {
function_name = Handle<String>(String::cast(function->shared()->name()));
}
- return GetCode(CONSTANT_FUNCTION, function_name);
+ return GetCode(Code::CONSTANT_FUNCTION, function_name);
}
diff --git a/src/3rdparty/v8/src/stub-cache.h b/src/3rdparty/v8/src/stub-cache.h
index 206dddd..ec9274b 100644
--- a/src/3rdparty/v8/src/stub-cache.h
+++ b/src/3rdparty/v8/src/stub-cache.h
@@ -90,6 +90,11 @@ class StubCache {
Handle<JSObject> holder,
Handle<AccessorInfo> callback);
+ Handle<Code> ComputeLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter);
+
Handle<Code> ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
@@ -154,9 +159,16 @@ class StubCache {
Handle<Code> ComputeStoreCallback(Handle<String> name,
Handle<JSObject> receiver,
+ Handle<JSObject> holder,
Handle<AccessorInfo> callback,
StrictModeFlag strict_mode);
+ Handle<Code> ComputeStoreViaSetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter,
+ StrictModeFlag strict_mode);
+
Handle<Code> ComputeStoreInterceptor(Handle<String> name,
Handle<JSObject> receiver,
StrictModeFlag strict_mode);
@@ -169,7 +181,7 @@ class StubCache {
Handle<Map> transition,
StrictModeFlag strict_mode);
- Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<JSObject> receiver,
+ Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<Map> receiver_map,
KeyedIC::StubKind stub_kind,
StrictModeFlag strict_mode);
@@ -251,7 +263,8 @@ class StubCache {
void CollectMatchingMaps(SmallMapList* types,
String* name,
Code::Flags flags,
- Handle<Context> global_context);
+ Handle<Context> native_context,
+ Zone* zone);
// Generate code for probing the stub cache table.
// Arguments extra, extra2 and extra3 may be used to pass additional scratch
@@ -303,7 +316,7 @@ class StubCache {
Factory* factory() { return isolate()->factory(); }
private:
- explicit StubCache(Isolate* isolate);
+ StubCache(Isolate* isolate, Zone* zone);
Handle<Code> ComputeCallInitialize(int argc,
RelocInfo::Mode mode,
@@ -461,14 +474,16 @@ class StubCompiler BASE_EMBEDDED {
Register scratch2,
Label* miss_label);
- static void GenerateStoreField(MacroAssembler* masm,
- Handle<JSObject> object,
- int index,
- Handle<Map> transition,
- Register receiver_reg,
- Register name_reg,
- Register scratch,
- Label* miss_label);
+ void GenerateStoreField(MacroAssembler* masm,
+ Handle<JSObject> object,
+ int index,
+ Handle<Map> transition,
+ Handle<String> name,
+ Register receiver_reg,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Label* miss_label);
static void GenerateLoadMiss(MacroAssembler* masm,
Code::Kind kind);
@@ -512,6 +527,7 @@ class StubCompiler BASE_EMBEDDED {
int save_at_depth,
Label* miss);
+
protected:
Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<String> name);
@@ -536,10 +552,20 @@ class StubCompiler BASE_EMBEDDED {
Register scratch1,
Register scratch2,
Register scratch3,
+ Register scratch4,
Handle<AccessorInfo> callback,
Handle<String> name,
Label* miss);
+ void GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss);
+
void GenerateLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Register receiver,
@@ -594,6 +620,14 @@ class LoadStubCompiler: public StubCompiler {
Handle<JSObject> holder,
Handle<AccessorInfo> callback);
+ static void GenerateLoadViaGetter(MacroAssembler* masm,
+ Handle<JSFunction> getter);
+
+ Handle<Code> CompileLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter);
+
Handle<Code> CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
@@ -610,7 +644,7 @@ class LoadStubCompiler: public StubCompiler {
bool is_dont_delete);
private:
- Handle<Code> GetCode(PropertyType type, Handle<String> name);
+ Handle<Code> GetCode(Code::StubType type, Handle<String> name);
};
@@ -658,7 +692,7 @@ class KeyedLoadStubCompiler: public StubCompiler {
static void GenerateLoadDictionaryElement(MacroAssembler* masm);
private:
- Handle<Code> GetCode(PropertyType type,
+ Handle<Code> GetCode(Code::StubType type,
Handle<String> name,
InlineCacheState state = MONOMORPHIC);
};
@@ -675,9 +709,18 @@ class StoreStubCompiler: public StubCompiler {
Handle<Map> transition,
Handle<String> name);
- Handle<Code> CompileStoreCallback(Handle<JSObject> object,
- Handle<AccessorInfo> callback,
- Handle<String> name);
+ Handle<Code> CompileStoreCallback(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<AccessorInfo> callback);
+
+ static void GenerateStoreViaSetter(MacroAssembler* masm,
+ Handle<JSFunction> setter);
+
+ Handle<Code> CompileStoreViaSetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter);
Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
Handle<String> name);
@@ -687,7 +730,7 @@ class StoreStubCompiler: public StubCompiler {
Handle<String> name);
private:
- Handle<Code> GetCode(PropertyType type, Handle<String> name);
+ Handle<Code> GetCode(Code::StubType type, Handle<String> name);
StrictModeFlag strict_mode_;
};
@@ -728,7 +771,7 @@ class KeyedStoreStubCompiler: public StubCompiler {
static void GenerateStoreDictionaryElement(MacroAssembler* masm);
private:
- Handle<Code> GetCode(PropertyType type,
+ Handle<Code> GetCode(Code::StubType type,
Handle<String> name,
InlineCacheState state = MONOMORPHIC);
@@ -808,7 +851,7 @@ class CallStubCompiler: public StubCompiler {
Handle<JSFunction> function,
Handle<String> name);
- Handle<Code> GetCode(PropertyType type, Handle<String> name);
+ Handle<Code> GetCode(Code::StubType type, Handle<String> name);
Handle<Code> GetCode(Handle<JSFunction> function);
const ParameterCount& arguments() { return arguments_; }
diff --git a/src/3rdparty/v8/src/token.h b/src/3rdparty/v8/src/token.h
index 3036e55..863ba62 100644
--- a/src/3rdparty/v8/src/token.h
+++ b/src/3rdparty/v8/src/token.h
@@ -99,6 +99,7 @@ namespace internal {
T(SHL, "<<", 11) \
T(SAR, ">>", 11) \
T(SHR, ">>>", 11) \
+ T(ROR, "rotate right", 11) /* only used by Crankshaft */ \
T(ADD, "+", 12) \
T(SUB, "-", 12) \
T(MUL, "*", 13) \
diff --git a/src/3rdparty/v8/src/transitions-inl.h b/src/3rdparty/v8/src/transitions-inl.h
new file mode 100644
index 0000000..cfaa99d
--- /dev/null
+++ b/src/3rdparty/v8/src/transitions-inl.h
@@ -0,0 +1,220 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_TRANSITIONS_INL_H_
+#define V8_TRANSITIONS_INL_H_
+
+#include "objects-inl.h"
+#include "transitions.h"
+
+namespace v8 {
+namespace internal {
+
+
+#define FIELD_ADDR(p, offset) \
+ (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
+
+#define WRITE_FIELD(p, offset, value) \
+ (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
+
+#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
+ if (mode == UPDATE_WRITE_BARRIER) { \
+ heap->incremental_marking()->RecordWrite( \
+ object, HeapObject::RawField(object, offset), value); \
+ if (heap->InNewSpace(value)) { \
+ heap->RecordWrite(object->address(), offset); \
+ } \
+ }
+
+
+TransitionArray* TransitionArray::cast(Object* object) {
+ ASSERT(object->IsTransitionArray());
+ return reinterpret_cast<TransitionArray*>(object);
+}
+
+
+Map* TransitionArray::elements_transition() {
+ Object* transition_map = get(kElementsTransitionIndex);
+ return Map::cast(transition_map);
+}
+
+
+void TransitionArray::ClearElementsTransition() {
+ WRITE_FIELD(this, kElementsTransitionOffset, Smi::FromInt(0));
+}
+
+
+bool TransitionArray::HasElementsTransition() {
+ return IsFullTransitionArray() &&
+ get(kElementsTransitionIndex) != Smi::FromInt(0);
+}
+
+
+void TransitionArray::set_elements_transition(Map* transition_map,
+ WriteBarrierMode mode) {
+ ASSERT(IsFullTransitionArray());
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kElementsTransitionOffset, transition_map);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kElementsTransitionOffset, transition_map, mode);
+}
+
+
+Object* TransitionArray::back_pointer_storage() {
+ return get(kBackPointerStorageIndex);
+}
+
+
+void TransitionArray::set_back_pointer_storage(Object* back_pointer,
+ WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kBackPointerStorageOffset, back_pointer);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kBackPointerStorageOffset, back_pointer, mode);
+}
+
+
+bool TransitionArray::HasPrototypeTransitions() {
+ return IsFullTransitionArray() &&
+ get(kPrototypeTransitionsIndex) != Smi::FromInt(0);
+}
+
+
+FixedArray* TransitionArray::GetPrototypeTransitions() {
+ ASSERT(IsFullTransitionArray());
+ Object* prototype_transitions = get(kPrototypeTransitionsIndex);
+ return FixedArray::cast(prototype_transitions);
+}
+
+
+HeapObject* TransitionArray::UncheckedPrototypeTransitions() {
+ ASSERT(HasPrototypeTransitions());
+ return reinterpret_cast<HeapObject*>(get(kPrototypeTransitionsIndex));
+}
+
+
+void TransitionArray::SetPrototypeTransitions(FixedArray* transitions,
+ WriteBarrierMode mode) {
+ ASSERT(IsFullTransitionArray());
+ ASSERT(transitions->IsFixedArray());
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kPrototypeTransitionsOffset, transitions);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kPrototypeTransitionsOffset, transitions, mode);
+}
+
+
+Object** TransitionArray::GetPrototypeTransitionsSlot() {
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kPrototypeTransitionsOffset);
+}
+
+
+Object** TransitionArray::GetKeySlot(int transition_number) {
+ ASSERT(!IsSimpleTransition());
+ ASSERT(transition_number < number_of_transitions());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToKeyIndex(transition_number)));
+}
+
+
+String* TransitionArray::GetKey(int transition_number) {
+ if (IsSimpleTransition()) {
+ Map* target = GetTarget(kSimpleTransitionIndex);
+ int descriptor = target->LastAdded();
+ String* key = target->instance_descriptors()->GetKey(descriptor);
+ return key;
+ }
+ ASSERT(transition_number < number_of_transitions());
+ return String::cast(get(ToKeyIndex(transition_number)));
+}
+
+
+void TransitionArray::SetKey(int transition_number, String* key) {
+ ASSERT(!IsSimpleTransition());
+ ASSERT(transition_number < number_of_transitions());
+ set(ToKeyIndex(transition_number), key);
+}
+
+
+Map* TransitionArray::GetTarget(int transition_number) {
+ if (IsSimpleTransition()) {
+ ASSERT(transition_number == kSimpleTransitionIndex);
+ return Map::cast(get(kSimpleTransitionTarget));
+ }
+ ASSERT(transition_number < number_of_transitions());
+ return Map::cast(get(ToTargetIndex(transition_number)));
+}
+
+
+void TransitionArray::SetTarget(int transition_number, Map* value) {
+ if (IsSimpleTransition()) {
+ ASSERT(transition_number == kSimpleTransitionIndex);
+ return set(kSimpleTransitionTarget, value);
+ }
+ ASSERT(transition_number < number_of_transitions());
+ set(ToTargetIndex(transition_number), value);
+}
+
+
+PropertyDetails TransitionArray::GetTargetDetails(int transition_number) {
+ Map* map = GetTarget(transition_number);
+ DescriptorArray* descriptors = map->instance_descriptors();
+ int descriptor = map->LastAdded();
+ return descriptors->GetDetails(descriptor);
+}
+
+
+int TransitionArray::Search(String* name) {
+ if (IsSimpleTransition()) {
+ String* key = GetKey(kSimpleTransitionIndex);
+ if (key->Equals(name)) return kSimpleTransitionIndex;
+ return kNotFound;
+ }
+ return internal::Search<ALL_ENTRIES>(this, name);
+}
+
+
+void TransitionArray::NoIncrementalWriteBarrierSet(int transition_number,
+ String* key,
+ Map* target) {
+ FixedArray::NoIncrementalWriteBarrierSet(
+ this, ToKeyIndex(transition_number), key);
+ FixedArray::NoIncrementalWriteBarrierSet(
+ this, ToTargetIndex(transition_number), target);
+}
+
+
+#undef FIELD_ADDR
+#undef WRITE_FIELD
+#undef CONDITIONAL_WRITE_BARRIER
+
+
+} } // namespace v8::internal
+
+#endif // V8_TRANSITIONS_INL_H_
diff --git a/src/3rdparty/v8/src/transitions.cc b/src/3rdparty/v8/src/transitions.cc
new file mode 100644
index 0000000..56b6caf
--- /dev/null
+++ b/src/3rdparty/v8/src/transitions.cc
@@ -0,0 +1,160 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "objects.h"
+#include "transitions-inl.h"
+#include "utils.h"
+
+namespace v8 {
+namespace internal {
+
+
+static MaybeObject* AllocateRaw(int length) {
+ Heap* heap = Isolate::Current()->heap();
+
+ // Use FixedArray to not use TransitionArray::cast on incomplete object.
+ FixedArray* array;
+ MaybeObject* maybe_array = heap->AllocateFixedArray(length);
+ if (!maybe_array->To(&array)) return maybe_array;
+ return array;
+}
+
+
+MaybeObject* TransitionArray::Allocate(int number_of_transitions) {
+ FixedArray* array;
+ MaybeObject* maybe_array = AllocateRaw(ToKeyIndex(number_of_transitions));
+ if (!maybe_array->To(&array)) return maybe_array;
+ array->set(kElementsTransitionIndex, Smi::FromInt(0));
+ array->set(kPrototypeTransitionsIndex, Smi::FromInt(0));
+ return array;
+}
+
+
+void TransitionArray::NoIncrementalWriteBarrierCopyFrom(TransitionArray* origin,
+ int origin_transition,
+ int target_transition) {
+ NoIncrementalWriteBarrierSet(target_transition,
+ origin->GetKey(origin_transition),
+ origin->GetTarget(origin_transition));
+}
+
+
+static bool InsertionPointFound(String* key1, String* key2) {
+ return key1->Hash() > key2->Hash();
+}
+
+
+MaybeObject* TransitionArray::NewWith(SimpleTransitionFlag flag,
+ String* key,
+ Map* target,
+ Object* back_pointer) {
+ TransitionArray* result;
+ MaybeObject* maybe_result;
+
+ if (flag == SIMPLE_TRANSITION) {
+ maybe_result = AllocateRaw(kSimpleTransitionSize);
+ if (!maybe_result->To(&result)) return maybe_result;
+ result->set(kSimpleTransitionTarget, target);
+ } else {
+ maybe_result = Allocate(1);
+ if (!maybe_result->To(&result)) return maybe_result;
+ result->NoIncrementalWriteBarrierSet(0, key, target);
+ }
+ result->set_back_pointer_storage(back_pointer);
+ return result;
+}
+
+
+MaybeObject* TransitionArray::ExtendToFullTransitionArray() {
+ ASSERT(!IsFullTransitionArray());
+ int nof = number_of_transitions();
+ TransitionArray* result;
+ MaybeObject* maybe_result = Allocate(nof);
+ if (!maybe_result->To(&result)) return maybe_result;
+
+ if (nof == 1) {
+ result->NoIncrementalWriteBarrierCopyFrom(this, kSimpleTransitionIndex, 0);
+ }
+
+ result->set_back_pointer_storage(back_pointer_storage());
+ return result;
+}
+
+
+MaybeObject* TransitionArray::CopyInsert(String* name, Map* target) {
+ TransitionArray* result;
+
+ int number_of_transitions = this->number_of_transitions();
+ int new_size = number_of_transitions;
+
+ int insertion_index = this->Search(name);
+ if (insertion_index == kNotFound) ++new_size;
+
+ MaybeObject* maybe_array;
+ maybe_array = TransitionArray::Allocate(new_size);
+ if (!maybe_array->To(&result)) return maybe_array;
+
+ if (HasElementsTransition()) {
+ result->set_elements_transition(elements_transition());
+ }
+
+ if (HasPrototypeTransitions()) {
+ result->SetPrototypeTransitions(GetPrototypeTransitions());
+ }
+
+ if (insertion_index != kNotFound) {
+ for (int i = 0; i < number_of_transitions; ++i) {
+ if (i != insertion_index) {
+ result->NoIncrementalWriteBarrierCopyFrom(this, i, i);
+ }
+ }
+ result->NoIncrementalWriteBarrierSet(insertion_index, name, target);
+ return result;
+ }
+
+ insertion_index = 0;
+ for (; insertion_index < number_of_transitions; ++insertion_index) {
+ if (InsertionPointFound(GetKey(insertion_index), name)) break;
+ result->NoIncrementalWriteBarrierCopyFrom(
+ this, insertion_index, insertion_index);
+ }
+
+ result->NoIncrementalWriteBarrierSet(insertion_index, name, target);
+
+ for (; insertion_index < number_of_transitions; ++insertion_index) {
+ result->NoIncrementalWriteBarrierCopyFrom(
+ this, insertion_index, insertion_index + 1);
+ }
+
+ result->set_back_pointer_storage(back_pointer_storage());
+ return result;
+}
+
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/transitions.h b/src/3rdparty/v8/src/transitions.h
new file mode 100644
index 0000000..0a66026
--- /dev/null
+++ b/src/3rdparty/v8/src/transitions.h
@@ -0,0 +1,207 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_TRANSITIONS_H_
+#define V8_TRANSITIONS_H_
+
+#include "elements-kind.h"
+#include "heap.h"
+#include "isolate.h"
+#include "objects.h"
+#include "v8checks.h"
+
+namespace v8 {
+namespace internal {
+
+
+// TransitionArrays are fixed arrays used to hold map transitions for property,
+// constant, and element changes. They can either be simple transition arrays
+// that store a single property transition, or a full transition array that has
+// space for elements transitions, prototype transitions and multiple property
+// transitons. The details related to property transitions are accessed in the
+// descriptor array of the target map. In the case of a simple transition, the
+// key is also read from the descriptor array of the target map.
+//
+// The simple format of the these objects is:
+// [0] Undefined or back pointer map
+// [1] Single transition
+//
+// The full format is:
+// [0] Undefined or back pointer map
+// [1] Smi(0) or elements transition map
+// [2] Smi(0) or fixed array of prototype transitions
+// [3] First transition
+// [length() - kTransitionSize] Last transition
+class TransitionArray: public FixedArray {
+ public:
+ // Accessors for fetching instance transition at transition number.
+ inline String* GetKey(int transition_number);
+ inline void SetKey(int transition_number, String* value);
+ inline Object** GetKeySlot(int transition_number);
+ int GetSortedKeyIndex(int transition_number) { return transition_number; }
+
+ String* GetSortedKey(int transition_number) {
+ return GetKey(transition_number);
+ }
+
+ inline Map* GetTarget(int transition_number);
+ inline void SetTarget(int transition_number, Map* target);
+
+ inline PropertyDetails GetTargetDetails(int transition_number);
+
+ inline Map* elements_transition();
+ inline void set_elements_transition(
+ Map* target,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+ inline bool HasElementsTransition();
+ inline void ClearElementsTransition();
+
+ inline Object* back_pointer_storage();
+ inline void set_back_pointer_storage(
+ Object* back_pointer,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
+ inline FixedArray* GetPrototypeTransitions();
+ inline void SetPrototypeTransitions(
+ FixedArray* prototype_transitions,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+ inline Object** GetPrototypeTransitionsSlot();
+ inline bool HasPrototypeTransitions();
+ inline HeapObject* UncheckedPrototypeTransitions();
+
+ // Returns the number of transitions in the array.
+ int number_of_transitions() {
+ if (IsSimpleTransition()) return 1;
+ int len = length();
+ return len <= kFirstIndex ? 0 : (len - kFirstIndex) / kTransitionSize;
+ }
+
+ inline int number_of_entries() { return number_of_transitions(); }
+
+ // Allocate a new transition array with a single entry.
+ static MUST_USE_RESULT MaybeObject* NewWith(
+ SimpleTransitionFlag flag,
+ String* key,
+ Map* target,
+ Object* back_pointer);
+
+ MUST_USE_RESULT MaybeObject* ExtendToFullTransitionArray();
+
+ // Copy the transition array, inserting a new transition.
+ // TODO(verwaest): This should not cause an existing transition to be
+ // overwritten.
+ MUST_USE_RESULT MaybeObject* CopyInsert(String* name, Map* target);
+
+ // Copy a single transition from the origin array.
+ inline void NoIncrementalWriteBarrierCopyFrom(TransitionArray* origin,
+ int origin_transition,
+ int target_transition);
+
+ // Search a transition for a given property name.
+ inline int Search(String* name);
+
+ // Allocates a TransitionArray.
+ MUST_USE_RESULT static MaybeObject* Allocate(int number_of_transitions);
+
+ bool IsSimpleTransition() { return length() == kSimpleTransitionSize; }
+ bool IsFullTransitionArray() { return length() >= kFirstIndex; }
+
+ // Casting.
+ static inline TransitionArray* cast(Object* obj);
+
+ // Constant for denoting key was not found.
+ static const int kNotFound = -1;
+
+ static const int kBackPointerStorageIndex = 0;
+
+ // Layout for full transition arrays.
+ static const int kElementsTransitionIndex = 1;
+ static const int kPrototypeTransitionsIndex = 2;
+ static const int kFirstIndex = 3;
+
+ // Layout for simple transition arrays.
+ static const int kSimpleTransitionTarget = 1;
+ static const int kSimpleTransitionSize = 2;
+ static const int kSimpleTransitionIndex = 0;
+ STATIC_ASSERT(kSimpleTransitionIndex != kNotFound);
+
+ static const int kBackPointerStorageOffset = FixedArray::kHeaderSize;
+
+ // Layout for the full transition array header.
+ static const int kElementsTransitionOffset = kBackPointerStorageOffset +
+ kPointerSize;
+ static const int kPrototypeTransitionsOffset = kElementsTransitionOffset +
+ kPointerSize;
+
+ // Layout of map transition entries in full transition arrays.
+ static const int kTransitionKey = 0;
+ static const int kTransitionTarget = 1;
+ static const int kTransitionSize = 2;
+
+#ifdef OBJECT_PRINT
+ // Print all the transitions.
+ inline void PrintTransitions() {
+ PrintTransitions(stdout);
+ }
+ void PrintTransitions(FILE* out);
+#endif
+
+#ifdef DEBUG
+ bool IsSortedNoDuplicates(int valid_entries = -1);
+ bool IsConsistentWithBackPointers(Map* current_map);
+ bool IsEqualTo(TransitionArray* other);
+#endif
+
+ // The maximum number of transitions we want in a transition array (should
+ // fit in a page).
+ static const int kMaxNumberOfTransitions = 1024 + 512;
+
+ private:
+ // Conversion from transition number to array indices.
+ static int ToKeyIndex(int transition_number) {
+ return kFirstIndex +
+ (transition_number * kTransitionSize) +
+ kTransitionKey;
+ }
+
+ static int ToTargetIndex(int transition_number) {
+ return kFirstIndex +
+ (transition_number * kTransitionSize) +
+ kTransitionTarget;
+ }
+
+ inline void NoIncrementalWriteBarrierSet(int transition_number,
+ String* key,
+ Map* target);
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(TransitionArray);
+};
+
+
+} } // namespace v8::internal
+
+#endif // V8_TRANSITIONS_H_
diff --git a/src/3rdparty/v8/src/type-info.cc b/src/3rdparty/v8/src/type-info.cc
index 159be6a..bc6a46b 100644
--- a/src/3rdparty/v8/src/type-info.cc
+++ b/src/3rdparty/v8/src/type-info.cc
@@ -60,17 +60,24 @@ TypeInfo TypeInfo::TypeFromValue(Handle<Object> value) {
TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
- Handle<Context> global_context,
- Isolate* isolate) {
- global_context_ = global_context;
+ Handle<Context> native_context,
+ Isolate* isolate,
+ Zone* zone) {
+ native_context_ = native_context;
isolate_ = isolate;
+ zone_ = zone;
BuildDictionary(code);
ASSERT(reinterpret_cast<Address>(*dictionary_.location()) != kHandleZapValue);
}
-Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
- int entry = dictionary_->FindEntry(ast_id);
+static uint32_t IdToKey(TypeFeedbackId ast_id) {
+ return static_cast<uint32_t>(ast_id.ToInt());
+}
+
+
+Handle<Object> TypeFeedbackOracle::GetInfo(TypeFeedbackId ast_id) {
+ int entry = dictionary_->FindEntry(IdToKey(ast_id));
return entry != UnseededNumberDictionary::kNotFound
? Handle<Object>(dictionary_->ValueAt(entry))
: Handle<Object>::cast(isolate_->factory()->undefined_value());
@@ -78,7 +85,7 @@ Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
bool TypeFeedbackOracle::LoadIsUninitialized(Property* expr) {
- Handle<Object> map_or_code = GetInfo(expr->id());
+ Handle<Object> map_or_code = GetInfo(expr->PropertyFeedbackId());
if (map_or_code->IsMap()) return false;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
@@ -89,22 +96,23 @@ bool TypeFeedbackOracle::LoadIsUninitialized(Property* expr) {
bool TypeFeedbackOracle::LoadIsMonomorphicNormal(Property* expr) {
- Handle<Object> map_or_code = GetInfo(expr->id());
+ Handle<Object> map_or_code = GetInfo(expr->PropertyFeedbackId());
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
- return code->is_keyed_load_stub() &&
+ bool preliminary_checks = code->is_keyed_load_stub() &&
code->ic_state() == MONOMORPHIC &&
- Code::ExtractTypeFromFlags(code->flags()) == NORMAL &&
- code->FindFirstMap() != NULL &&
- !CanRetainOtherContext(code->FindFirstMap(), *global_context_);
+ Code::ExtractTypeFromFlags(code->flags()) == Code::NORMAL;
+ if (!preliminary_checks) return false;
+ Map* map = code->FindFirstMap();
+ return map != NULL && !CanRetainOtherContext(map, *native_context_);
}
return false;
}
bool TypeFeedbackOracle::LoadIsMegamorphicWithTypeInfo(Property* expr) {
- Handle<Object> map_or_code = GetInfo(expr->id());
+ Handle<Object> map_or_code = GetInfo(expr->PropertyFeedbackId());
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Builtins* builtins = isolate_->builtins();
@@ -116,27 +124,29 @@ bool TypeFeedbackOracle::LoadIsMegamorphicWithTypeInfo(Property* expr) {
}
-bool TypeFeedbackOracle::StoreIsMonomorphicNormal(Expression* expr) {
- Handle<Object> map_or_code = GetInfo(expr->id());
+bool TypeFeedbackOracle::StoreIsMonomorphicNormal(TypeFeedbackId ast_id) {
+ Handle<Object> map_or_code = GetInfo(ast_id);
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
bool allow_growth =
Code::GetKeyedAccessGrowMode(code->extra_ic_state()) ==
ALLOW_JSARRAY_GROWTH;
- return code->is_keyed_store_stub() &&
+ bool preliminary_checks =
+ code->is_keyed_store_stub() &&
!allow_growth &&
code->ic_state() == MONOMORPHIC &&
- Code::ExtractTypeFromFlags(code->flags()) == NORMAL &&
- code->FindFirstMap() != NULL &&
- !CanRetainOtherContext(code->FindFirstMap(), *global_context_);
+ Code::ExtractTypeFromFlags(code->flags()) == Code::NORMAL;
+ if (!preliminary_checks) return false;
+ Map* map = code->FindFirstMap();
+ return map != NULL && !CanRetainOtherContext(map, *native_context_);
}
return false;
}
-bool TypeFeedbackOracle::StoreIsMegamorphicWithTypeInfo(Expression* expr) {
- Handle<Object> map_or_code = GetInfo(expr->id());
+bool TypeFeedbackOracle::StoreIsMegamorphicWithTypeInfo(TypeFeedbackId ast_id) {
+ Handle<Object> map_or_code = GetInfo(ast_id);
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Builtins* builtins = isolate_->builtins();
@@ -154,26 +164,26 @@ bool TypeFeedbackOracle::StoreIsMegamorphicWithTypeInfo(Expression* expr) {
bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
- Handle<Object> value = GetInfo(expr->id());
+ Handle<Object> value = GetInfo(expr->CallFeedbackId());
return value->IsMap() || value->IsSmi() || value->IsJSFunction();
}
bool TypeFeedbackOracle::CallNewIsMonomorphic(CallNew* expr) {
- Handle<Object> value = GetInfo(expr->id());
+ Handle<Object> value = GetInfo(expr->CallNewFeedbackId());
return value->IsJSFunction();
}
bool TypeFeedbackOracle::ObjectLiteralStoreIsMonomorphic(
ObjectLiteral::Property* prop) {
- Handle<Object> map_or_code = GetInfo(prop->key()->id());
+ Handle<Object> map_or_code = GetInfo(prop->key()->LiteralFeedbackId());
return map_or_code->IsMap();
}
bool TypeFeedbackOracle::IsForInFastCase(ForInStatement* stmt) {
- Handle<Object> value = GetInfo(stmt->PrepareId());
+ Handle<Object> value = GetInfo(stmt->ForInFeedbackId());
return value->IsSmi() &&
Smi::cast(*value)->value() == TypeFeedbackCells::kForInFastCaseMarker;
}
@@ -181,12 +191,12 @@ bool TypeFeedbackOracle::IsForInFastCase(ForInStatement* stmt) {
Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
ASSERT(LoadIsMonomorphicNormal(expr));
- Handle<Object> map_or_code = GetInfo(expr->id());
+ Handle<Object> map_or_code = GetInfo(expr->PropertyFeedbackId());
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Map* first_map = code->FindFirstMap();
ASSERT(first_map != NULL);
- return CanRetainOtherContext(first_map, *global_context_)
+ return CanRetainOtherContext(first_map, *native_context_)
? Handle<Map>::null()
: Handle<Map>(first_map);
}
@@ -194,14 +204,15 @@ Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
}
-Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
- ASSERT(StoreIsMonomorphicNormal(expr));
- Handle<Object> map_or_code = GetInfo(expr->id());
+Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(
+ TypeFeedbackId ast_id) {
+ ASSERT(StoreIsMonomorphicNormal(ast_id));
+ Handle<Object> map_or_code = GetInfo(ast_id);
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Map* first_map = code->FindFirstMap();
ASSERT(first_map != NULL);
- return CanRetainOtherContext(first_map, *global_context_)
+ return CanRetainOtherContext(first_map, *native_context_)
? Handle<Map>::null()
: Handle<Map>(first_map);
}
@@ -212,16 +223,18 @@ Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
void TypeFeedbackOracle::LoadReceiverTypes(Property* expr,
Handle<String> name,
SmallMapList* types) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
- CollectReceiverTypes(expr->id(), name, flags, types);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::LOAD_IC, Code::NORMAL);
+ CollectReceiverTypes(expr->PropertyFeedbackId(), name, flags, types);
}
void TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
Handle<String> name,
SmallMapList* types) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
- CollectReceiverTypes(expr->id(), name, flags, types);
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::STORE_IC, Code::NORMAL);
+ CollectReceiverTypes(expr->AssignmentFeedbackId(), name, flags, types);
}
@@ -237,16 +250,16 @@ void TypeFeedbackOracle::CallReceiverTypes(Call* expr,
CallIC::Contextual::encode(call_kind == CALL_AS_FUNCTION);
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::CALL_IC,
- NORMAL,
+ Code::NORMAL,
extra_ic_state,
OWN_MAP,
arity);
- CollectReceiverTypes(expr->id(), name, flags, types);
+ CollectReceiverTypes(expr->CallFeedbackId(), name, flags, types);
}
CheckType TypeFeedbackOracle::GetCallCheckType(Call* expr) {
- Handle<Object> value = GetInfo(expr->id());
+ Handle<Object> value = GetInfo(expr->CallFeedbackId());
if (!value->IsSmi()) return RECEIVER_MAP_CHECK;
CheckType check = static_cast<CheckType>(Smi::cast(*value)->value());
ASSERT(check != RECEIVER_MAP_CHECK);
@@ -262,13 +275,13 @@ Handle<JSObject> TypeFeedbackOracle::GetPrototypeForPrimitiveCheck(
UNREACHABLE();
break;
case STRING_CHECK:
- function = global_context_->string_function();
+ function = native_context_->string_function();
break;
case NUMBER_CHECK:
- function = global_context_->number_function();
+ function = native_context_->number_function();
break;
case BOOLEAN_CHECK:
- function = global_context_->boolean_function();
+ function = native_context_->boolean_function();
break;
}
ASSERT(function != NULL);
@@ -277,30 +290,30 @@ Handle<JSObject> TypeFeedbackOracle::GetPrototypeForPrimitiveCheck(
Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(Call* expr) {
- return Handle<JSFunction>::cast(GetInfo(expr->id()));
+ return Handle<JSFunction>::cast(GetInfo(expr->CallFeedbackId()));
}
Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(CallNew* expr) {
- return Handle<JSFunction>::cast(GetInfo(expr->id()));
+ return Handle<JSFunction>::cast(GetInfo(expr->CallNewFeedbackId()));
}
Handle<Map> TypeFeedbackOracle::GetObjectLiteralStoreMap(
ObjectLiteral::Property* prop) {
ASSERT(ObjectLiteralStoreIsMonomorphic(prop));
- return Handle<Map>::cast(GetInfo(prop->key()->id()));
+ return Handle<Map>::cast(GetInfo(prop->key()->LiteralFeedbackId()));
}
bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
- return *GetInfo(expr->id()) ==
+ return *GetInfo(expr->PropertyFeedbackId()) ==
isolate_->builtins()->builtin(id);
}
TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->CompareOperationFeedbackId());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
@@ -330,7 +343,7 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
bool TypeFeedbackOracle::IsSymbolCompare(CompareOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->CompareOperationFeedbackId());
if (!object->IsCode()) return false;
Handle<Code> code = Handle<Code>::cast(object);
if (!code->is_compare_ic_stub()) return false;
@@ -340,7 +353,7 @@ bool TypeFeedbackOracle::IsSymbolCompare(CompareOperation* expr) {
Handle<Map> TypeFeedbackOracle::GetCompareMap(CompareOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->CompareOperationFeedbackId());
if (!object->IsCode()) return Handle<Map>::null();
Handle<Code> code = Handle<Code>::cast(object);
if (!code->is_compare_ic_stub()) return Handle<Map>::null();
@@ -350,14 +363,14 @@ Handle<Map> TypeFeedbackOracle::GetCompareMap(CompareOperation* expr) {
}
Map* first_map = code->FindFirstMap();
ASSERT(first_map != NULL);
- return CanRetainOtherContext(first_map, *global_context_)
+ return CanRetainOtherContext(first_map, *native_context_)
? Handle<Map>::null()
: Handle<Map>(first_map);
}
TypeInfo TypeFeedbackOracle::UnaryType(UnaryOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->UnaryOperationFeedbackId());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
@@ -376,7 +389,7 @@ TypeInfo TypeFeedbackOracle::UnaryType(UnaryOperation* expr) {
TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->BinaryOperationFeedbackId());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
@@ -460,7 +473,7 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
TypeInfo TypeFeedbackOracle::IncrementType(CountOperation* expr) {
- Handle<Object> object = GetInfo(expr->CountId());
+ Handle<Object> object = GetInfo(expr->CountBinOpFeedbackId());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
@@ -488,7 +501,7 @@ TypeInfo TypeFeedbackOracle::IncrementType(CountOperation* expr) {
}
-void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
+void TypeFeedbackOracle::CollectReceiverTypes(TypeFeedbackId ast_id,
Handle<String> name,
Code::Flags flags,
SmallMapList* types) {
@@ -501,34 +514,35 @@ void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
// we need a generic store (or load) here.
ASSERT(Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC);
} else if (object->IsMap()) {
- types->Add(Handle<Map>::cast(object));
+ types->Add(Handle<Map>::cast(object), zone());
} else if (FLAG_collect_megamorphic_maps_from_stub_cache &&
Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
- types->Reserve(4);
+ types->Reserve(4, zone());
ASSERT(object->IsCode());
isolate_->stub_cache()->CollectMatchingMaps(types,
*name,
flags,
- global_context_);
+ native_context_,
+ zone());
}
}
-// Check if a map originates from a given global context. We use this
+// Check if a map originates from a given native context. We use this
// information to filter out maps from different context to avoid
// retaining objects from different tabs in Chrome via optimized code.
bool TypeFeedbackOracle::CanRetainOtherContext(Map* map,
- Context* global_context) {
+ Context* native_context) {
Object* constructor = NULL;
while (!map->prototype()->IsNull()) {
constructor = map->constructor();
if (!constructor->IsNull()) {
// If the constructor is not null or a JSFunction, we have to
- // conservatively assume that it may retain a global context.
+ // conservatively assume that it may retain a native context.
if (!constructor->IsJSFunction()) return true;
// Check if the constructor directly references a foreign context.
if (CanRetainOtherContext(JSFunction::cast(constructor),
- global_context)) {
+ native_context)) {
return true;
}
}
@@ -537,26 +551,27 @@ bool TypeFeedbackOracle::CanRetainOtherContext(Map* map,
constructor = map->constructor();
if (constructor->IsNull()) return false;
JSFunction* function = JSFunction::cast(constructor);
- return CanRetainOtherContext(function, global_context);
+ return CanRetainOtherContext(function, native_context);
}
bool TypeFeedbackOracle::CanRetainOtherContext(JSFunction* function,
- Context* global_context) {
- return function->context()->global() != global_context->global()
- && function->context()->global() != global_context->builtins();
+ Context* native_context) {
+ return function->context()->global_object() != native_context->global_object()
+ && function->context()->global_object() != native_context->builtins();
}
-static void AddMapIfMissing(Handle<Map> map, SmallMapList* list) {
+static void AddMapIfMissing(Handle<Map> map, SmallMapList* list,
+ Zone* zone) {
for (int i = 0; i < list->length(); ++i) {
if (list->at(i).is_identical_to(map)) return;
}
- list->Add(map);
+ list->Add(map, zone);
}
-void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
+void TypeFeedbackOracle::CollectKeyedReceiverTypes(TypeFeedbackId ast_id,
SmallMapList* types) {
Handle<Object> object = GetInfo(ast_id);
if (!object->IsCode()) return;
@@ -570,8 +585,8 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
Object* object = info->target_object();
if (object->IsMap()) {
Map* map = Map::cast(object);
- if (!CanRetainOtherContext(map, *global_context_)) {
- AddMapIfMissing(Handle<Map>(map), types);
+ if (!CanRetainOtherContext(map, *native_context_)) {
+ AddMapIfMissing(Handle<Map>(map), types, zone());
}
}
}
@@ -579,7 +594,7 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
}
-byte TypeFeedbackOracle::ToBooleanTypes(unsigned ast_id) {
+byte TypeFeedbackOracle::ToBooleanTypes(TypeFeedbackId ast_id) {
Handle<Object> object = GetInfo(ast_id);
return object->IsCode() ? Handle<Code>::cast(object)->to_boolean_state() : 0;
}
@@ -591,7 +606,7 @@ byte TypeFeedbackOracle::ToBooleanTypes(unsigned ast_id) {
// infos before we process them.
void TypeFeedbackOracle::BuildDictionary(Handle<Code> code) {
AssertNoAllocation no_allocation;
- ZoneList<RelocInfo> infos(16);
+ ZoneList<RelocInfo> infos(16, zone());
HandleScope scope;
GetRelocInfos(code, &infos);
CreateDictionary(code, &infos);
@@ -606,7 +621,7 @@ void TypeFeedbackOracle::GetRelocInfos(Handle<Code> code,
ZoneList<RelocInfo>* infos) {
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
for (RelocIterator it(*code, mask); !it.done(); it.next()) {
- infos->Add(*it.rinfo());
+ infos->Add(*it.rinfo(), zone());
}
}
@@ -640,7 +655,8 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
for (int i = 0; i < infos->length(); i++) {
RelocInfo reloc_entry = (*infos)[i];
Address target_address = reloc_entry.target_address();
- unsigned ast_id = static_cast<unsigned>((*infos)[i].data());
+ TypeFeedbackId ast_id =
+ TypeFeedbackId(static_cast<unsigned>((*infos)[i].data()));
Code* target = Code::GetCodeFromTargetAddress(target_address);
switch (target->kind()) {
case Code::LOAD_IC:
@@ -656,7 +672,7 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
if (map == NULL) {
SetInfo(ast_id, static_cast<Object*>(target));
} else if (!CanRetainOtherContext(Map::cast(map),
- *global_context_)) {
+ *native_context_)) {
SetInfo(ast_id, map);
}
}
@@ -693,21 +709,22 @@ void TypeFeedbackOracle::ProcessTypeFeedbackCells(Handle<Code> code) {
Handle<TypeFeedbackCells> cache(
TypeFeedbackInfo::cast(raw_info)->type_feedback_cells());
for (int i = 0; i < cache->CellCount(); i++) {
- unsigned ast_id = cache->AstId(i)->value();
+ TypeFeedbackId ast_id = cache->AstId(i);
Object* value = cache->Cell(i)->value();
if (value->IsSmi() ||
(value->IsJSFunction() &&
!CanRetainOtherContext(JSFunction::cast(value),
- *global_context_))) {
+ *native_context_))) {
SetInfo(ast_id, value);
}
}
}
-void TypeFeedbackOracle::SetInfo(unsigned ast_id, Object* target) {
- ASSERT(dictionary_->FindEntry(ast_id) == UnseededNumberDictionary::kNotFound);
- MaybeObject* maybe_result = dictionary_->AtNumberPut(ast_id, target);
+void TypeFeedbackOracle::SetInfo(TypeFeedbackId ast_id, Object* target) {
+ ASSERT(dictionary_->FindEntry(IdToKey(ast_id)) ==
+ UnseededNumberDictionary::kNotFound);
+ MaybeObject* maybe_result = dictionary_->AtNumberPut(IdToKey(ast_id), target);
USE(maybe_result);
#ifdef DEBUG
Object* result = NULL;
diff --git a/src/3rdparty/v8/src/type-info.h b/src/3rdparty/v8/src/type-info.h
index d461331..00d88c2 100644
--- a/src/3rdparty/v8/src/type-info.h
+++ b/src/3rdparty/v8/src/type-info.h
@@ -232,17 +232,18 @@ class UnaryOperation;
class ForInStatement;
-class TypeFeedbackOracle BASE_EMBEDDED {
+class TypeFeedbackOracle: public ZoneObject {
public:
TypeFeedbackOracle(Handle<Code> code,
- Handle<Context> global_context,
- Isolate* isolate);
+ Handle<Context> native_context,
+ Isolate* isolate,
+ Zone* zone);
bool LoadIsMonomorphicNormal(Property* expr);
bool LoadIsUninitialized(Property* expr);
bool LoadIsMegamorphicWithTypeInfo(Property* expr);
- bool StoreIsMonomorphicNormal(Expression* expr);
- bool StoreIsMegamorphicWithTypeInfo(Expression* expr);
+ bool StoreIsMonomorphicNormal(TypeFeedbackId ast_id);
+ bool StoreIsMegamorphicWithTypeInfo(TypeFeedbackId ast_id);
bool CallIsMonomorphic(Call* expr);
bool CallNewIsMonomorphic(CallNew* expr);
bool ObjectLiteralStoreIsMonomorphic(ObjectLiteral::Property* prop);
@@ -250,7 +251,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
bool IsForInFastCase(ForInStatement* expr);
Handle<Map> LoadMonomorphicReceiverType(Property* expr);
- Handle<Map> StoreMonomorphicReceiverType(Expression* expr);
+ Handle<Map> StoreMonomorphicReceiverType(TypeFeedbackId ast_id);
void LoadReceiverTypes(Property* expr,
Handle<String> name,
@@ -262,12 +263,12 @@ class TypeFeedbackOracle BASE_EMBEDDED {
Handle<String> name,
CallKind call_kind,
SmallMapList* types);
- void CollectKeyedReceiverTypes(unsigned ast_id,
+ void CollectKeyedReceiverTypes(TypeFeedbackId ast_id,
SmallMapList* types);
- static bool CanRetainOtherContext(Map* map, Context* global_context);
+ static bool CanRetainOtherContext(Map* map, Context* native_context);
static bool CanRetainOtherContext(JSFunction* function,
- Context* global_context);
+ Context* native_context);
CheckType GetCallCheckType(Call* expr);
Handle<JSObject> GetPrototypeForPrimitiveCheck(CheckType check);
@@ -282,7 +283,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
// TODO(1571) We can't use ToBooleanStub::Types as the return value because
// of various cylces in our headers. Death to tons of implementations in
// headers!! :-P
- byte ToBooleanTypes(unsigned ast_id);
+ byte ToBooleanTypes(TypeFeedbackId ast_id);
// Get type information for arithmetic operations and compares.
TypeInfo UnaryType(UnaryOperation* expr);
@@ -293,13 +294,15 @@ class TypeFeedbackOracle BASE_EMBEDDED {
TypeInfo SwitchType(CaseClause* clause);
TypeInfo IncrementType(CountOperation* expr);
+ Zone* zone() const { return zone_; }
+
private:
- void CollectReceiverTypes(unsigned ast_id,
+ void CollectReceiverTypes(TypeFeedbackId ast_id,
Handle<String> name,
Code::Flags flags,
SmallMapList* types);
- void SetInfo(unsigned ast_id, Object* target);
+ void SetInfo(TypeFeedbackId ast_id, Object* target);
void BuildDictionary(Handle<Code> code);
void GetRelocInfos(Handle<Code> code, ZoneList<RelocInfo>* infos);
@@ -312,11 +315,12 @@ class TypeFeedbackOracle BASE_EMBEDDED {
// Returns an element from the backing store. Returns undefined if
// there is no information.
- Handle<Object> GetInfo(unsigned ast_id);
+ Handle<Object> GetInfo(TypeFeedbackId ast_id);
- Handle<Context> global_context_;
+ Handle<Context> native_context_;
Isolate* isolate_;
Handle<UnseededNumberDictionary> dictionary_;
+ Zone* zone_;
DISALLOW_COPY_AND_ASSIGN(TypeFeedbackOracle);
};
diff --git a/src/3rdparty/v8/src/unicode-inl.h b/src/3rdparty/v8/src/unicode-inl.h
index 9c0ebf9..ec9c69f 100644
--- a/src/3rdparty/v8/src/unicode-inl.h
+++ b/src/3rdparty/v8/src/unicode-inl.h
@@ -29,6 +29,7 @@
#define V8_UNICODE_INL_H_
#include "unicode.h"
+#include "checks.h"
namespace unibrow {
@@ -144,6 +145,7 @@ uchar CharacterStream::GetNext() {
} else {
remaining_--;
}
+ ASSERT(BoundsCheck(cursor_));
return result;
}
diff --git a/src/3rdparty/v8/src/unicode.h b/src/3rdparty/v8/src/unicode.h
index 94ab1b4..00227c2 100644
--- a/src/3rdparty/v8/src/unicode.h
+++ b/src/3rdparty/v8/src/unicode.h
@@ -28,7 +28,9 @@
#ifndef V8_UNICODE_H_
#define V8_UNICODE_H_
+#ifndef _WIN32_WCE
#include <sys/types.h>
+#endif
/**
* \file
@@ -201,6 +203,7 @@ class CharacterStream {
protected:
virtual void FillBuffer() = 0;
+ virtual bool BoundsCheck(unsigned offset) = 0;
// The number of characters left in the current buffer
unsigned remaining_;
// The current offset within the buffer
@@ -228,6 +231,9 @@ class InputBuffer : public CharacterStream {
InputBuffer() { }
explicit InputBuffer(Input input) { Reset(input); }
virtual void FillBuffer();
+ virtual bool BoundsCheck(unsigned offset) {
+ return (buffer_ != util_buffer_) || (offset < kSize);
+ }
// A custom offset that can be used by the string implementation to
// mark progress within the encoded string.
diff --git a/src/3rdparty/v8/src/utils.h b/src/3rdparty/v8/src/utils.h
index f116c14..e03f96f 100644
--- a/src/3rdparty/v8/src/utils.h
+++ b/src/3rdparty/v8/src/utils.h
@@ -248,6 +248,7 @@ class BitField {
// bitfield without compiler warnings we have to compute 2^32 without
// using a shift count of 32.
static const uint32_t kMask = ((1U << shift) << size) - (1U << shift);
+ static const uint32_t kShift = shift;
// Value for the field with all bits set.
static const T kMax = static_cast<T>((1U << size) - 1);
@@ -861,7 +862,11 @@ class EmbeddedContainer {
public:
EmbeddedContainer() : elems_() { }
- int length() { return NumElements; }
+ int length() const { return NumElements; }
+ const ElementType& operator[](int i) const {
+ ASSERT(i < length());
+ return elems_[i];
+ }
ElementType& operator[](int i) {
ASSERT(i < length());
return elems_[i];
@@ -875,7 +880,12 @@ class EmbeddedContainer {
template<typename ElementType>
class EmbeddedContainer<ElementType, 0> {
public:
- int length() { return 0; }
+ int length() const { return 0; }
+ const ElementType& operator[](int i) const {
+ UNREACHABLE();
+ static ElementType t = 0;
+ return t;
+ }
ElementType& operator[](int i) {
UNREACHABLE();
static ElementType t = 0;
@@ -973,13 +983,59 @@ class EnumSet {
T Mask(E element) const {
// The strange typing in ASSERT is necessary to avoid stupid warnings, see:
// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=43680
- ASSERT(element < static_cast<int>(sizeof(T) * CHAR_BIT));
+ ASSERT(static_cast<int>(element) < static_cast<int>(sizeof(T) * CHAR_BIT));
return 1 << element;
}
T bits_;
};
+
+class TypeFeedbackId {
+ public:
+ explicit TypeFeedbackId(int id) : id_(id) { }
+ int ToInt() const { return id_; }
+
+ static TypeFeedbackId None() { return TypeFeedbackId(kNoneId); }
+ bool IsNone() const { return id_ == kNoneId; }
+
+ private:
+ static const int kNoneId = -1;
+
+ int id_;
+};
+
+
+class BailoutId {
+ public:
+ explicit BailoutId(int id) : id_(id) { }
+ int ToInt() const { return id_; }
+
+ static BailoutId None() { return BailoutId(kNoneId); }
+ static BailoutId FunctionEntry() { return BailoutId(kFunctionEntryId); }
+ static BailoutId Declarations() { return BailoutId(kDeclarationsId); }
+ static BailoutId FirstUsable() { return BailoutId(kFirstUsableId); }
+
+ bool IsNone() const { return id_ == kNoneId; }
+ bool operator==(const BailoutId& other) const { return id_ == other.id_; }
+
+ private:
+ static const int kNoneId = -1;
+
+ // Using 0 could disguise errors.
+ static const int kFunctionEntryId = 2;
+
+ // This AST id identifies the point after the declarations have been visited.
+ // We need it to capture the environment effects of declarations that emit
+ // code (function declarations).
+ static const int kDeclarationsId = 3;
+
+ // Ever FunctionState starts with this id.
+ static const int kFirstUsableId = 4;
+
+ int id_;
+};
+
} } // namespace v8::internal
#endif // V8_UTILS_H_
diff --git a/src/3rdparty/v8/src/v8-counters.cc b/src/3rdparty/v8/src/v8-counters.cc
index c6aa9cb..3f83dff 100644
--- a/src/3rdparty/v8/src/v8-counters.cc
+++ b/src/3rdparty/v8/src/v8-counters.cc
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,11 +34,23 @@ namespace internal {
Counters::Counters() {
#define HT(name, caption) \
- HistogramTimer name = { #caption, NULL, false, 0, 0 }; \
+ HistogramTimer name = { {#caption, 0, 10000, 50, NULL, false}, 0, 0 }; \
name##_ = name;
HISTOGRAM_TIMER_LIST(HT)
#undef HT
+#define HP(name, caption) \
+ Histogram name = { #caption, 0, 101, 100, NULL, false }; \
+ name##_ = name;
+ HISTOGRAM_PERCENTAGE_LIST(HP)
+#undef HP
+
+#define HM(name, caption) \
+ Histogram name = { #caption, 1000, 500000, 50, NULL, false }; \
+ name##_ = name;
+ HISTOGRAM_MEMORY_LIST(HM)
+#undef HM
+
#define SC(name, caption) \
StatsCounter name = { "c:" #caption, NULL, false };\
name##_ = name;
@@ -47,6 +59,34 @@ Counters::Counters() {
STATS_COUNTER_LIST_2(SC)
#undef SC
+#define SC(name) \
+ StatsCounter count_of_##name = { "c:" "V8.CountOf_" #name, NULL, false };\
+ count_of_##name##_ = count_of_##name; \
+ StatsCounter size_of_##name = { "c:" "V8.SizeOf_" #name, NULL, false };\
+ size_of_##name##_ = size_of_##name;
+ INSTANCE_TYPE_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter count_of_CODE_TYPE_##name = { \
+ "c:" "V8.CountOf_CODE_TYPE-" #name, NULL, false }; \
+ count_of_CODE_TYPE_##name##_ = count_of_CODE_TYPE_##name; \
+ StatsCounter size_of_CODE_TYPE_##name = { \
+ "c:" "V8.SizeOf_CODE_TYPE-" #name, NULL, false }; \
+ size_of_CODE_TYPE_##name##_ = size_of_CODE_TYPE_##name;
+ CODE_KIND_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter count_of_FIXED_ARRAY_##name = { \
+ "c:" "V8.CountOf_FIXED_ARRAY-" #name, NULL, false }; \
+ count_of_FIXED_ARRAY_##name##_ = count_of_FIXED_ARRAY_##name; \
+ StatsCounter size_of_FIXED_ARRAY_##name = { \
+ "c:" "V8.SizeOf_FIXED_ARRAY-" #name, NULL, false }; \
+ size_of_FIXED_ARRAY_##name##_ = size_of_FIXED_ARRAY_##name;
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
+#undef SC
+
StatsCounter state_counters[] = {
#define COUNTER_NAME(name) \
{ "c:V8.State" #name, NULL, false },
@@ -59,4 +99,18 @@ Counters::Counters() {
}
}
+void Counters::ResetHistograms() {
+#define HT(name, caption) name##_.Reset();
+ HISTOGRAM_TIMER_LIST(HT)
+#undef HT
+
+#define HP(name, caption) name##_.Reset();
+ HISTOGRAM_PERCENTAGE_LIST(HP)
+#undef HP
+
+#define HM(name, caption) name##_.Reset();
+ HISTOGRAM_MEMORY_LIST(HM)
+#undef HM
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/v8-counters.h b/src/3rdparty/v8/src/v8-counters.h
index 6db9c77..fad3454 100644
--- a/src/3rdparty/v8/src/v8-counters.h
+++ b/src/3rdparty/v8/src/v8-counters.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,6 +30,7 @@
#include "allocation.h"
#include "counters.h"
+#include "objects.h"
#include "v8globals.h"
namespace v8 {
@@ -50,6 +51,36 @@ namespace internal {
HT(compile_lazy, V8.CompileLazy)
+#define HISTOGRAM_PERCENTAGE_LIST(HP) \
+ HP(external_fragmentation_total, \
+ V8.MemoryExternalFragmentationTotal) \
+ HP(external_fragmentation_old_pointer_space, \
+ V8.MemoryExternalFragmentationOldPointerSpace) \
+ HP(external_fragmentation_old_data_space, \
+ V8.MemoryExternalFragmentationOldDataSpace) \
+ HP(external_fragmentation_code_space, \
+ V8.MemoryExternalFragmentationCodeSpace) \
+ HP(external_fragmentation_map_space, \
+ V8.MemoryExternalFragmentationMapSpace) \
+ HP(external_fragmentation_cell_space, \
+ V8.MemoryExternalFragmentationCellSpace) \
+ HP(external_fragmentation_lo_space, \
+ V8.MemoryExternalFragmentationLoSpace) \
+ HP(heap_fraction_map_space, \
+ V8.MemoryHeapFractionMapSpace) \
+ HP(heap_fraction_cell_space, \
+ V8.MemoryHeapFractionCellSpace) \
+
+
+#define HISTOGRAM_MEMORY_LIST(HM) \
+ HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
+ HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
+ HM(heap_sample_map_space_committed, \
+ V8.MemoryHeapSampleMapSpaceCommitted) \
+ HM(heap_sample_cell_space_committed, \
+ V8.MemoryHeapSampleCellSpaceCommitted)
+
+
// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
@@ -210,6 +241,9 @@ namespace internal {
SC(compute_entry_frame, V8.ComputeEntryFrame) \
SC(generic_binary_stub_calls, V8.GenericBinaryStubCalls) \
SC(generic_binary_stub_calls_regs, V8.GenericBinaryStubCallsRegs) \
+ SC(fast_new_closure_total, V8.FastNewClosureTotal) \
+ SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
+ SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
SC(string_add_runtime, V8.StringAddRuntime) \
SC(string_add_native, V8.StringAddNative) \
SC(string_add_runtime_ext_to_ascii, V8.StringAddRuntimeExtToAscii) \
@@ -240,14 +274,33 @@ namespace internal {
SC(transcendental_cache_miss, V8.TranscendentalCacheMiss) \
SC(stack_interrupts, V8.StackInterrupts) \
SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
- SC(other_ticks, V8.OtherTicks) \
- SC(js_opt_ticks, V8.JsOptTicks) \
- SC(js_non_opt_ticks, V8.JsNonoptTicks) \
- SC(js_other_ticks, V8.JsOtherTicks) \
SC(smi_checks_removed, V8.SmiChecksRemoved) \
SC(map_checks_removed, V8.MapChecksRemoved) \
SC(quote_json_char_count, V8.QuoteJsonCharacterCount) \
- SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount)
+ SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount) \
+ SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
+ SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
+ SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
+ SC(old_pointer_space_bytes_available, \
+ V8.MemoryOldPointerSpaceBytesAvailable) \
+ SC(old_pointer_space_bytes_committed, \
+ V8.MemoryOldPointerSpaceBytesCommitted) \
+ SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
+ SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
+ SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
+ SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
+ SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
+ SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
+ SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
+ SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
+ SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
+ SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
+ SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
+ SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
+ SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
+ SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
+ SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
+ SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)
// This file contains all the v8 counters that are in use.
@@ -258,20 +311,69 @@ class Counters {
HISTOGRAM_TIMER_LIST(HT)
#undef HT
+#define HP(name, caption) \
+ Histogram* name() { return &name##_; }
+ HISTOGRAM_PERCENTAGE_LIST(HP)
+#undef HP
+
+#define HM(name, caption) \
+ Histogram* name() { return &name##_; }
+ HISTOGRAM_MEMORY_LIST(HM)
+#undef HM
+
#define SC(name, caption) \
StatsCounter* name() { return &name##_; }
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
+#define SC(name) \
+ StatsCounter* count_of_##name() { return &count_of_##name##_; } \
+ StatsCounter* size_of_##name() { return &size_of_##name##_; }
+ INSTANCE_TYPE_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter* count_of_CODE_TYPE_##name() \
+ { return &count_of_CODE_TYPE_##name##_; } \
+ StatsCounter* size_of_CODE_TYPE_##name() \
+ { return &size_of_CODE_TYPE_##name##_; }
+ CODE_KIND_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter* count_of_FIXED_ARRAY_##name() \
+ { return &count_of_FIXED_ARRAY_##name##_; } \
+ StatsCounter* size_of_FIXED_ARRAY_##name() \
+ { return &size_of_FIXED_ARRAY_##name##_; }
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
+#undef SC
+
enum Id {
#define RATE_ID(name, caption) k_##name,
HISTOGRAM_TIMER_LIST(RATE_ID)
#undef RATE_ID
+#define PERCENTAGE_ID(name, caption) k_##name,
+ HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
+#undef PERCENTAGE_ID
+#define MEMORY_ID(name, caption) k_##name,
+ HISTOGRAM_MEMORY_LIST(MEMORY_ID)
+#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
STATS_COUNTER_LIST_1(COUNTER_ID)
STATS_COUNTER_LIST_2(COUNTER_ID)
#undef COUNTER_ID
+#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
+ INSTANCE_TYPE_LIST(COUNTER_ID)
+#undef COUNTER_ID
+#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
+ kSizeOfCODE_TYPE_##name,
+ CODE_KIND_LIST(COUNTER_ID)
+#undef COUNTER_ID
+#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
+ kSizeOfFIXED_ARRAY__##name,
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
+#undef COUNTER_ID
#define COUNTER_ID(name) k_##name,
STATE_TAG_LIST(COUNTER_ID)
#undef COUNTER_ID
@@ -282,18 +384,48 @@ class Counters {
return &state_counters_[state];
}
+ void ResetHistograms();
+
private:
#define HT(name, caption) \
HistogramTimer name##_;
HISTOGRAM_TIMER_LIST(HT)
#undef HT
+#define HP(name, caption) \
+ Histogram name##_;
+ HISTOGRAM_PERCENTAGE_LIST(HP)
+#undef HP
+
+#define HM(name, caption) \
+ Histogram name##_;
+ HISTOGRAM_MEMORY_LIST(HM)
+#undef HM
+
#define SC(name, caption) \
StatsCounter name##_;
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
+#define SC(name) \
+ StatsCounter size_of_##name##_; \
+ StatsCounter count_of_##name##_;
+ INSTANCE_TYPE_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter size_of_CODE_TYPE_##name##_; \
+ StatsCounter count_of_CODE_TYPE_##name##_;
+ CODE_KIND_LIST(SC)
+#undef SC
+
+#define SC(name) \
+ StatsCounter size_of_FIXED_ARRAY_##name##_; \
+ StatsCounter count_of_FIXED_ARRAY_##name##_;
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
+#undef SC
+
enum {
#define COUNTER_ID(name) __##name,
STATE_TAG_LIST(COUNTER_ID)
diff --git a/src/3rdparty/v8/src/v8.cc b/src/3rdparty/v8/src/v8.cc
index 2910a07..7d01582 100644
--- a/src/3rdparty/v8/src/v8.cc
+++ b/src/3rdparty/v8/src/v8.cc
@@ -38,6 +38,7 @@
#include "hydrogen.h"
#include "lithium-allocator.h"
#include "log.h"
+#include "objects.h"
#include "once.h"
#include "platform.h"
#include "runtime-profiler.h"
@@ -106,13 +107,16 @@ void V8::TearDown() {
if (!has_been_set_up_ || has_been_disposed_) return;
+ // The isolate has to be torn down before clearing the LOperand
+ // caches so that the optimizing compiler thread (if running)
+ // doesn't see an inconsistent view of the lithium instructions.
+ isolate->TearDown();
+ delete isolate;
+
ElementsAccessor::TearDown();
LOperand::TearDownCaches();
RegisteredExtension::UnregisterAll();
- isolate->TearDown();
- delete isolate;
-
is_running_ = false;
has_been_disposed_ = true;
@@ -166,7 +170,7 @@ void V8::SetReturnAddressLocationResolver(
// Used by JavaScript APIs
uint32_t V8::Random(Context* context) {
- ASSERT(context->IsGlobalContext());
+ ASSERT(context->IsNativeContext());
ByteArray* seed = context->random_seed();
return random_base(reinterpret_cast<uint32_t*>(seed->GetDataStartAddress()));
}
@@ -213,14 +217,22 @@ void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) {
void V8::FireCallCompletedCallback(Isolate* isolate) {
- if (call_completed_callbacks_ == NULL) return;
+ bool has_call_completed_callbacks = call_completed_callbacks_ != NULL;
+ bool observer_delivery_pending =
+ FLAG_harmony_observation && isolate->observer_delivery_pending();
+ if (!has_call_completed_callbacks && !observer_delivery_pending) return;
HandleScopeImplementer* handle_scope_implementer =
isolate->handle_scope_implementer();
if (!handle_scope_implementer->CallDepthIsZero()) return;
// Fire callbacks. Increase call depth to prevent recursive callbacks.
handle_scope_implementer->IncrementCallDepth();
- for (int i = 0; i < call_completed_callbacks_->length(); i++) {
- call_completed_callbacks_->at(i)();
+ if (observer_delivery_pending) {
+ JSObject::DeliverChangeRecords(isolate);
+ }
+ if (has_call_completed_callbacks) {
+ for (int i = 0; i < call_completed_callbacks_->length(); i++) {
+ call_completed_callbacks_->at(i)();
+ }
}
handle_scope_implementer->DecrementCallDepth();
}
diff --git a/src/3rdparty/v8/src/v8.h b/src/3rdparty/v8/src/v8.h
index 59ce602..201cdf2 100644
--- a/src/3rdparty/v8/src/v8.h
+++ b/src/3rdparty/v8/src/v8.h
@@ -48,6 +48,11 @@
#error both DEBUG and NDEBUG are set
#endif
+// For Windows CE, Windows headers need to be included first as they define ASSERT
+#ifdef _WIN32_WCE
+# include "win32-headers.h"
+#endif
+
// Basic includes
#include "../include/v8.h"
#include "v8globals.h"
@@ -65,6 +70,7 @@
#include "log-inl.h"
#include "cpu-profiler-inl.h"
#include "handles-inl.h"
+#include "zone-inl.h"
namespace v8 {
namespace internal {
diff --git a/src/3rdparty/v8/src/v8globals.h b/src/3rdparty/v8/src/v8globals.h
index 6a1766a..95390ad 100644
--- a/src/3rdparty/v8/src/v8globals.h
+++ b/src/3rdparty/v8/src/v8globals.h
@@ -52,15 +52,6 @@ const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
const intptr_t kDoubleAlignment = 8;
const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
-// Desired alignment for maps.
-#if V8_HOST_ARCH_64_BIT
-const intptr_t kMapAlignmentBits = kObjectAlignmentBits;
-#else
-const intptr_t kMapAlignmentBits = kObjectAlignmentBits + 3;
-#endif
-const intptr_t kMapAlignment = (1 << kMapAlignmentBits);
-const intptr_t kMapAlignmentMask = kMapAlignment - 1;
-
// Desired alignment for generated code is 32 bytes (to improve cache line
// utilization).
const int kCodeAlignmentBits = 5;
@@ -94,6 +85,7 @@ const uint32_t kDebugZapValue = 0xbadbaddb;
const uint32_t kFreeListZapValue = 0xfeed1eaf;
#endif
+const int kCodeZapValue = 0xbadc0de;
// Number of bits to represent the page size for paged spaces. The value of 20
// gives 1Mb bytes per page.
@@ -126,6 +118,7 @@ class Debugger;
class DebugInfo;
class Descriptor;
class DescriptorArray;
+class TransitionArray;
class ExternalReference;
class FixedArray;
class FunctionTemplateInfo;
@@ -311,14 +304,6 @@ typedef void (*StoreBufferCallback)(Heap* heap,
StoreBufferEvent event);
-// Whether to remove map transitions and constant transitions from a
-// DescriptorArray.
-enum TransitionFlag {
- REMOVE_TRANSITIONS,
- KEEP_TRANSITIONS
-};
-
-
// Union used for fast testing of specific double values.
union DoubleRepresentation {
double value;
@@ -366,11 +351,12 @@ struct AccessorDescriptor {
// VMState object leaves a state by popping the current state from the
// stack.
-#define STATE_TAG_LIST(V) \
- V(JS) \
- V(GC) \
- V(COMPILER) \
- V(OTHER) \
+#define STATE_TAG_LIST(V) \
+ V(JS) \
+ V(GC) \
+ V(COMPILER) \
+ V(PARALLEL_COMPILER_PROLOGUE) \
+ V(OTHER) \
V(EXTERNAL)
enum StateTag {
@@ -401,10 +387,6 @@ enum StateTag {
#define POINTER_SIZE_ALIGN(value) \
(((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)
-// MAP_POINTER_ALIGN returns the value aligned as a map pointer.
-#define MAP_POINTER_ALIGN(value) \
- (((value) + kMapAlignmentMask) & ~kMapAlignmentMask)
-
// CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
#define CODE_POINTER_ALIGN(value) \
(((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
@@ -430,6 +412,13 @@ enum StateTag {
#endif
+enum CpuImplementer {
+ UNKNOWN_IMPLEMENTER,
+ ARM_IMPLEMENTER,
+ QUALCOMM_IMPLEMENTER
+};
+
+
// Feature flags bit positions. They are mostly based on the CPUID spec.
// (We assign CPUID itself to one of the currently reserved bits --
// feel free to change this if needed.)
@@ -442,6 +431,10 @@ enum CpuFeature { SSE4_1 = 32 + 19, // x86
CPUID = 10, // x86
VFP3 = 1, // ARM
ARMv7 = 2, // ARM
+ VFP2 = 3, // ARM
+ SUDIV = 4, // ARM
+ UNALIGNED_ACCESSES = 5, // ARM
+ MOVW_MOVT_IMMEDIATE_LOADS = 6, // ARM
SAHF = 0, // x86
FPU = 1}; // MIPS
@@ -483,16 +476,17 @@ const uint64_t kLastNonNaNInt64 =
(static_cast<uint64_t>(kNaNOrInfinityLowerBoundUpper32) << 32);
+// The order of this enum has to be kept in sync with the predicates below.
enum VariableMode {
// User declared variables:
VAR, // declared via 'var', and 'function' declarations
CONST, // declared via 'const' declarations
- CONST_HARMONY, // declared via 'const' declarations in harmony mode
-
LET, // declared via 'let' declarations
+ CONST_HARMONY, // declared via 'const' declarations in harmony mode
+
// Variables introduced by the compiler:
DYNAMIC, // always require dynamic lookup (we don't know
// the declaration)
@@ -514,6 +508,26 @@ enum VariableMode {
};
+inline bool IsDynamicVariableMode(VariableMode mode) {
+ return mode >= DYNAMIC && mode <= DYNAMIC_LOCAL;
+}
+
+
+inline bool IsDeclaredVariableMode(VariableMode mode) {
+ return mode >= VAR && mode <= CONST_HARMONY;
+}
+
+
+inline bool IsLexicalVariableMode(VariableMode mode) {
+ return mode >= LET && mode <= CONST_HARMONY;
+}
+
+
+inline bool IsImmutableVariableMode(VariableMode mode) {
+ return mode == CONST || mode == CONST_HARMONY;
+}
+
+
// ES6 Draft Rev3 10.2 specifies declarative environment records with mutable
// and immutable bindings that can be in two states: initialized and
// uninitialized. In ES5 only immutable bindings have these two states. When
diff --git a/src/3rdparty/v8/src/v8natives.js b/src/3rdparty/v8/src/v8natives.js
index f1e8084..20fc74d 100644
--- a/src/3rdparty/v8/src/v8natives.js
+++ b/src/3rdparty/v8/src/v8natives.js
@@ -60,7 +60,17 @@ function InstallFunctions(object, attributes, functions) {
%ToFastProperties(object);
}
-// Prevents changes to the prototype of a built-infunction.
+
+// Helper function to install a getter only property.
+function InstallGetter(object, name, getter) {
+ %FunctionSetName(getter, name);
+ %FunctionRemovePrototype(getter);
+ %DefineOrRedefineAccessorProperty(object, name, getter, null, DONT_ENUM);
+ %SetNativeFlag(getter);
+}
+
+
+// Prevents changes to the prototype of a built-in function.
// The "prototype" property of the function object is made non-configurable,
// and the prototype object is made non-extensible. The latter prevents
// changing the __proto__ property.
@@ -337,7 +347,7 @@ function ObjectKeys(obj) {
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var names = CallTrap0(handler, "keys", DerivedKeysTrap);
- return ToStringArray(names);
+ return ToStringArray(names, "keys");
}
return %LocalKeys(obj);
}
@@ -963,7 +973,7 @@ function ToStringArray(obj, trap) {
var names = {}; // TODO(rossberg): use sets once they are ready.
for (var index = 0; index < n; index++) {
var s = ToString(obj[index]);
- if (s in names) {
+ if (%HasLocalProperty(names, s)) {
throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s]);
}
array[index] = s;
@@ -1654,7 +1664,9 @@ function NewFunction(arg1) { // length == 1
// The call to SetNewFunctionAttributes will ensure the prototype
// property of the resulting function is enumerable (ECMA262, 15.3.5.2).
- var f = %CompileString(source)();
+ var global_receiver = %GlobalReceiver(global);
+ var f = %_CallFunction(global_receiver, %CompileString(source));
+
%FunctionMarkNameShouldPrintAsAnonymous(f);
return %SetNewFunctionAttributes(f);
}
diff --git a/src/3rdparty/v8/src/v8threads.cc b/src/3rdparty/v8/src/v8threads.cc
index fd8d536..32ea5e1 100644
--- a/src/3rdparty/v8/src/v8threads.cc
+++ b/src/3rdparty/v8/src/v8threads.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -238,12 +238,18 @@ static int ArchiveSpacePerThread() {
ThreadState::ThreadState(ThreadManager* thread_manager)
: id_(ThreadId::Invalid()),
terminate_on_restore_(false),
+ data_(NULL),
next_(this),
previous_(this),
thread_manager_(thread_manager) {
}
+ThreadState::~ThreadState() {
+ DeleteArray<char>(data_);
+}
+
+
void ThreadState::AllocateSpace() {
data_ = NewArray<char>(ArchiveSpacePerThread());
}
@@ -306,8 +312,19 @@ ThreadManager::ThreadManager()
ThreadManager::~ThreadManager() {
delete mutex_;
- delete free_anchor_;
- delete in_use_anchor_;
+ DeleteThreadStateList(free_anchor_);
+ DeleteThreadStateList(in_use_anchor_);
+}
+
+
+void ThreadManager::DeleteThreadStateList(ThreadState* anchor) {
+ // The list starts and ends with the anchor.
+ for (ThreadState* current = anchor->next_; current != anchor;) {
+ ThreadState* next = current->next_;
+ delete current;
+ current = next;
+ }
+ delete anchor;
}
diff --git a/src/3rdparty/v8/src/v8threads.h b/src/3rdparty/v8/src/v8threads.h
index a2aee4e..8dce860 100644
--- a/src/3rdparty/v8/src/v8threads.h
+++ b/src/3rdparty/v8/src/v8threads.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -57,6 +57,7 @@ class ThreadState {
private:
explicit ThreadState(ThreadManager* thread_manager);
+ ~ThreadState();
void AllocateSpace();
@@ -114,6 +115,8 @@ class ThreadManager {
ThreadManager();
~ThreadManager();
+ void DeleteThreadStateList(ThreadState* anchor);
+
void EagerlyArchiveThread();
Mutex* mutex_;
diff --git a/src/3rdparty/v8/src/v8utils.cc b/src/3rdparty/v8/src/v8utils.cc
index 042a60f..4ab97ed 100644
--- a/src/3rdparty/v8/src/v8utils.cc
+++ b/src/3rdparty/v8/src/v8utils.cc
@@ -31,7 +31,9 @@
#include "platform.h"
+#ifndef _WIN32_WCE
#include "sys/stat.h"
+#endif
namespace v8 {
namespace internal {
@@ -45,7 +47,7 @@ void PrintF(const char* format, ...) {
}
-void PrintF(FILE* out, const char* format, ...) {
+void FPrintF(FILE* out, const char* format, ...) {
va_list arguments;
va_start(arguments, format);
OS::VFPrint(out, format, arguments);
@@ -53,6 +55,15 @@ void PrintF(FILE* out, const char* format, ...) {
}
+void PrintPID(const char* format, ...) {
+ OS::Print("[%d] ", OS::GetCurrentProcessId());
+ va_list arguments;
+ va_start(arguments, format);
+ OS::VPrint(format, arguments);
+ va_end(arguments);
+}
+
+
void Flush(FILE* out) {
fflush(out);
}
@@ -124,7 +135,11 @@ char* ReadCharsFromFile(FILE* file,
// Get the size of the file and rewind it.
*size = ftell(file);
+#ifdef _WIN32_WCE
+ fseek(file, 0, SEEK_SET);
+#else
rewind(file);
+#endif // _WIN32_WCE
char* result = NewArray<char>(*size + extra_space);
for (int i = 0; i < *size && feof(file) == 0;) {
diff --git a/src/3rdparty/v8/src/v8utils.h b/src/3rdparty/v8/src/v8utils.h
index bb587e1..cb018e6 100644
--- a/src/3rdparty/v8/src/v8utils.h
+++ b/src/3rdparty/v8/src/v8utils.h
@@ -55,7 +55,10 @@ namespace internal {
// Our version of printf().
void PRINTF_CHECKING PrintF(const char* format, ...);
-void FPRINTF_CHECKING PrintF(FILE* out, const char* format, ...);
+void FPRINTF_CHECKING FPrintF(FILE* out, const char* format, ...);
+
+// Prepends the current process ID to the output.
+void PRINTF_CHECKING PrintPID(const char* format, ...);
// Our version of fflush.
void Flush(FILE* out);
diff --git a/src/3rdparty/v8/src/variables.cc b/src/3rdparty/v8/src/variables.cc
index 4984cbd..3e735d6 100644
--- a/src/3rdparty/v8/src/variables.cc
+++ b/src/3rdparty/v8/src/variables.cc
@@ -41,7 +41,7 @@ const char* Variable::Mode2String(VariableMode mode) {
switch (mode) {
case VAR: return "VAR";
case CONST: return "CONST";
- case CONST_HARMONY: return "CONST";
+ case CONST_HARMONY: return "CONST_HARMONY";
case LET: return "LET";
case DYNAMIC: return "DYNAMIC";
case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL";
@@ -82,10 +82,11 @@ Variable::Variable(Scope* scope,
}
-bool Variable::is_global() const {
+bool Variable::IsGlobalObjectProperty() const {
// Temporaries are never global, they must always be allocated in the
// activation frame.
- return mode_ != TEMPORARY && scope_ != NULL && scope_->is_global_scope();
+ return mode_ != TEMPORARY && !IsLexicalVariableMode(mode_)
+ && scope_ != NULL && scope_->is_global_scope();
}
diff --git a/src/3rdparty/v8/src/variables.h b/src/3rdparty/v8/src/variables.h
index 43b2c81..d4e851b 100644
--- a/src/3rdparty/v8/src/variables.h
+++ b/src/3rdparty/v8/src/variables.h
@@ -55,7 +55,7 @@ class Variable: public ZoneObject {
UNALLOCATED,
// A slot in the parameter section on the stack. index() is the
- // parameter index, counting left-to-right. The reciever is index -1;
+ // parameter index, counting left-to-right. The receiver is index -1;
// the first parameter is index 0.
PARAMETER,
@@ -118,21 +118,14 @@ class Variable: public ZoneObject {
bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
bool IsContextSlot() const { return location_ == CONTEXT; }
bool IsLookupSlot() const { return location_ == LOOKUP; }
+ bool IsGlobalObjectProperty() const;
- bool is_dynamic() const {
- return (mode_ == DYNAMIC ||
- mode_ == DYNAMIC_GLOBAL ||
- mode_ == DYNAMIC_LOCAL);
- }
- bool is_const_mode() const {
- return (mode_ == CONST ||
- mode_ == CONST_HARMONY);
- }
+ bool is_dynamic() const { return IsDynamicVariableMode(mode_); }
+ bool is_const_mode() const { return IsImmutableVariableMode(mode_); }
bool binding_needs_init() const {
return initialization_flag_ == kNeedsInitialization;
}
- bool is_global() const;
bool is_this() const { return kind_ == THIS; }
bool is_arguments() const { return kind_ == ARGUMENTS; }
diff --git a/src/3rdparty/v8/src/version.cc b/src/3rdparty/v8/src/version.cc
index 79b6ebd..3132b5e 100644
--- a/src/3rdparty/v8/src/version.cc
+++ b/src/3rdparty/v8/src/version.cc
@@ -33,8 +33,8 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
-#define MINOR_VERSION 11
-#define BUILD_NUMBER 4
+#define MINOR_VERSION 15
+#define BUILD_NUMBER 2
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/3rdparty/v8/src/vm-state-inl.h b/src/3rdparty/v8/src/vm-state-inl.h
index c647e56..384940d 100644
--- a/src/3rdparty/v8/src/vm-state-inl.h
+++ b/src/3rdparty/v8/src/vm-state-inl.h
@@ -47,6 +47,8 @@ inline const char* StateToString(StateTag state) {
return "GC";
case COMPILER:
return "COMPILER";
+ case PARALLEL_COMPILER_PROLOGUE:
+ return "PARALLEL_COMPILER_PROLOGUE";
case OTHER:
return "OTHER";
case EXTERNAL:
diff --git a/src/3rdparty/v8/src/win32-headers.h b/src/3rdparty/v8/src/win32-headers.h
index 5d9c89e..b476efe 100644
--- a/src/3rdparty/v8/src/win32-headers.h
+++ b/src/3rdparty/v8/src/win32-headers.h
@@ -56,7 +56,9 @@
#include <windows.h>
#ifdef V8_WIN32_HEADERS_FULL
+#ifndef _WIN32_WCE
#include <signal.h> // For raise().
+#endif // _WIN32_WCE
#include <time.h> // For LocalOffset() implementation.
#include <mmsystem.h> // For timeGetTime().
#ifdef __MINGW32__
@@ -65,10 +67,10 @@
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x501
#endif // __MINGW32__
-#if !defined(__MINGW32__) || defined(__MINGW64_VERSION_MAJOR)
+#if (!defined(__MINGW32__) || defined(__MINGW64_VERSION_MAJOR)) && !defined(_WIN32_WCE)
#include <dbghelp.h> // For SymLoadModule64 and al.
#include <errno.h> // For STRUNCATE
-#endif // !defined(__MINGW32__) || defined(__MINGW64_VERSION_MAJOR)
+#endif  // (!defined(__MINGW32__) || defined(__MINGW64_VERSION_MAJOR)) && !defined(_WIN32_WCE)
#include <limits.h> // For INT_MAX and al.
#include <tlhelp32.h> // For Module32First and al.
@@ -76,13 +78,26 @@
// makes it impossible to have them elsewhere.
#include <winsock2.h>
#include <ws2tcpip.h>
-#ifndef __MINGW32__
+#if !defined(__MINGW32__) && !defined(_WIN32_WCE)
#include <wspiapi.h>
-#endif // __MINGW32__
+#endif  // !defined(__MINGW32__) && !defined(_WIN32_WCE)
+#ifndef _WIN32_WCE
#include <process.h> // For _beginthreadex().
+#endif
#include <stdlib.h>
#endif // V8_WIN32_HEADERS_FULL
+#ifdef _WIN32_WCE
+#ifdef DebugBreak
+#undef DebugBreak
+inline void DebugBreak() { __debugbreak(); };
+#endif // DebugBreak
+
+#ifndef _IOFBF
+#define _IOFBF 0x0000
+#endif
+#endif
+
#undef VOID
#undef DELETE
#undef IN
@@ -98,3 +113,4 @@
#undef GetObject
#undef CreateMutex
#undef CreateSemaphore
+#undef interface
diff --git a/src/3rdparty/v8/src/x64/assembler-x64-inl.h b/src/3rdparty/v8/src/x64/assembler-x64-inl.h
index a9cc2ef..f864174 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64-inl.h
+++ b/src/3rdparty/v8/src/x64/assembler-x64-inl.h
@@ -42,6 +42,9 @@ namespace internal {
// Implementation of Assembler
+static const byte kCallOpcode = 0xE8;
+
+
void Assembler::emitl(uint32_t x) {
Memory::uint32_at(pc_) = x;
pc_ += sizeof(uint32_t);
@@ -65,10 +68,10 @@ void Assembler::emitw(uint16_t x) {
void Assembler::emit_code_target(Handle<Code> target,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
ASSERT(RelocInfo::IsCodeTarget(rmode));
- if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
- RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
+ if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
+ RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
} else {
RecordRelocInfo(rmode);
}
@@ -195,6 +198,12 @@ void Assembler::set_target_address_at(Address pc, Address target) {
CPU::FlushICache(pc, sizeof(int32_t));
}
+
+Address Assembler::target_address_from_return_address(Address pc) {
+ return pc - kCallTargetAddressOffset;
+}
+
+
Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
return code_targets_[Memory::int32_at(pc)];
}
@@ -211,6 +220,12 @@ void RelocInfo::apply(intptr_t delta) {
} else if (IsCodeTarget(rmode_)) {
Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
CPU::FlushICache(pc_, sizeof(int32_t));
+ } else if (rmode_ == CODE_AGE_SEQUENCE) {
+ if (*pc_ == kCallOpcode) {
+ int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+ *p -= static_cast<int32_t>(delta); // Relocate entry.
+ CPU::FlushICache(p, sizeof(uint32_t));
+ }
}
}
@@ -309,10 +324,7 @@ Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
JSGlobalPropertyCell* RelocInfo::target_cell() {
ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
- Address address = Memory::Address_at(pc_);
- Object* object = HeapObject::FromAddress(
- address - JSGlobalPropertyCell::kValueOffset);
- return reinterpret_cast<JSGlobalPropertyCell*>(object);
+ return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}
@@ -352,6 +364,21 @@ bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
}
+Code* RelocInfo::code_age_stub() {
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ ASSERT(*pc_ == kCallOpcode);
+ return Code::GetCodeFromTargetAddress(
+ Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+ ASSERT(*pc_ == kCallOpcode);
+ ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+ Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}
+
+
Address RelocInfo::call_address() {
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
@@ -405,6 +432,8 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -433,6 +462,8 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
+ } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+ StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
diff --git a/src/3rdparty/v8/src/x64/assembler-x64.cc b/src/3rdparty/v8/src/x64/assembler-x64.cc
index 2f0c542..1f5bea9 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/assembler-x64.cc
@@ -75,6 +75,7 @@ void CpuFeatures::Probe() {
// Save old rsp, since we are going to modify the stack.
__ push(rbp);
__ pushfq();
+ __ push(rdi);
__ push(rcx);
__ push(rbx);
__ movq(rbp, rsp);
@@ -128,6 +129,7 @@ void CpuFeatures::Probe() {
__ movq(rsp, rbp);
__ pop(rbx);
__ pop(rcx);
+ __ pop(rdi);
__ popfq();
__ pop(rbp);
__ ret(0);
@@ -347,8 +349,7 @@ static void InitCoverageLog();
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
code_targets_(100),
- positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code) {
+ positions_recorder_(this) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -467,7 +468,7 @@ void Assembler::bind_to(Label* L, int pos) {
static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
ASSERT(offset_to_next <= 0);
int disp = pos - (fixup_pos + sizeof(int8_t));
- ASSERT(is_int8(disp));
+ CHECK(is_int8(disp));
set_byte_at(fixup_pos, disp);
if (offset_to_next < 0) {
L->link_to(fixup_pos + offset_to_next, Label::kNear);
@@ -875,7 +876,7 @@ void Assembler::call(Label* L) {
void Assembler::call(Handle<Code> target,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
// 1110 1000 #32-bit disp.
@@ -1232,7 +1233,16 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
const int long_size = 6;
int offs = L->pos() - pc_offset();
ASSERT(offs <= 0);
- if (is_int8(offs - short_size)) {
+ // Determine whether we can use 1-byte offsets for backwards branches,
+ // which have a max range of 128 bytes.
+
+ // We also need to check predictable_code_size() flag here, because on x64,
+ // when the full code generator recompiles code for debugging, some places
+ // need to be padded out to a certain size. The debugger is keeping track of
+ // how often it did this so that it can adjust return addresses on the
+ // stack, but if the size of jump instructions can also change, that's not
+ // enough and the calculated offsets would be incorrect.
+ if (is_int8(offs - short_size) && !predictable_code_size()) {
// 0111 tttn #8-bit disp.
emit(0x70 | cc);
emit((offs - short_size) & 0xFF);
@@ -1289,7 +1299,7 @@ void Assembler::jmp(Label* L, Label::Distance distance) {
if (L->is_bound()) {
int offs = L->pos() - pc_offset() - 1;
ASSERT(offs <= 0);
- if (is_int8(offs - short_size)) {
+ if (is_int8(offs - short_size) && !predictable_code_size()) {
// 1110 1011 #8-bit disp.
emit(0xEB);
emit((offs - short_size) & 0xFF);
@@ -3035,7 +3045,8 @@ void Assembler::RecordComment(const char* msg, bool force) {
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
- 1 << RelocInfo::INTERNAL_REFERENCE;
+ 1 << RelocInfo::INTERNAL_REFERENCE |
+ 1 << RelocInfo::CODE_AGE_SEQUENCE;
bool RelocInfo::IsCodedSpecially() {
diff --git a/src/3rdparty/v8/src/x64/assembler-x64.h b/src/3rdparty/v8/src/x64/assembler-x64.h
index 9f5f850..5f9e147 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/assembler-x64.h
@@ -455,6 +455,7 @@ class CpuFeatures : public AllStatic {
ASSERT(initialized_);
if (f == SSE2 && !FLAG_enable_sse2) return false;
if (f == SSE3 && !FLAG_enable_sse3) return false;
+ if (f == SSE4_1 && !FLAG_enable_sse4_1) return false;
if (f == CMOV && !FLAG_enable_cmov) return false;
if (f == RDTSC && !FLAG_enable_rdtsc) return false;
if (f == SAHF && !FLAG_enable_sahf) return false;
@@ -557,9 +558,6 @@ class Assembler : public AssemblerBase {
Assembler(Isolate* isolate, void* buffer, int buffer_size);
~Assembler();
- // Overrides the default provided by FLAG_debug_code.
- void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -575,6 +573,10 @@ class Assembler : public AssemblerBase {
static inline Address target_address_at(Address pc);
static inline void set_target_address_at(Address pc, Address target);
+ // Return the code target address at a call site from the return address
+ // of that call in the instruction stream.
+ static inline Address target_address_from_return_address(Address pc);
+
// This sets the branch destination (which is in the instruction on x64).
// This is for calls and branches within generated code.
inline static void deserialization_set_special_target_at(
@@ -614,6 +616,7 @@ class Assembler : public AssemblerBase {
static const int kCallInstructionLength = 13;
static const int kJSReturnSequenceLength = 13;
static const int kShortCallInstructionLength = 5;
+ static const int kPatchDebugBreakSlotReturnOffset = 4;
// The debug break slot must be able to contain a call instruction.
static const int kDebugBreakSlotLength = kCallInstructionLength;
@@ -1010,6 +1013,14 @@ class Assembler : public AssemblerBase {
shift(dst, imm8, 0x1);
}
+ void rorl(Register dst, Immediate imm8) {
+ shift_32(dst, imm8, 0x1);
+ }
+
+ void rorl_cl(Register dst) {
+ shift_32(dst, 0x1);
+ }
+
// Shifts dst:src left by cl bits, affecting only dst.
void shld(Register dst, Register src);
@@ -1201,7 +1212,7 @@ class Assembler : public AssemblerBase {
void call(Label* L);
void call(Handle<Code> target,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- unsigned ast_id = kNoASTId);
+ TypeFeedbackId ast_id = TypeFeedbackId::None());
// Calls directly to the given address using a relative offset.
// Should only ever be used in Code objects for calls within the
@@ -1430,9 +1441,6 @@ class Assembler : public AssemblerBase {
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
- protected:
- bool emit_debug_code() const { return emit_debug_code_; }
-
private:
byte* addr_at(int pos) { return buffer_ + pos; }
uint32_t long_at(int pos) {
@@ -1451,7 +1459,7 @@ class Assembler : public AssemblerBase {
inline void emitw(uint16_t x);
inline void emit_code_target(Handle<Code> target,
RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ TypeFeedbackId ast_id = TypeFeedbackId::None());
void emit(Immediate x) { emitl(x.value_); }
// Emits a REX prefix that encodes a 64-bit operand size and
@@ -1634,9 +1642,6 @@ class Assembler : public AssemblerBase {
List< Handle<Code> > code_targets_;
PositionsRecorder positions_recorder_;
-
- bool emit_debug_code_;
-
friend class PositionsRecorder;
};
diff --git a/src/3rdparty/v8/src/x64/builtins-x64.cc b/src/3rdparty/v8/src/x64/builtins-x64.cc
index 4e037ff..ed0ec68 100644
--- a/src/3rdparty/v8/src/x64/builtins-x64.cc
+++ b/src/3rdparty/v8/src/x64/builtins-x64.cc
@@ -73,6 +73,45 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+ __ movq(kScratchRegister,
+ FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ movq(kScratchRegister,
+ FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
+ __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
+ __ jmp(kScratchRegister);
+}
+
+
+void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ GenerateTailCallToSharedCode(masm);
+}
+
+
+void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Push a copy of the function onto the stack.
+ __ push(rdi);
+ // Push call kind information.
+ __ push(rcx);
+
+ __ push(rdi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kParallelRecompile, 1);
+
+ // Restore call kind information.
+ __ pop(rcx);
+ // Restore receiver.
+ __ pop(rdi);
+
+ // Tear down internal frame.
+ }
+
+ GenerateTailCallToSharedCode(masm);
+}
+
+
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -567,6 +606,46 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
}
+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+ // For now, we are relying on the fact that make_code_young doesn't do any
+ // garbage collection which allows us to save/restore the registers without
+ // worrying about which of them contain pointers. We also don't build an
+ // internal frame to make the code faster, since we shouldn't have to do stack
+ // crawls in MakeCodeYoung. This seems a bit fragile.
+
+ // Re-execute the code that was patched back to the young age when
+ // the stub returns.
+ __ subq(Operand(rsp, 0), Immediate(5));
+ __ Pushad();
+#ifdef _WIN64
+ __ movq(rcx, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#else
+ __ movq(rdi, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#endif
+ { // NOLINT
+ FrameScope scope(masm, StackFrame::MANUAL);
+ __ PrepareCallCFunction(1);
+ __ CallCFunction(
+ ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+ }
+ __ Popad();
+ __ ret(0);
+}
+
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+} \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
// Enter an internal frame.
@@ -711,9 +790,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// receiver.
__ bind(&use_global_receiver);
const int kGlobalIndex =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ movq(rbx, FieldOperand(rsi, kGlobalIndex));
- __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
__ movq(rbx, FieldOperand(rbx, kGlobalIndex));
__ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
@@ -896,9 +975,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Use the current global receiver object as the receiver.
__ bind(&use_global_receiver);
const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
__ movq(rbx, FieldOperand(rsi, kGlobalOffset));
- __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
__ movq(rbx, FieldOperand(rbx, kGlobalOffset));
__ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
@@ -977,7 +1056,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1076,7 +1155,8 @@ static void AllocateJSArray(MacroAssembler* masm,
Register scratch,
bool fill_with_hole,
Label* gc_required) {
- __ LoadInitialArrayMap(array_function, scratch, elements_array);
+ __ LoadInitialArrayMap(array_function, scratch,
+ elements_array, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ testq(array_size, array_size);
@@ -1303,10 +1383,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ jmp(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// rbx: JSArray
__ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r11,
kScratchRegister,
diff --git a/src/3rdparty/v8/src/x64/code-stubs-x64.cc b/src/3rdparty/v8/src/x64/code-stubs-x64.cc
index 7069829..06ce52a 100644
--- a/src/3rdparty/v8/src/x64/code-stubs-x64.cc
+++ b/src/3rdparty/v8/src/x64/code-stubs-x64.cc
@@ -62,9 +62,13 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure from the given function info in new
// space. Set the context to the current context in rsi.
+ Counters* counters = masm->isolate()->counters();
+
Label gc;
__ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
+ __ IncrementCounter(counters->fast_new_closure_total(), 1);
+
// Get the function info from the stack.
__ movq(rdx, Operand(rsp, 1 * kPointerSize));
@@ -72,36 +76,113 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
- // Compute the function map in the current global context and set that
+ // Compute the function map in the current native context and set that
// as the map of the allocated object.
- __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
- __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
- __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
+ __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
+ __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
+ __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
// Initialize the rest of the function. We don't have to update the
// write barrier because the allocated object is in new space.
__ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
- __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
__ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
__ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
- __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
+ __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), r8);
__ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
__ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
__ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
- __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ movq(rbx,
+ FieldOperand(rdx, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ testq(rbx, rbx);
+ __ j(not_zero, &check_optimized, Label::kNear);
+ }
+ __ bind(&install_unoptimized);
+ __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset),
+ rdi); // Initialize with undefined.
__ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
__ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
__ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1);
+
+ // rcx holds native context, ebx points to fixed array of 3-element entries
+ // (native context, optimized code, literals).
+ // The optimized code map must never be empty, so check the first elements.
+ Label install_optimized;
+ // Speculatively move code object into edx.
+ __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize));
+ __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize));
+ __ j(equal, &install_optimized);
+
+ // Iterate through the rest of map backwards. rdx holds an index.
+ Label loop;
+ Label restore;
+ __ movq(rdx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ __ SmiToInteger32(rdx, rdx);
+ __ bind(&loop);
+ // Do not double check first entry.
+ __ cmpq(rdx, Immediate(SharedFunctionInfo::kEntryLength));
+ __ j(equal, &restore);
+ __ subq(rdx, Immediate(SharedFunctionInfo::kEntryLength)); // Skip an entry.
+ __ cmpq(rcx, FieldOperand(rbx,
+ rdx,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ j(not_equal, &loop, Label::kNear);
+ // Hit: fetch the optimized code.
+ __ movq(rdx, FieldOperand(rbx,
+ rdx,
+ times_pointer_size,
+ FixedArray::kHeaderSize + 1 * kPointerSize));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1);
+
+ // TODO(fschneider): Idea: store proper code pointers in the map and either
+ // unmangle them on marking or do nothing as the whole map is discarded on
+ // major GC anyway.
+ __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
+ __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
+
+ // Now link a function into a list of optimized functions.
+ __ movq(rdx, ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdx);
+ // No need for write barrier as JSFunction (rax) is in the new space.
+
+ __ movq(ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST), rax);
+ // Store JSFunction (rax) into rdx before issuing write barrier as
+ // it clobbers all the registers passed.
+ __ movq(rdx, rax);
+ __ RecordWriteContextSlot(
+ rcx,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ rdx,
+ rbx,
+ kDontSaveFPRegs);
// Return and remove the on-stack parameter.
__ ret(1 * kPointerSize);
+ __ bind(&restore);
+ __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+ __ jmp(&install_unoptimized);
+
// Create a new closure through the slower runtime call.
__ bind(&gc);
__ pop(rcx); // Temporarily remove return address.
@@ -136,12 +217,12 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
// Copy the global object from the previous context.
- __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
+ __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);
// Copy the qmlglobal object from the previous context.
- __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ movq(Operand(rax, Context::SlotOffset(Context::QML_GLOBAL_INDEX)), rbx);
+ __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)));
+ __ movq(Operand(rax, Context::SlotOffset(Context::QML_GLOBAL_OBJECT_INDEX)), rbx);
// Initialize the rest of the slots to undefined.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
@@ -182,9 +263,9 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
- // If this block context is nested in the global context we get a smi
+ // If this block context is nested in the native context we get a smi
// sentinel instead of a function. The block context should get the
- // canonical empty function of the global context as its closure which
+ // canonical empty function of the native context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
@@ -194,7 +275,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ Assert(equal, message);
}
__ movq(rcx, GlobalObjectOperand());
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
__ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
@@ -204,12 +285,12 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
// Copy the global object from the previous context.
- __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
- __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
+ __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
+ __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);
// Copy the qmlglobal object from the previous context.
- __ movq(rbx, ContextOperand(rsi, Context::QML_GLOBAL_INDEX));
- __ movq(ContextOperand(rax, Context::QML_GLOBAL_INDEX), rbx);
+ __ movq(rbx, ContextOperand(rsi, Context::QML_GLOBAL_OBJECT_INDEX));
+ __ movq(ContextOperand(rax, Context::QML_GLOBAL_OBJECT_INDEX), rbx);
// Initialize the rest of the slots to the hole value.
__ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
@@ -1007,8 +1088,8 @@ void BinaryOpStub::GenerateSmiCode(
SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
// Arguments to BinaryOpStub are in rdx and rax.
- Register left = rdx;
- Register right = rax;
+ const Register left = rdx;
+ const Register right = rax;
// We only generate heapnumber answers for overflowing calculations
// for the four basic arithmetic operations and logical right shift by 0.
@@ -1050,20 +1131,16 @@ void BinaryOpStub::GenerateSmiCode(
case Token::DIV:
// SmiDiv will not accept left in rdx or right in rax.
- left = rcx;
- right = rbx;
__ movq(rbx, rax);
__ movq(rcx, rdx);
- __ SmiDiv(rax, left, right, &use_fp_on_smis);
+ __ SmiDiv(rax, rcx, rbx, &use_fp_on_smis);
break;
case Token::MOD:
// SmiMod will not accept left in rdx or right in rax.
- left = rcx;
- right = rbx;
__ movq(rbx, rax);
__ movq(rcx, rdx);
- __ SmiMod(rax, left, right, &use_fp_on_smis);
+ __ SmiMod(rax, rcx, rbx, &use_fp_on_smis);
break;
case Token::BIT_OR: {
@@ -1228,11 +1305,9 @@ void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
&allocation_failed,
TAG_OBJECT);
// Set the map.
- if (FLAG_debug_code) {
- __ AbortIfNotRootValue(heap_number_map,
- Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
- }
+ __ AssertRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
__ movq(FieldOperand(rax, HeapObject::kMapOffset),
heap_number_map);
__ cvtqsi2sd(xmm0, rbx);
@@ -1980,10 +2055,7 @@ void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
__ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
__ bind(&first_smi);
- if (FLAG_debug_code) {
- // Second should be non-smi if we get here.
- __ AbortIfSmi(second);
- }
+ __ AssertNotSmi(second);
__ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map);
__ j(not_equal, on_not_smis);
// Convert second to smi, if possible.
@@ -2193,21 +2265,28 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ movsd(double_scratch2, double_result); // Load double_exponent with 1.
// Get absolute value of exponent.
- Label no_neg, while_true, no_multiply;
+ Label no_neg, while_true, while_false;
__ testl(scratch, scratch);
__ j(positive, &no_neg, Label::kNear);
__ negl(scratch);
__ bind(&no_neg);
- __ bind(&while_true);
+ __ j(zero, &while_false, Label::kNear);
__ shrl(scratch, Immediate(1));
- __ j(not_carry, &no_multiply, Label::kNear);
- __ mulsd(double_result, double_scratch);
- __ bind(&no_multiply);
+ // Above condition means CF==0 && ZF==0. This means that the
+ // bit that has been shifted out is 0 and the result is not 0.
+ __ j(above, &while_true, Label::kNear);
+ __ movsd(double_result, double_scratch);
+ __ j(zero, &while_false, Label::kNear);
+ __ bind(&while_true);
+ __ shrl(scratch, Immediate(1));
__ mulsd(double_scratch, double_scratch);
+ __ j(above, &while_true, Label::kNear);
+ __ mulsd(double_result, double_scratch);
__ j(not_zero, &while_true);
+ __ bind(&while_false);
// If the exponent is negative, return 1/result.
__ testl(exponent, exponent);
__ j(greater, &done);
@@ -2387,10 +2466,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// rax = address of new object(s) (tagged)
// rcx = argument count (untagged)
- // Get the arguments boilerplate from the current (global) context into rdi.
+ // Get the arguments boilerplate from the current native context into rdi.
Label has_mapped_parameters, copy;
- __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
__ testq(rbx, rbx);
__ j(not_zero, &has_mapped_parameters, Label::kNear);
@@ -2533,7 +2612,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ bind(&runtime);
__ Integer32ToSmi(rcx, rcx);
__ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
- __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
@@ -2603,9 +2682,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
- // Get the arguments boilerplate from the current (global) context.
- __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ // Get the arguments boilerplate from the current native context.
+ __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
const int offset =
Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
__ movq(rdi, Operand(rdi, offset));
@@ -2722,7 +2801,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Calculate number of capture registers (number_of_captures + 1) * 2.
__ leal(rdx, Operand(rdx, rdx, times_1, 2));
// Check that the static offsets vector buffer is large enough.
- __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
+ __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize));
__ j(above, &runtime);
// rax: RegExp data (FixedArray)
@@ -2872,30 +2951,37 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
+ static const int kRegExpExecuteArguments = 9;
int argument_slots_on_stack =
masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
__ EnterApiExitFrame(argument_slots_on_stack);
- // Argument 8: Pass current isolate address.
+ // Argument 9: Pass current isolate address.
// __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
// Immediate(ExternalReference::isolate_address()));
__ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
__ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
kScratchRegister);
- // Argument 7: Indicate that this is a direct call from JavaScript.
+ // Argument 8: Indicate that this is a direct call from JavaScript.
__ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
Immediate(1));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ movq(kScratchRegister, address_of_regexp_stack_memory_address);
__ movq(r9, Operand(kScratchRegister, 0));
__ movq(kScratchRegister, address_of_regexp_stack_memory_size);
__ addq(r9, Operand(kScratchRegister, 0));
- // Argument 6 passed in r9 on Linux and on the stack on Windows.
-#ifdef _WIN64
__ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ // Argument 6 is passed in r9 on Linux and on the stack on Windows.
+#ifdef _WIN64
+ __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
+ Immediate(0));
+#else
+ __ Set(r9, 0);
#endif
// Argument 5: static offsets vector buffer.
@@ -2903,7 +2989,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
ExternalReference::address_of_static_offsets_vector(isolate));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
#endif
// First four arguments are passed in registers on both Linux and Windows.
@@ -2968,7 +3054,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
Label exception;
- __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmpl(rax, Immediate(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ j(equal, &success, Label::kNear);
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
__ j(equal, &exception);
@@ -3125,8 +3213,8 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// r8: Number of array elements as smi.
// Set JSArray map to global.regexp_result_map().
- __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
+ __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
__ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
__ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
@@ -3157,14 +3245,14 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set length.
__ Integer32ToSmi(rdx, rbx);
__ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
- // Fill contents of fixed-array with the-hole.
- __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+ // Fill contents of fixed-array with undefined.
+ __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
- // Fill fixed array elements with hole.
+ // Fill fixed array elements with undefined.
// rax: JSArray.
// rbx: Number of elements in array that remains to be filled, as int32.
// rcx: Start of elements in FixedArray.
- // rdx: the hole.
+ // rdx: undefined.
Label loop;
__ testl(rbx, rbx);
__ bind(&loop);
@@ -3349,13 +3437,13 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ jmp(&not_user_equal);
__ bind(&user_equal);
-
+
__ pop(rbx); // Return address.
__ push(rax);
__ push(rdx);
__ push(rbx);
__ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
+
__ bind(&not_user_equal);
}
@@ -4691,7 +4779,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
Label non_ascii, allocated, ascii_data;
__ movl(rcx, r8);
__ and_(rcx, r9);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ testl(rcx, Immediate(kStringEncodingMask));
__ j(zero, &non_ascii);
@@ -4717,9 +4805,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ testb(rcx, Immediate(kAsciiDataHintMask));
__ j(not_zero, &ascii_data);
__ xor_(r8, r9);
- STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
- __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
- __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
+ STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
+ __ andb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
+ __ cmpb(r8, Immediate(kOneByteStringTag | kAsciiDataHintTag));
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
__ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
@@ -5239,7 +5327,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ testb(rbx, Immediate(kStringEncodingMask));
__ j(zero, &two_byte_slice, Label::kNear);
@@ -5283,7 +5371,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
__ bind(&sequential_string);
- STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ testb(rbx, Immediate(kStringEncodingMask));
__ j(zero, &two_byte_sequential);
@@ -5887,8 +5975,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
ASSERT(!name.is(r0));
ASSERT(!name.is(r1));
- // Assert that name contains a string.
- if (FLAG_debug_code) __ AbortIfNotString(name);
+ __ AssertString(name);
__ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
__ decl(r0);
@@ -6044,18 +6131,20 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET},
{ REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToObject
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET},
{ REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
+ // FastNewClosureStub::Generate
+ { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
@@ -6100,6 +6189,11 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
}
+bool CodeStub::CanUseFPRegisters() {
+ return true; // Always have SSE2 on x64.
+}
+
+
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
@@ -6232,6 +6326,17 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_object;
+ __ movq(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
+ __ and_(regs_.scratch0(), regs_.object());
+ __ movq(regs_.scratch1(),
+ Operand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset));
+ __ subq(regs_.scratch1(), Immediate(1));
+ __ movq(Operand(regs_.scratch0(),
+ MemoryChunk::kWriteBarrierCounterOffset),
+ regs_.scratch1());
+ __ j(negative, &need_incremental);
+
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(),
@@ -6323,9 +6428,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ CheckFastElements(rdi, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
__ JumpIfSmi(rax, &smi_element);
- __ CheckFastSmiOnlyElements(rdi, &fast_elements);
+ __ CheckFastSmiElements(rdi, &fast_elements);
// Store into the array literal requires a elements transition. Call into
// the runtime.
@@ -6343,7 +6448,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// place.
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ SmiToInteger32(kScratchRegister, rcx);
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
@@ -6357,8 +6462,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
OMIT_SMI_CHECK);
__ ret(0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
+ // FAST_*_ELEMENTS, and value is Smi.
__ bind(&smi_element);
__ SmiToInteger32(kScratchRegister, rcx);
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
@@ -6379,6 +6484,74 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ ret(0);
}
+
+void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
+ if (entry_hook_ != NULL) {
+ ProfileEntryHookStub stub;
+ masm->CallStub(&stub);
+ }
+}
+
+
+void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
+ // Save volatile registers.
+ // Live registers at this point are the same as at the start of any
+ // JS function:
+ // o rdi: the JS function object being called (i.e. ourselves)
+ // o rsi: our context
+ // o rbp: our caller's frame pointer
+ // o rsp: stack pointer (pointing to return address)
+ // o rcx: rcx is zero for method calls and non-zero for function calls.
+#ifdef _WIN64
+ const int kNumSavedRegisters = 1;
+
+ __ push(rcx);
+#else
+ const int kNumSavedRegisters = 3;
+
+ __ push(rcx);
+ __ push(rdi);
+ __ push(rsi);
+#endif
+
+ // Calculate the original stack pointer and store it in the second arg.
+#ifdef _WIN64
+ __ lea(rdx, Operand(rsp, kNumSavedRegisters * kPointerSize));
+#else
+ __ lea(rsi, Operand(rsp, kNumSavedRegisters * kPointerSize));
+#endif
+
+ // Calculate the function address to the first arg.
+#ifdef _WIN64
+ __ movq(rcx, Operand(rdx, 0));
+ __ subq(rcx, Immediate(Assembler::kShortCallInstructionLength));
+#else
+ __ movq(rdi, Operand(rsi, 0));
+ __ subq(rdi, Immediate(Assembler::kShortCallInstructionLength));
+#endif
+
+ // Call the entry hook function.
+ __ movq(rax, &entry_hook_, RelocInfo::NONE);
+ __ movq(rax, Operand(rax, 0));
+
+ AllowExternalCallThatCantCauseGC scope(masm);
+
+ const int kArgumentCount = 2;
+ __ PrepareCallCFunction(kArgumentCount);
+ __ CallCFunction(rax, kArgumentCount);
+
+ // Restore volatile regs.
+#ifdef _WIN64
+ __ pop(rcx);
+#else
+ __ pop(rsi);
+ __ pop(rdi);
+ __ pop(rcx);
+#endif
+
+ __ Ret();
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/x64/codegen-x64.cc b/src/3rdparty/v8/src/x64/codegen-x64.cc
index a8d39b2..ffccf47 100644
--- a/src/3rdparty/v8/src/x64/codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/codegen-x64.cc
@@ -220,7 +220,7 @@ ModuloFunction CreateModuloFunction() {
#define __ ACCESS_MASM(masm)
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : value
@@ -241,7 +241,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- rax : value
@@ -551,7 +551,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Dispatch on the encoding: ASCII or two-byte.
Label ascii;
__ bind(&seq_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ testb(result, Immediate(kStringEncodingMask));
__ j(not_zero, &ascii, Label::kNear);
@@ -577,6 +577,91 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
#undef __
+
+static const int kNoCodeAgeSequenceLength = 6;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+ static bool initialized = false;
+ static byte sequence[kNoCodeAgeSequenceLength];
+ *length = kNoCodeAgeSequenceLength;
+ if (!initialized) {
+ // The sequence of instructions that is patched out for aging code is the
+ // following boilerplate stack-building prologue that is found both in
+ // FUNCTION and OPTIMIZED_FUNCTION code:
+ CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+ patcher.masm()->push(rbp);
+ patcher.masm()->movq(rbp, rsp);
+ patcher.masm()->push(rsi);
+ patcher.masm()->push(rdi);
+ initialized = true;
+ }
+ return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+ byte* start = instruction_start();
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (!memcmp(start, young_sequence, young_length) ||
+ *start == kCallOpcode) {
+ return start;
+ } else {
+ byte* start_after_strict = NULL;
+ if (kind() == FUNCTION) {
+ start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+ } else {
+ ASSERT(kind() == OPTIMIZED_FUNCTION);
+ start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+ }
+ ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+ *start_after_strict == kCallOpcode);
+ return start_after_strict;
+ }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ bool result = (!memcmp(sequence, young_sequence, young_length));
+ ASSERT(result || *sequence == kCallOpcode);
+ return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+ MarkingParity* parity) {
+ if (IsYoungSequence(sequence)) {
+ *age = kNoAge;
+ *parity = NO_MARKING_PARITY;
+ } else {
+ sequence++; // Skip the kCallOpcode byte
+ Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+ Assembler::kCallTargetAddressOffset;
+ Code* stub = GetCodeFromTargetAddress(target_address);
+ GetCodeAgeAndParity(stub, age, parity);
+ }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+ Code::Age age,
+ MarkingParity parity) {
+ uint32_t young_length;
+ byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+ if (age == kNoAge) {
+ memcpy(sequence, young_sequence, young_length);
+ CPU::FlushICache(sequence, young_length);
+ } else {
+ Code* stub = GetCodeAgeStub(age, parity);
+ CodePatcher patcher(sequence, young_length);
+ patcher.masm()->call(stub->instruction_start());
+ patcher.masm()->nop();
+ }
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64
diff --git a/src/3rdparty/v8/src/x64/codegen-x64.h b/src/3rdparty/v8/src/x64/codegen-x64.h
index 2e80751..5d8bbff 100644
--- a/src/3rdparty/v8/src/x64/codegen-x64.h
+++ b/src/3rdparty/v8/src/x64/codegen-x64.h
@@ -39,6 +39,8 @@ class CompilationInfo;
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
+static const int kSizeOfFullCodegenStrictModePrologue = 14;
+static const int kSizeOfOptimizedStrictModePrologue = 14;
// -------------------------------------------------------------------------
// CodeGenerator
diff --git a/src/3rdparty/v8/src/x64/deoptimizer-x64.cc b/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
index f3046b9..a3fe8f9 100644
--- a/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
+++ b/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
@@ -52,6 +52,10 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
+ // The optimized code is going to be patched, so we cannot use it
+ // any more. Play safe and reset the whole cache.
+ function->shared()->ClearOptimizedCodeMap();
+
// Get the optimized code.
Code* code = function->code();
@@ -100,8 +104,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
- // Set the code for the function to non-optimized version.
- function->ReplaceCode(function->shared()->code());
+ ReplaceCodeForRelatedFunctions(function, code);
if (FLAG_trace_deopt) {
PrintF("[forced deoptimization: ");
@@ -188,11 +191,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
}
-static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
+static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) {
ByteArray* translations = data->TranslationByteArray();
int length = data->DeoptCount();
for (int i = 0; i < length; i++) {
- if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
+ if (data->AstId(i) == ast_id) {
TranslationIterator it(translations, data->TranslationIndex(i)->value());
int value = it.Next();
ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
@@ -214,7 +217,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
// the ast id. Confusing.
ASSERT(bailout_id_ == ast_id);
- int bailout_id = LookupBailoutId(data, ast_id);
+ int bailout_id = LookupBailoutId(data, BailoutId(ast_id));
unsigned translation_index = data->TranslationIndex(bailout_id)->value();
ByteArray* translations = data->TranslationByteArray();
@@ -234,9 +237,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
- USE(function);
- ASSERT(function == function_);
+ int closure_id = iterator.Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
unsigned height = iterator.Next();
unsigned height_in_bytes = height * kPointerSize;
USE(height_in_bytes);
@@ -341,15 +344,15 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0]->SetPc(pc);
}
Code* continuation =
- function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
+ function_->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
output_[0]->SetContinuation(
reinterpret_cast<intptr_t>(continuation->entry()));
if (FLAG_trace_osr) {
PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
ok ? "finished" : "aborted",
- reinterpret_cast<intptr_t>(function));
- function->PrintName();
+ reinterpret_cast<intptr_t>(function_));
+ function_->PrintName();
PrintF(" => pc=0x%0" V8PRIxPTR "]\n", output_[0]->GetPc());
}
}
@@ -576,16 +579,143 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
+ int frame_index,
+ bool is_setter_stub_frame) {
+ JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ // The receiver (and the implicit return value, if any) are expected in
+ // registers by the LoadIC/StoreIC, so they don't belong to the output stack
+ // frame. This means that we have to use a height of 0.
+ unsigned height = 0;
+ unsigned height_in_bytes = height * kPointerSize;
+ const char* kind = is_setter_stub_frame ? "setter" : "getter";
+ if (FLAG_trace_deopt) {
+ PrintF(" translating %s stub => height=%u\n", kind, height_in_bytes);
+ }
+
+ // We need 1 stack entry for the return address + 4 stack entries from
+ // StackFrame::INTERNAL (FP, context, frame type, code object, see
+ // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
+ // entry for the implicit return value, see
+ // StoreStubCompiler::CompileStoreViaSetter.
+ unsigned fixed_frame_entries = 1 + 4 + (is_setter_stub_frame ? 1 : 0);
+ unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, accessor);
+ output_frame->SetFrameType(StackFrame::INTERNAL);
+
+ // A frame for an accessor stub can not be the topmost or bottommost one.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous frame's top and
+ // this frame's size.
+ intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ unsigned output_offset = output_frame_size;
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; function (%s sentinel)\n",
+ top_address + output_offset, output_offset, value, kind);
+ }
+
+ // Get Code object from accessor stub.
+ output_offset -= kPointerSize;
+ Builtins::Name name = is_setter_stub_frame ?
+ Builtins::kStoreIC_Setter_ForDeopt :
+ Builtins::kLoadIC_Getter_ForDeopt;
+ Code* accessor_stub = isolate_->builtins()->builtin(name);
+ value = reinterpret_cast<intptr_t>(accessor_stub);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
+ " ; code object\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Skip receiver.
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator->Next());
+ iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+
+ if (is_setter_stub_frame) {
+ // The implicit return value was part of the artificial setter stub
+ // environment.
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Smi* offset = is_setter_stub_frame ?
+ isolate_->heap()->setter_stub_deopt_pc_offset() :
+ isolate_->heap()->getter_stub_deopt_pc_offset();
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ accessor_stub->instruction_start() + offset->value());
+ output_frame->SetPc(pc);
+}
+
+
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
- int node_id = iterator->Next();
- JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ BailoutId node_id = BailoutId(iterator->Next());
+ JSFunction* function;
+ if (frame_index != 0) {
+ function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ } else {
+ int closure_id = iterator->Next();
+ USE(closure_id);
+ ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
+ function = function_;
+ }
unsigned height = iterator->Next();
unsigned height_in_bytes = height * kPointerSize;
if (FLAG_trace_deopt) {
PrintF(" translating ");
function->PrintName();
- PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
+ PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
}
// The 'fixed' part of the frame consists of the incoming parameters and
diff --git a/src/3rdparty/v8/src/x64/disasm-x64.cc b/src/3rdparty/v8/src/x64/disasm-x64.cc
index 0738153..c8606c4 100644
--- a/src/3rdparty/v8/src/x64/disasm-x64.cc
+++ b/src/3rdparty/v8/src/x64/disasm-x64.cc
@@ -703,6 +703,9 @@ int DisassemblerX64::F6F7Instruction(byte* data) {
case 4:
mnem = "mul";
break;
+ case 5:
+ mnem = "imul";
+ break;
case 7:
mnem = "idiv";
break;
diff --git a/src/3rdparty/v8/src/x64/full-codegen-x64.cc b/src/3rdparty/v8/src/x64/full-codegen-x64.cc
index 22f2ebb..a71c9b1 100644
--- a/src/3rdparty/v8/src/x64/full-codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/full-codegen-x64.cc
@@ -123,6 +123,8 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -136,6 +138,8 @@ void FullCodeGenerator::Generate() {
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
Label ok;
+ Label begin;
+ __ bind(&begin);
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
// +1 for return address.
@@ -143,6 +147,8 @@ void FullCodeGenerator::Generate() {
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ movq(Operand(rsp, receiver_offset), kScratchRegister);
__ bind(&ok);
+ ASSERT(!FLAG_age_code ||
+ (kSizeOfFullCodegenStrictModePrologue == ok.pos() - begin.pos()));
}
// Open a frame scope to indicate that there is a frame on the stack. The
@@ -173,11 +179,14 @@ void FullCodeGenerator::Generate() {
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
if (heap_slots > 0 ||
(scope()->is_qml_mode() && scope()->is_global_scope())) {
- Comment cmnt(masm_, "[ Allocate local context");
+ Comment cmnt(masm_, "[ Allocate context");
// Argument to NewContext is the function, which is still in rdi.
__ push(rdi);
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
+ __ Push(info->scope()->GetScopeInfo());
+ __ CallRuntime(Runtime::kNewGlobalContext, 2);
+ } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -253,7 +262,7 @@ void FullCodeGenerator::Generate() {
scope()->VisitIllegalRedeclaration(this);
} else {
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
@@ -268,7 +277,7 @@ void FullCodeGenerator::Generate() {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
+ PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok, Label::kNear);
@@ -311,10 +320,6 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
// Self-optimization is a one-off thing; if it fails, don't try again.
reset_value = Smi::kMaxValue;
}
- if (isolate()->IsDebuggerActive()) {
- // Detect debug break requests as soon as possible.
- reset_value = 10;
- }
__ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
__ movq(kScratchRegister,
reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
@@ -324,10 +329,6 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
-static const int kMaxBackEdgeWeight = 127;
-static const int kBackEdgeDistanceDivisor = 162;
-
-
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
@@ -339,7 +340,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(back_edge_target->is_bound());
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
__ j(positive, &ok, Label::kNear);
@@ -395,7 +396,7 @@ void FullCodeGenerator::EmitReturnSequence() {
} else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
- Max(1, distance = kBackEdgeDistanceDivisor));
+ Max(1, distance / kBackEdgeDistanceUnit));
}
EmitProfilingCounterDecrement(weight);
Label ok;
@@ -509,12 +510,20 @@ void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
void FullCodeGenerator::AccumulatorValueContext::Plug(
Handle<Object> lit) const {
- __ Move(result_register(), lit);
+ if (lit->IsSmi()) {
+ __ SafeMove(result_register(), Smi::cast(*lit));
+ } else {
+ __ Move(result_register(), lit);
+ }
}
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
- __ Push(lit);
+ if (lit->IsSmi()) {
+ __ SafePush(Smi::cast(*lit));
+ } else {
+ __ Push(lit);
+ }
}
@@ -660,7 +669,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* fall_through) {
ToBooleanStub stub(result_register());
__ push(result_register());
- __ CallStub(&stub);
+ __ CallStub(&stub, condition->test_id());
__ testq(result_register(), result_register());
// The stub returns nonzero for true.
Split(not_zero, if_true, if_false, fall_through);
@@ -758,7 +767,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
// Check that we're not inside a with or catch context.
__ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
__ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
@@ -780,11 +789,12 @@ void FullCodeGenerator::VisitVariableDeclaration(
bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
globals_->Add(variable->binding_needs_init()
? isolate()->factory()->the_hole_value()
- : isolate()->factory()->undefined_value());
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ : isolate()->factory()->undefined_value(),
+ zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
case Variable::PARAMETER:
@@ -812,10 +822,9 @@ void FullCodeGenerator::VisitVariableDeclaration(
__ push(rsi);
__ Push(variable->name());
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR || mode == LET ||
- mode == CONST || mode == CONST_HARMONY);
+ ASSERT(IsDeclaredVariableMode(mode));
PropertyAttributes attr =
- (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
+ IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
__ Push(Smi::FromInt(attr));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@@ -839,13 +848,13 @@ void FullCodeGenerator::VisitFunctionDeclaration(
Variable* variable = proxy->var();
switch (variable->location()) {
case Variable::UNALLOCATED: {
- globals_->Add(variable->name());
+ globals_->Add(variable->name(), zone());
Handle<SharedFunctionInfo> function =
Compiler::BuildFunctionInfo(declaration->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) return SetStackOverflow();
- globals_->Add(function);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(function, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
break;
}
@@ -897,9 +906,9 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
switch (variable->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name());
- globals_->Add(instance);
- globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()), zone());
Visit(declaration->module());
break;
}
@@ -1103,22 +1112,32 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label fixed_array;
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
- __ j(not_equal, &fixed_array, Label::kNear);
+ __ j(not_equal, &fixed_array);
// We got a map in register rax. Get the enumeration cache from it.
__ bind(&use_cache);
+
+ Label no_descriptors;
+
+ __ EnumLength(rdx, rax);
+ __ Cmp(rdx, Smi::FromInt(0));
+ __ j(equal, &no_descriptors);
+
__ LoadInstanceDescriptors(rax, rcx);
- __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
- __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
+ __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ push(rax); // Map.
- __ push(rdx); // Enumeration cache.
- __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
- __ push(rax); // Enumeration cache length (as smi).
+ __ push(rcx); // Enumeration cache.
+ __ push(rdx); // Number of valid entries for the map in the enum cache.
__ Push(Smi::FromInt(0)); // Initial index.
__ jmp(&loop);
+ __ bind(&no_descriptors);
+ __ addq(rsp, Immediate(kPointerSize));
+ __ jmp(&exit);
+
// We got a fixed array in register rax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
@@ -1127,7 +1146,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
isolate()->factory()->NewJSGlobalPropertyCell(
Handle<Object>(
Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
- RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
__ LoadHeapObject(rbx, cell);
__ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
@@ -1286,9 +1305,9 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ movq(temp, context);
}
// Load map for comparison into register, outside loop.
- __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
+ __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
__ bind(&next);
- // Terminate at global context.
+ // Terminate at native context.
__ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
__ j(equal, &fast, Label::kNear);
// Check that extension is NULL.
@@ -1571,9 +1590,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
- AccessorTable accessor_table(isolate()->zone());
+ AccessorTable accessor_table(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1599,7 +1618,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1663,7 +1682,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_constant_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1674,7 +1694,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
@@ -1686,10 +1706,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1717,9 +1736,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
- // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
- // transition and don't need to call the runtime stub.
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
+ // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
+ // cannot transition and don't need to call the runtime stub.
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
@@ -1810,11 +1829,11 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ PrepareForBailoutForId(property->LoadId(), TOS_REG);
break;
}
}
@@ -1869,14 +1888,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
@@ -1898,7 +1917,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ movq(rax, rcx);
BinaryOpStub stub(op, mode);
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@@ -1947,7 +1967,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(rdx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(rax);
}
@@ -2073,7 +2094,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, rcx);
- if (FLAG_debug_code && op == Token::INIT_LET) {
+ if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ movq(rdx, location);
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
@@ -2105,37 +2126,15 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ Move(rcx, prop->key()->AsLiteral()->handle());
- if (expr->ends_initialization_block()) {
- __ movq(rdx, Operand(rsp, 0));
- } else {
- __ pop(rdx);
- }
+ __ pop(rdx);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(rax); // Result of assignment, saved even if not needed.
- __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(rax);
- __ Drop(1);
- }
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
@@ -2144,38 +2143,14 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ push(Operand(rsp, 2 * kPointerSize));
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
__ pop(rcx);
- if (expr->ends_initialization_block()) {
- __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
- } else {
- __ pop(rdx);
- }
+ __ pop(rdx);
// Record source code position before IC call.
SetSourcePosition(expr->position());
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
-
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ pop(rdx);
- __ push(rax); // Result of assignment, saved even if not needed.
- __ push(rdx);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(rax);
- }
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
@@ -2189,6 +2164,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(rax);
} else {
VisitForStackValue(expr->obj());
@@ -2202,7 +2178,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
ic_total_count_++;
__ call(code, rmode, ast_id);
}
@@ -2225,7 +2201,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2258,7 +2234,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2278,20 +2254,18 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
- // Record call targets in unoptimized code, but not in the snapshot.
- if (!Serializer::enabled()) {
- flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ Move(rbx, cell);
- }
+ // Record call targets in unoptimized code.
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ Move(rbx, cell);
CallFunctionStub stub(arg_count, flags);
__ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
- __ CallStub(&stub);
+ __ CallStub(&stub, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2469,20 +2443,14 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ movq(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code, but not in the snapshot.
- CallFunctionFlags flags;
- if (!Serializer::enabled()) {
- flags = RECORD_CALL_TARGET;
- Handle<Object> uninitialized =
- TypeFeedbackCells::UninitializedSentinel(isolate());
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- RecordTypeFeedbackCell(expr->id(), cell);
- __ Move(rbx, cell);
- } else {
- flags = NO_CALL_FUNCTION_FLAGS;
- }
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ Move(rbx, cell);
- CallConstructStub stub(flags);
+ CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(rax);
@@ -2623,7 +2591,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (FLAG_debug_code) __ AbortIfSmi(rax);
+ __ AssertNotSmi(rax);
// Check whether this map has already been checked to be safe for default
// valueOf.
@@ -2639,45 +2607,50 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ j(equal, if_false);
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(rcx, rbx);
+ __ cmpq(rcx, Immediate(0));
+ __ j(equal, &done);
+
__ LoadInstanceDescriptors(rbx, rbx);
- __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
- // rbx: descriptor array
- // rcx: length of descriptor array
+ // rbx: descriptor array.
+ // rcx: valid entries in the descriptor array.
// Calculate the end of the descriptor array.
+ __ imul(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
__ lea(rcx,
Operand(
- rbx, index.reg, index.scale, FixedArray::kHeaderSize));
+ rbx, index.reg, index.scale, DescriptorArray::kFirstOffset));
// Calculate location of the first key name.
- __ addq(rbx,
- Immediate(FixedArray::kHeaderSize +
- DescriptorArray::kFirstIndex * kPointerSize));
+ __ addq(rbx, Immediate(DescriptorArray::kFirstOffset));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
__ jmp(&entry);
__ bind(&loop);
__ movq(rdx, FieldOperand(rbx, 0));
__ Cmp(rdx, FACTORY->value_of_symbol());
__ j(equal, if_false);
- __ addq(rbx, Immediate(kPointerSize));
+ __ addq(rbx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
__ bind(&entry);
__ cmpq(rbx, rcx);
__ j(not_equal, &loop);
+ __ bind(&done);
// Reload map as register rbx was used as temporary above.
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
- // If a valueOf property is not found on the object check that it's
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
__ testq(rcx, Immediate(kSmiTagMask));
__ j(zero, if_false);
__ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
- __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
+ __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
__ cmpq(rcx,
ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
__ j(not_equal, if_false);
@@ -2847,7 +2820,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
__ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ bind(&exit);
- if (FLAG_debug_code) __ AbortIfNotSmi(rax);
+ __ AssertSmi(rax);
context()->Plug(rax);
}
@@ -2954,12 +2927,14 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
__ PrepareCallCFunction(1);
#ifdef _WIN64
- __ movq(rcx, ContextOperand(context_register(), Context::GLOBAL_INDEX));
- __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+ __ movq(rcx,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
#else
- __ movq(rdi, ContextOperand(context_register(), Context::GLOBAL_INDEX));
- __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
+ __ movq(rdi,
+ ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
+ __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
#endif
__ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
@@ -3033,19 +3008,18 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); // Load the object.
- Label runtime, done;
+ Label runtime, done, not_date_object;
Register object = rax;
Register result = rax;
Register scratch = rcx;
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ __ JumpIfSmi(object, &not_date_object);
__ CmpObjectType(object, JS_DATE_TYPE, scratch);
- __ Assert(equal, "Trying to get date field from non-date.");
-#endif
+ __ j(not_equal, &not_date_object);
if (index->value() == 0) {
__ movq(result, FieldOperand(object, JSDate::kValueOffset));
+ __ jmp(&done);
} else {
if (index->value() < JSDate::kFirstUncachedField) {
ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
@@ -3067,8 +3041,12 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
#endif
__ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ bind(&done);
+ __ jmp(&done);
}
+
+ __ bind(&not_date_object);
+ __ CallRuntime(Runtime::kThrowNotDateError, 0);
+ __ bind(&done);
context()->Plug(rax);
}
@@ -3333,10 +3311,11 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
- // Check for proxy.
- Label proxy, done;
- __ CmpObjectType(rax, JS_FUNCTION_PROXY_TYPE, rbx);
- __ j(equal, &proxy);
+ Label runtime, done;
+ // Check for non-function argument (including proxy).
+ __ JumpIfSmi(rax, &runtime);
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
+ __ j(not_equal, &runtime);
// InvokeFunction requires the function in rdi. Move it in there.
__ movq(rdi, result_register());
@@ -3346,7 +3325,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ jmp(&done);
- __ bind(&proxy);
+ __ bind(&runtime);
__ push(rax);
__ CallRuntime(Runtime::kCall, args->length());
__ bind(&done);
@@ -3375,7 +3354,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- isolate()->global_context()->jsfunction_result_caches());
+ isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
@@ -3388,9 +3367,9 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Register key = rax;
Register cache = rbx;
Register tmp = rcx;
- __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
+ __ movq(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
__ movq(cache,
- FieldOperand(cache, GlobalObject::kGlobalContextOffset));
+ FieldOperand(cache, GlobalObject::kNativeContextOffset));
__ movq(cache,
ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
__ movq(cache,
@@ -3491,9 +3470,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- if (FLAG_debug_code) {
- __ AbortIfNotString(rax);
- }
+ __ AssertString(rax);
__ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
ASSERT(String::kHashShift >= kSmiTagSize);
@@ -3571,7 +3548,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Loop condition: while (index < array_length).
// Live loop registers: index(int32), array_length(int32), string(String*),
// scratch, string_length(int32), elements(FixedArray*).
- if (FLAG_debug_code) {
+ if (generate_debug_code_) {
__ cmpq(index, array_length);
__ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
}
@@ -3585,7 +3562,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ andb(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
- __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+ __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
__ j(not_equal, &bailout);
__ AddSmiField(string_length,
FieldOperand(string, SeqAsciiString::kLengthOffset));
@@ -3624,7 +3601,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ andb(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
- __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
+ __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
__ j(not_equal, &bailout);
// Live registers:
@@ -3817,7 +3794,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- CallIC(ic, mode, expr->id());
+ CallIC(ic, mode, expr->CallRuntimeFeedbackId());
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
@@ -3975,7 +3952,8 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+ expr->UnaryOperationFeedbackId());
context()->Plug(rax);
}
@@ -4031,7 +4009,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
} else {
- PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
// Call ToNumber only if operand is not a smi.
@@ -4096,7 +4074,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
- CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4130,7 +4108,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4147,7 +4125,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4354,7 +4332,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
@@ -4436,7 +4414,7 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
if (declaration_scope->is_global_scope() ||
declaration_scope->is_module_scope()) {
- // Contexts nested in the global context have a canonical empty function
+ // Contexts nested in the native context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
// function.
@@ -4466,15 +4444,52 @@ void FullCodeGenerator::EnterFinallyBlock() {
__ subq(rdx, rcx);
__ Integer32ToSmi(rdx, rdx);
__ push(rdx);
+
// Store result register while executing finally block.
__ push(result_register());
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ Load(rdx, pending_message_obj);
+ __ push(rdx);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ Load(rdx, has_pending_message);
+ __ Integer32ToSmi(rdx, rdx);
+ __ push(rdx);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ Load(rdx, pending_message_script);
+ __ push(rdx);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
+ // Restore pending message from stack.
+ __ pop(rdx);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ Store(pending_message_script, rdx);
+
+ __ pop(rdx);
+ __ SmiToInteger32(rdx, rdx);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ Store(has_pending_message, rdx);
+
+ __ pop(rdx);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ Store(pending_message_obj, rdx);
+
+ // Restore result register from stack.
__ pop(result_register());
+
// Uncook return address.
__ pop(rdx);
__ SmiToInteger32(rdx, rdx);
diff --git a/src/3rdparty/v8/src/x64/ic-x64.cc b/src/3rdparty/v8/src/x64/ic-x64.cc
index 6ba5fb6..efa07a8 100644
--- a/src/3rdparty/v8/src/x64/ic-x64.cc
+++ b/src/3rdparty/v8/src/x64/ic-x64.cc
@@ -135,7 +135,7 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
r0,
r1);
- // If probing finds an entry in the dictionary, r0 contains the
+ // If probing finds an entry in the dictionary, r1 contains the
// index into the dictionary. Check that the value is a normal
// property.
__ bind(&done);
@@ -178,10 +178,9 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
//
// value - holds the value to store and is unchanged.
//
- // scratch0 - used for index into the property dictionary and is clobbered.
+ // scratch0 - used during the positive dictionary lookup and is clobbered.
//
- // scratch1 - used to hold the capacity of the property dictionary and is
- // clobbered.
+ // scratch1 - used for index into the property dictionary and is clobbered.
Label done;
// Probe the dictionary.
@@ -624,6 +623,123 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
}
+static void KeyedStoreGenerateGenericHelper(
+ MacroAssembler* masm,
+ Label* fast_object,
+ Label* fast_double,
+ Label* slow,
+ KeyedStoreCheckMap check_map,
+ KeyedStoreIncrementLength increment_length) {
+ Label transition_smi_elements;
+ Label finish_object_store, non_double_value, transition_double_elements;
+ Label fast_double_without_map_check;
+ // Fast case: Do the store, could be either Object or double.
+ __ bind(fast_object);
+ // rax: value
+ // rbx: receiver's elements array (a FixedArray)
+ // rcx: index
+ // rdx: receiver (a JSArray)
+ // r9: map of receiver
+ if (check_map == kCheckMap) {
+ __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
+ __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, fast_double);
+ }
+ // Smi stores don't require further checks.
+ Label non_smi_value;
+ __ JumpIfNotSmi(rax, &non_smi_value);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ leal(rdi, Operand(rcx, 1));
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
+ }
+ // It's irrelevant whether array is smi-only or not when writing a smi.
+ __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
+ rax);
+ __ ret(0);
+
+ __ bind(&non_smi_value);
+ // Writing a non-smi, check whether array allows non-smi elements.
+ // r9: receiver's map
+ __ CheckFastObjectElements(r9, &transition_smi_elements);
+
+ __ bind(&finish_object_store);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ leal(rdi, Operand(rcx, 1));
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
+ }
+ __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
+ rax);
+ __ movq(rdx, rax); // Preserve the value which is returned.
+ __ RecordWriteArray(
+ rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ __ ret(0);
+
+ __ bind(fast_double);
+ if (check_map == kCheckMap) {
+ // Check for fast double array case. If this fails, call through to the
+ // runtime.
+ // rdi: elements array's map
+ __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
+ __ j(not_equal, slow);
+ }
+ __ bind(&fast_double_without_map_check);
+ __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
+ &transition_double_elements);
+ if (increment_length == kIncrementLength) {
+ // Add 1 to receiver->length.
+ __ leal(rdi, Operand(rcx, 1));
+ __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
+ }
+ __ ret(0);
+
+ __ bind(&transition_smi_elements);
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+
+ // Transition the array appropriately depending on the value type.
+ __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
+ __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &non_double_value);
+
+ // Value is a double. Transition FAST_SMI_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ rbx,
+ rdi,
+ slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ rdi,
+ slow);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ rdi,
+ slow);
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+}
+
+
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
@@ -632,11 +748,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- Label slow, slow_with_tagged_index, fast, array, extra, check_extra_double;
- Label fast_object_with_map_check, fast_object_without_map_check;
- Label fast_double_with_map_check, fast_double_without_map_check;
- Label transition_smi_elements, finish_object_store, non_double_value;
- Label transition_double_elements;
+ Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
+ Label fast_double, fast_double_grow;
+ Label array, extra, check_if_double_array;
// Check that the object isn't a smi.
__ JumpIfSmi(rdx, &slow_with_tagged_index);
@@ -667,7 +781,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// rax: value
// rbx: FixedArray
// rcx: index
- __ j(above, &fast_object_with_map_check);
+ __ j(above, &fast_object);
// Slow case: call runtime.
__ bind(&slow);
@@ -691,18 +805,14 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Increment index to get new length.
__ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
__ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
- __ j(not_equal, &check_extra_double);
- __ leal(rdi, Operand(rcx, 1));
- __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
- __ jmp(&fast_object_without_map_check);
+ __ j(not_equal, &check_if_double_array);
+ __ jmp(&fast_object_grow);
- __ bind(&check_extra_double);
+ __ bind(&check_if_double_array);
// rdi: elements array's map
__ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
__ j(not_equal, &slow);
- __ leal(rdi, Operand(rcx, 1));
- __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
- __ jmp(&fast_double_without_map_check);
+ __ jmp(&fast_double_grow);
// Array case: Get the length and the elements array from the JS
// array. Check that the array is in fast mode (and writable); if it
@@ -718,92 +828,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
__ j(below_equal, &extra);
- // Fast case: Do the store.
- __ bind(&fast_object_with_map_check);
- // rax: value
- // rbx: receiver's elements array (a FixedArray)
- // rcx: index
- // rdx: receiver (a JSArray)
- __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
- __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
- __ j(not_equal, &fast_double_with_map_check);
- __ bind(&fast_object_without_map_check);
- // Smi stores don't require further checks.
- Label non_smi_value;
- __ JumpIfNotSmi(rax, &non_smi_value);
- // It's irrelevant whether array is smi-only or not when writing a smi.
- __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
- rax);
- __ ret(0);
-
- __ bind(&non_smi_value);
- // Writing a non-smi, check whether array allows non-smi elements.
- // r9: receiver's map
- __ CheckFastObjectElements(r9, &transition_smi_elements);
- __ bind(&finish_object_store);
- __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
- rax);
- __ movq(rdx, rax); // Preserve the value which is returned.
- __ RecordWriteArray(
- rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ ret(0);
-
- __ bind(&fast_double_with_map_check);
- // Check for fast double array case. If this fails, call through to the
- // runtime.
- // rdi: elements array's map
- __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
- __ j(not_equal, &slow);
- __ bind(&fast_double_without_map_check);
- // If the value is a number, store it as a double in the FastDoubleElements
- // array.
- __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
- &transition_double_elements);
- __ ret(0);
-
- __ bind(&transition_smi_elements);
- __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
-
- // Transition the array appropriately depending on the value type.
- __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
- __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &non_double_value);
-
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_DOUBLE_ELEMENTS,
- rbx,
- rdi,
- &slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
- __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
- __ jmp(&fast_double_without_map_check);
-
- __ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
- rbx,
- rdi,
- &slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
-
- __ bind(&transition_double_elements);
- // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
- // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
- // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
- __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
- FAST_ELEMENTS,
- rbx,
- rdi,
- &slow);
- ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
- __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
- __ jmp(&finish_object_store);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
+ &slow, kCheckMap, kDontIncrementLength);
+ KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
+ &slow, kDontCheckMap, kIncrementLength);
}
@@ -823,7 +851,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
Code::Flags flags = Code::ComputeFlags(kind,
MONOMORPHIC,
extra_state,
- NORMAL,
+ Code::NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
rax);
@@ -1642,7 +1670,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ movq(rax, rdx);
__ Ret();
__ bind(&fail);
diff --git a/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc b/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
index d34a520..a948ccc 100644
--- a/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
@@ -92,17 +92,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LChunkBuilder::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -128,6 +119,8 @@ void LCodeGen::Comment(const char* format, ...) {
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
+ ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -140,6 +133,8 @@ bool LCodeGen::GeneratePrologue() {
// object). rcx is zero for method calls and non-zero for function
// calls.
if (!info_->is_classic_mode() || info_->is_native()) {
+ Label begin;
+ __ bind(&begin);
Label ok;
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
@@ -148,6 +143,8 @@ bool LCodeGen::GeneratePrologue() {
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ movq(Operand(rsp, receiver_offset), kScratchRegister);
__ bind(&ok);
+ ASSERT(!FLAG_age_code ||
+ (kSizeOfOptimizedStrictModePrologue == ok.pos() - begin.pos()));
}
__ push(rbp); // Caller's frame pointer.
@@ -321,24 +318,24 @@ bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
int LCodeGen::ToInteger32(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
- value->Number());
- return static_cast<int32_t>(value->Number());
+ ASSERT(constant->HasInteger32Value());
+ return constant->Integer32Value();
}
double LCodeGen::ToDouble(LConstantOperand* op) const {
- Handle<Object> value = chunk_->LookupLiteral(op);
- return value->Number();
+ HConstant* constant = chunk_->LookupConstant(op);
+ ASSERT(constant->HasDoubleValue());
+ return constant->DoubleValue();
}
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
- Handle<Object> literal = chunk_->LookupLiteral(op);
+ HConstant* constant = chunk_->LookupConstant(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
- return literal;
+ return constant->handle();
}
@@ -359,7 +356,9 @@ Operand LCodeGen::ToOperand(LOperand* op) const {
void LCodeGen::WriteTranslation(LEnvironment* environment,
- Translation* translation) {
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count) {
if (environment == NULL) return;
// The translation includes one command per value in the environment.
@@ -367,8 +366,21 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
// The output frame height does not include the parameters.
int height = translation_size - environment->parameter_count();
- WriteTranslation(environment->outer(), translation);
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
+ // Function parameters are arguments to the outermost environment. The
+ // arguments index points to the first element of a sequence of tagged
+ // values on the stack that represent the arguments. This needs to be
+ // kept in sync with the LArgumentsElements implementation.
+ *arguments_index = -environment->parameter_count();
+ *arguments_count = environment->parameter_count();
+
+ WriteTranslation(environment->outer(),
+ translation,
+ arguments_index,
+ arguments_count);
+ int closure_id = *info()->closure() != *environment->closure()
+ ? DefineDeoptimizationLiteral(environment->closure())
+ : Translation::kSelfLiteralId;
+
switch (environment->frame_type()) {
case JS_FUNCTION:
translation->BeginJSFrame(environment->ast_id(), closure_id, height);
@@ -376,12 +388,31 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
case JS_CONSTRUCT:
translation->BeginConstructStubFrame(closure_id, translation_size);
break;
+ case JS_GETTER:
+ ASSERT(translation_size == 1);
+ ASSERT(height == 0);
+ translation->BeginGetterStubFrame(closure_id);
+ break;
+ case JS_SETTER:
+ ASSERT(translation_size == 2);
+ ASSERT(height == 0);
+ translation->BeginSetterStubFrame(closure_id);
+ break;
case ARGUMENTS_ADAPTOR:
translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
break;
- default:
- UNREACHABLE();
}
+
+ // Inlined frames which push their arguments cause the index to be
+ // bumped and a new stack area to be used for materialization.
+ if (environment->entry() != NULL &&
+ environment->entry()->arguments_pushed()) {
+ *arguments_index = *arguments_index < 0
+ ? GetStackSlotCount()
+ : *arguments_index + *arguments_count;
+ *arguments_count = environment->entry()->arguments_count() + 1;
+ }
+
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -392,7 +423,10 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
translation->MarkDuplicate();
AddToTranslation(translation,
environment->spilled_registers()[value->index()],
- environment->HasTaggedValueAt(i));
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
} else if (
value->IsDoubleRegister() &&
environment->spilled_double_registers()[value->index()] != NULL) {
@@ -400,26 +434,39 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
AddToTranslation(
translation,
environment->spilled_double_registers()[value->index()],
- false);
+ false,
+ false,
+ *arguments_index,
+ *arguments_count);
}
}
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
+ AddToTranslation(translation,
+ value,
+ environment->HasTaggedValueAt(i),
+ environment->HasUint32ValueAt(i),
+ *arguments_index,
+ *arguments_count);
}
}
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged) {
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count) {
if (op == NULL) {
// TODO(twuerthinger): Introduce marker operands to indicate that this value
// is not present and must be reconstructed from the deoptimizer. Currently
// this is only used for the arguments object.
- translation->StoreArgumentsObject();
+ translation->StoreArgumentsObject(arguments_index, arguments_count);
} else if (op->IsStackSlot()) {
if (is_tagged) {
translation->StoreStackSlot(op->index());
+ } else if (is_uint32) {
+ translation->StoreUint32StackSlot(op->index());
} else {
translation->StoreInt32StackSlot(op->index());
}
@@ -433,6 +480,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
Register reg = ToRegister(op);
if (is_tagged) {
translation->StoreRegister(reg);
+ } else if (is_uint32) {
+ translation->StoreUint32Register(reg);
} else {
translation->StoreInt32Register(reg);
}
@@ -440,8 +489,8 @@ void LCodeGen::AddToTranslation(Translation* translation,
XMMRegister reg = ToDoubleRegister(op);
translation->StoreDoubleRegister(reg);
} else if (op->IsConstantOperand()) {
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
- int src_index = DefineDeoptimizationLiteral(literal);
+ HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
+ int src_index = DefineDeoptimizationLiteral(constant->handle());
translation->StoreLiteral(src_index);
} else {
UNREACHABLE();
@@ -518,20 +567,22 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int frame_count = 0;
int jsframe_count = 0;
+ int args_index = 0;
+ int args_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
- WriteTranslation(environment, &translation);
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
+ WriteTranslation(environment, &translation, &args_index, &args_count);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, environment->zone());
}
}
@@ -553,7 +604,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
// jump entry if this is the case.
if (jump_table_.is_empty() ||
jump_table_.last().address != entry) {
- jump_table_.Add(JumpTableEntry(entry));
+ jump_table_.Add(JumpTableEntry(entry), zone());
}
__ j(cc, &jump_table_.last().label);
}
@@ -577,13 +628,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
}
data->SetLiteralArray(*literals);
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
+ data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
// Populate the deoptimization entries.
for (int i = 0; i < length; i++) {
LEnvironment* env = deoptimizations_[i];
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
+ data->SetAstId(i, env->ast_id());
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
@@ -598,7 +649,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -645,14 +696,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register rsi always contains a pointer to the context.
- safepoint.DefinePointerRegister(rsi);
+ safepoint.DefinePointerRegister(rsi, zone());
}
}
@@ -664,7 +715,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -772,7 +823,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
void LCodeGen::DoModI(LModI* instr) {
if (instr->hydrogen()->HasPowerOf2Divisor()) {
- Register dividend = ToRegister(instr->InputAt(0));
+ Register dividend = ToRegister(instr->left());
int32_t divisor =
HConstant::cast(instr->hydrogen()->right())->Integer32Value();
@@ -796,8 +847,8 @@ void LCodeGen::DoModI(LModI* instr) {
__ bind(&done);
} else {
Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
- Register left_reg = ToRegister(instr->InputAt(0));
- Register right_reg = ToRegister(instr->InputAt(1));
+ Register left_reg = ToRegister(instr->left());
+ Register right_reg = ToRegister(instr->right());
Register result_reg = ToRegister(instr->result());
ASSERT(left_reg.is(rax));
@@ -827,7 +878,7 @@ void LCodeGen::DoModI(LModI* instr) {
__ j(less, &remainder_eq_dividend, Label::kNear);
// Check if the divisor is a PowerOfTwo integer.
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
__ movl(scratch, right_reg);
__ subl(scratch, Immediate(1));
__ testl(scratch, right_reg);
@@ -883,12 +934,95 @@ void LCodeGen::DoModI(LModI* instr) {
}
+void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
+ ASSERT(instr->right()->IsConstantOperand());
+
+ const Register dividend = ToRegister(instr->left());
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->right()));
+ const Register result = ToRegister(instr->result());
+
+ switch (divisor) {
+ case 0:
+ DeoptimizeIf(no_condition, instr->environment());
+ return;
+
+ case 1:
+ if (!result.is(dividend)) {
+ __ movl(result, dividend);
+ }
+ return;
+
+ case -1:
+ if (!result.is(dividend)) {
+ __ movl(result, dividend);
+ }
+ __ negl(result);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(zero, instr->environment());
+ }
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+ DeoptimizeIf(overflow, instr->environment());
+ }
+ return;
+ }
+
+ uint32_t divisor_abs = abs(divisor);
+ if (IsPowerOf2(divisor_abs)) {
+ int32_t power = WhichPowerOf2(divisor_abs);
+ if (divisor < 0) {
+ __ movsxlq(result, dividend);
+ __ neg(result);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(zero, instr->environment());
+ }
+ __ sar(result, Immediate(power));
+ } else {
+ if (!result.is(dividend)) {
+ __ movl(result, dividend);
+ }
+ __ sarl(result, Immediate(power));
+ }
+ } else {
+ Register reg1 = ToRegister(instr->temp());
+ Register reg2 = ToRegister(instr->result());
+
+ // Find b which: 2^b < divisor_abs < 2^(b+1).
+ unsigned b = 31 - CompilerIntrinsics::CountLeadingZeros(divisor_abs);
+ unsigned shift = 32 + b; // Precision +1bit (effectively).
+ double multiplier_f =
+ static_cast<double>(static_cast<uint64_t>(1) << shift) / divisor_abs;
+ int64_t multiplier;
+ if (multiplier_f - floor(multiplier_f) < 0.5) {
+ multiplier = static_cast<int64_t>(floor(multiplier_f));
+ } else {
+ multiplier = static_cast<int64_t>(floor(multiplier_f)) + 1;
+ }
+ // The multiplier is a uint32.
+ ASSERT(multiplier > 0 &&
+ multiplier < (static_cast<int64_t>(1) << 32));
+ // The multiply is int64, so sign-extend to r64.
+ __ movsxlq(reg1, dividend);
+ if (divisor < 0 &&
+ instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ neg(reg1);
+ DeoptimizeIf(zero, instr->environment());
+ }
+ __ movq(reg2, multiplier, RelocInfo::NONE);
+ // Result just fit in r64, because it's int32 * uint32.
+ __ imul(reg2, reg1);
+
+ __ addq(reg2, Immediate(1 << 30));
+ __ sar(reg2, Immediate(shift));
+ }
+}
+
+
void LCodeGen::DoDivI(LDivI* instr) {
- LOperand* right = instr->InputAt(1);
+ LOperand* right = instr->right();
ASSERT(ToRegister(instr->result()).is(rax));
- ASSERT(ToRegister(instr->InputAt(0)).is(rax));
- ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
- ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
+ ASSERT(ToRegister(instr->left()).is(rax));
+ ASSERT(!ToRegister(instr->right()).is(rax));
+ ASSERT(!ToRegister(instr->right()).is(rdx));
Register left_reg = rax;
@@ -930,8 +1064,8 @@ void LCodeGen::DoDivI(LDivI* instr) {
void LCodeGen::DoMulI(LMulI* instr) {
- Register left = ToRegister(instr->InputAt(0));
- LOperand* right = instr->InputAt(1);
+ Register left = ToRegister(instr->left());
+ LOperand* right = instr->right();
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
__ movl(kScratchRegister, left);
@@ -996,8 +1130,11 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ testl(left, left);
__ j(not_zero, &done, Label::kNear);
if (right->IsConstantOperand()) {
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
+ if (ToInteger32(LConstantOperand::cast(right)) < 0) {
DeoptimizeIf(no_condition, instr->environment());
+ } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
+ __ cmpl(kScratchRegister, Immediate(0));
+ DeoptimizeIf(less, instr->environment());
}
} else if (right->IsStackSlot()) {
__ orl(kScratchRegister, ToOperand(right));
@@ -1013,8 +1150,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
void LCodeGen::DoBitI(LBitI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
ASSERT(left->IsRegister());
@@ -1070,14 +1207,17 @@ void LCodeGen::DoBitI(LBitI* instr) {
void LCodeGen::DoShiftI(LShiftI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
ASSERT(left->IsRegister());
if (right->IsRegister()) {
ASSERT(ToRegister(right).is(rcx));
switch (instr->op()) {
+ case Token::ROR:
+ __ rorl_cl(ToRegister(left));
+ break;
case Token::SAR:
__ sarl_cl(ToRegister(left));
break;
@@ -1099,6 +1239,11 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
int value = ToInteger32(LConstantOperand::cast(right));
uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
switch (instr->op()) {
+ case Token::ROR:
+ if (shift_count != 0) {
+ __ rorl(ToRegister(left), Immediate(shift_count));
+ }
+ break;
case Token::SAR:
if (shift_count != 0) {
__ sarl(ToRegister(left), Immediate(shift_count));
@@ -1126,8 +1271,8 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
void LCodeGen::DoSubI(LSubI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
@@ -1161,7 +1306,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
if (int_val == 0) {
__ xorps(res, res);
} else {
- Register tmp = ToRegister(instr->TempAt(0));
+ Register tmp = ToRegister(instr->temp());
__ Set(tmp, int_val);
__ movq(res, tmp);
}
@@ -1181,21 +1326,28 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ movq(result, FieldOperand(array, JSArray::kLengthOffset));
}
void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
+ Register array = ToRegister(instr->value());
__ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}
+void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register map = ToRegister(instr->value());
+ __ EnumLength(result, map);
+}
+
+
void LCodeGen::DoElementsKind(LElementsKind* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
// Load map into |result|.
__ movq(result, FieldOperand(input, HeapObject::kMapOffset));
@@ -1208,7 +1360,7 @@ void LCodeGen::DoElementsKind(LElementsKind* instr) {
void LCodeGen::DoValueOf(LValueOf* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
ASSERT(input.is(result));
Label done;
@@ -1225,18 +1377,17 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
void LCodeGen::DoDateField(LDateField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->date());
Register result = ToRegister(instr->result());
Smi* index = instr->index();
- Label runtime, done;
+ Label runtime, done, not_date_object;
ASSERT(object.is(result));
ASSERT(object.is(rax));
-#ifdef DEBUG
- __ AbortIfSmi(object);
+ Condition cc = masm()->CheckSmi(object);
+ DeoptimizeIf(cc, instr->environment());
__ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
- __ Assert(equal, "Trying to get date field from non-date.");
-#endif
+ DeoptimizeIf(not_equal, instr->environment());
if (index->value() == 0) {
__ movq(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1268,14 +1419,14 @@ void LCodeGen::DoDateField(LDateField* instr) {
void LCodeGen::DoBitNotI(LBitNotI* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->Equals(instr->result()));
__ not_(ToRegister(input));
}
void LCodeGen::DoThrow(LThrow* instr) {
- __ push(ToRegister(instr->InputAt(0)));
+ __ push(ToRegister(instr->value()));
CallRuntime(Runtime::kThrow, 1, instr);
if (FLAG_debug_code) {
@@ -1286,8 +1437,8 @@ void LCodeGen::DoThrow(LThrow* instr) {
void LCodeGen::DoAddI(LAddI* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
@@ -1305,9 +1456,75 @@ void LCodeGen::DoAddI(LAddI* instr) {
}
+void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
+ ASSERT(left->Equals(instr->result()));
+ HMathMinMax::Operation operation = instr->hydrogen()->operation();
+ if (instr->hydrogen()->representation().IsInteger32()) {
+ Label return_left;
+ Condition condition = (operation == HMathMinMax::kMathMin)
+ ? less_equal
+ : greater_equal;
+ Register left_reg = ToRegister(left);
+ if (right->IsConstantOperand()) {
+ Immediate right_imm =
+ Immediate(ToInteger32(LConstantOperand::cast(right)));
+ __ cmpq(left_reg, right_imm);
+ __ j(condition, &return_left, Label::kNear);
+ __ movq(left_reg, right_imm);
+ } else if (right->IsRegister()) {
+ Register right_reg = ToRegister(right);
+ __ cmpq(left_reg, right_reg);
+ __ j(condition, &return_left, Label::kNear);
+ __ movq(left_reg, right_reg);
+ } else {
+ Operand right_op = ToOperand(right);
+ __ cmpq(left_reg, right_op);
+ __ j(condition, &return_left, Label::kNear);
+ __ movq(left_reg, right_op);
+ }
+ __ bind(&return_left);
+ } else {
+ ASSERT(instr->hydrogen()->representation().IsDouble());
+ Label check_nan_left, check_zero, return_left, return_right;
+ Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
+ XMMRegister left_reg = ToDoubleRegister(left);
+ XMMRegister right_reg = ToDoubleRegister(right);
+ __ ucomisd(left_reg, right_reg);
+ __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
+ __ j(equal, &check_zero, Label::kNear); // left == right.
+ __ j(condition, &return_left, Label::kNear);
+ __ jmp(&return_right, Label::kNear);
+
+ __ bind(&check_zero);
+ XMMRegister xmm_scratch = xmm0;
+ __ xorps(xmm_scratch, xmm_scratch);
+ __ ucomisd(left_reg, xmm_scratch);
+ __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
+ // At this point, both left and right are either 0 or -0.
+ if (operation == HMathMinMax::kMathMin) {
+ __ orpd(left_reg, right_reg);
+ } else {
+ // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
+ __ addsd(left_reg, right_reg);
+ }
+ __ jmp(&return_left, Label::kNear);
+
+ __ bind(&check_nan_left);
+ __ ucomisd(left_reg, left_reg); // NaN check.
+ __ j(parity_even, &return_left, Label::kNear);
+ __ bind(&return_right);
+ __ movsd(left_reg, right_reg);
+
+ __ bind(&return_left);
+ }
+}
+
+
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
- XMMRegister left = ToDoubleRegister(instr->InputAt(0));
- XMMRegister right = ToDoubleRegister(instr->InputAt(1));
+ XMMRegister left = ToDoubleRegister(instr->left());
+ XMMRegister right = ToDoubleRegister(instr->right());
XMMRegister result = ToDoubleRegister(instr->result());
// All operations except MOD are computed in-place.
ASSERT(instr->op() == Token::MOD || left.is(result));
@@ -1341,8 +1558,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
- ASSERT(ToRegister(instr->InputAt(1)).is(rax));
+ ASSERT(ToRegister(instr->left()).is(rdx));
+ ASSERT(ToRegister(instr->right()).is(rax));
ASSERT(ToRegister(instr->result()).is(rax));
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
@@ -1386,17 +1603,17 @@ void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32()) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
__ testl(reg, reg);
EmitBranch(true_block, false_block, not_zero);
} else if (r.IsDouble()) {
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister reg = ToDoubleRegister(instr->value());
__ xorps(xmm0, xmm0);
__ ucomisd(reg, xmm0);
EmitBranch(true_block, false_block, not_equal);
} else {
ASSERT(r.IsTagged());
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
@@ -1533,8 +1750,8 @@ inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
+ LOperand* left = instr->left();
+ LOperand* right = instr->right();
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
Condition cc = TokenToCondition(instr->op(), instr->is_double());
@@ -1581,8 +1798,8 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Register left = ToRegister(instr->left());
+ Register right = ToRegister(instr->right());
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1592,7 +1809,7 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
+ Register left = ToRegister(instr->left());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1602,7 +1819,7 @@ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int false_block = chunk_->LookupDestination(instr->false_block_id());
// If the expression is known to be untagged or a smi, then it's definitely
@@ -1632,7 +1849,7 @@ void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
__ JumpIfSmi(reg, false_label);
// Check for undetectable objects by looking in the bit field in
// the map. The object has already been smi checked.
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
__ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
__ testb(FieldOperand(scratch, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
@@ -1667,7 +1884,7 @@ Condition LCodeGen::EmitIsObject(Register input,
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1691,8 +1908,8 @@ Condition LCodeGen::EmitIsString(Register input,
void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register reg = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1709,11 +1926,11 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int false_block = chunk_->LookupDestination(instr->false_block_id());
Condition is_smi;
- if (instr->InputAt(0)->IsRegister()) {
- Register input = ToRegister(instr->InputAt(0));
+ if (instr->value()->IsRegister()) {
+ Register input = ToRegister(instr->value());
is_smi = masm()->CheckSmi(input);
} else {
- Operand input = ToOperand(instr->InputAt(0));
+ Operand input = ToOperand(instr->value());
is_smi = masm()->CheckSmi(input);
}
EmitBranch(true_block, false_block, is_smi);
@@ -1721,8 +1938,8 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1771,7 +1988,7 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1786,12 +2003,10 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
Register result = ToRegister(instr->result());
- if (FLAG_debug_code) {
- __ AbortIfNotString(input);
- }
+ __ AssertString(input);
__ movl(result, FieldOperand(input, String::kHashFieldOffset));
ASSERT(String::kHashShift >= kSmiTagSize);
@@ -1801,7 +2016,7 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1881,9 +2096,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register temp = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));
+ Register input = ToRegister(instr->value());
+ Register temp = ToRegister(instr->temp());
+ Register temp2 = ToRegister(instr->temp2());
Handle<String> class_name = instr->hydrogen()->class_name();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1899,7 +2114,7 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
- Register reg = ToRegister(instr->InputAt(0));
+ Register reg = ToRegister(instr->value());
int true_block = instr->true_block_id();
int false_block = instr->false_block_id();
@@ -1910,8 +2125,8 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
InstanceofStub stub(InstanceofStub::kNoFlags);
- __ push(ToRegister(instr->InputAt(0)));
- __ push(ToRegister(instr->InputAt(1)));
+ __ push(ToRegister(instr->left()));
+ __ push(ToRegister(instr->right()));
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
__ testq(rax, rax);
@@ -1942,10 +2157,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->value());
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result);
@@ -1955,7 +2170,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// instanceof stub.
Label cache_miss;
// Use a temp register to avoid memory operands with variable lengths.
- Register map = ToRegister(instr->TempAt(0));
+ Register map = ToRegister(instr->temp());
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
Handle<JSGlobalPropertyCell> cache_cell =
@@ -1998,7 +2213,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
InstanceofStub stub(flags);
- __ push(ToRegister(instr->InputAt(0)));
+ __ push(ToRegister(instr->value()));
__ PushHeapObject(instr->function());
static const int kAdditionalDelta = 10;
@@ -2098,7 +2313,7 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
// it as no longer deleted. We deoptimize in that case.
if (instr->hydrogen()->RequiresHoleCheck()) {
// We have a temp because CompareRoot might clobber kScratchRegister.
- Register cell = ToRegister(instr->TempAt(0));
+ Register cell = ToRegister(instr->temp());
ASSERT(!value.is(cell));
__ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
__ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
@@ -2166,7 +2381,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
SmiCheck check_needed =
type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
int offset = Context::SlotOffset(instr->slot_index());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
__ RecordWriteContextSlot(context,
offset,
value,
@@ -2181,7 +2396,7 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
if (instr->hydrogen()->is_in_object()) {
__ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
@@ -2195,12 +2410,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
- type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ type->LookupDescriptor(NULL, *name, &lookup);
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsField()) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2212,13 +2427,43 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsConstantFunction()) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
+ Heap* heap = type->GetHeap();
+ while (*current != heap->null_value()) {
+ __ LoadHeapObject(result, current);
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Handle<Map>(current->map()));
+ DeoptimizeIf(not_equal, env);
+ current =
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
+// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
+// prototype chain, which causes unbounded code generation.
+static bool CompactEmit(SmallMapList* list,
+ Handle<String> name,
+ int i,
+ Isolate* isolate) {
+ Handle<Map> map = list->at(i);
+ // If the map has ElementsKind transitions, we will generate map checks
+ // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
+ if (map->HasElementsTransition()) return false;
+ LookupResult lookup(isolate);
+ map->LookupDescriptor(NULL, *name, &lookup);
+ return lookup.IsField() || lookup.IsConstantFunction();
+}
+
+
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
@@ -2232,18 +2477,32 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
}
Handle<String> name = instr->hydrogen()->name();
Label done;
+ bool all_are_compact = true;
+ for (int i = 0; i < map_count; ++i) {
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
+ all_are_compact = false;
+ break;
+ }
+ }
for (int i = 0; i < map_count; ++i) {
bool last = (i == map_count - 1);
Handle<Map> map = instr->hydrogen()->types()->at(i);
- __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
+ Label check_passed;
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
if (last && !need_generic) {
DeoptimizeIf(not_equal, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
} else {
Label next;
- __ j(not_equal, &next, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
+ bool compact = all_are_compact ? true :
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
__ bind(&next);
}
}
@@ -2309,7 +2568,7 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
__ movq(result, FieldOperand(input, JSObject::kElementsOffset));
if (FLAG_debug_code) {
Label done, ok, fail;
@@ -2325,8 +2584,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ movzxbq(temp, FieldOperand(temp, Map::kBitField2Offset));
__ and_(temp, Immediate(Map::kElementsKindMask));
__ shr(temp, Immediate(Map::kElementsKindShift));
- __ cmpl(temp, Immediate(FAST_ELEMENTS));
- __ j(equal, &ok, Label::kNear);
+ __ cmpl(temp, Immediate(GetInitialFastElementsKind()));
+ __ j(less, &fail, Label::kNear);
+ __ cmpl(temp, Immediate(TERMINAL_FAST_ELEMENTS_KIND));
+ __ j(less_equal, &ok, Label::kNear);
__ cmpl(temp, Immediate(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ j(less, &fail, Label::kNear);
__ cmpl(temp, Immediate(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
@@ -2343,7 +2604,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
void LCodeGen::DoLoadExternalArrayPointer(
LLoadExternalArrayPointer* instr) {
Register result = ToRegister(instr->result());
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->object());
__ movq(result, FieldOperand(input,
ExternalPixelArray::kExternalPointerOffset));
}
@@ -2353,118 +2614,47 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register arguments = ToRegister(instr->arguments());
Register length = ToRegister(instr->length());
Register result = ToRegister(instr->result());
-
+ // There are two words between the frame pointer and the last argument.
+ // Subtracting from length accounts for one of them add one more.
if (instr->index()->IsRegister()) {
__ subl(length, ToRegister(instr->index()));
} else {
__ subl(length, ToOperand(instr->index()));
}
- DeoptimizeIf(below_equal, instr->environment());
-
- // There are two words between the frame pointer and the last argument.
- // Subtracting from length accounts for one of them add one more.
__ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
- Register result = ToRegister(instr->result());
-
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits.
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
- }
-
- // Load the result.
- __ movq(result,
- BuildFastArrayOperand(instr->elements(),
- instr->key(),
- FAST_ELEMENTS,
- FixedArray::kHeaderSize - kHeapObjectTag,
- instr->additional_index()));
-
- // Check for the hole value.
- if (instr->hydrogen()->RequiresHoleCheck()) {
- __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr->environment());
- }
-}
-
-
-void LCodeGen::DoLoadKeyedFastDoubleElement(
- LLoadKeyedFastDoubleElement* instr) {
- XMMRegister result(ToDoubleRegister(instr->result()));
-
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
- }
-
- int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
- sizeof(kHoleNanLower32);
- Operand hole_check_operand = BuildFastArrayOperand(
- instr->elements(),
- instr->key(),
- FAST_DOUBLE_ELEMENTS,
- offset,
- instr->additional_index());
- __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr->environment());
-
- Operand double_load_operand = BuildFastArrayOperand(
- instr->elements(),
- instr->key(),
- FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
- instr->additional_index());
- __ movsd(result, double_load_operand);
-}
-
-
-Operand LCodeGen::BuildFastArrayOperand(
- LOperand* elements_pointer,
- LOperand* key,
- ElementsKind elements_kind,
- uint32_t offset,
- uint32_t additional_index) {
- Register elements_pointer_reg = ToRegister(elements_pointer);
- int shift_size = ElementsKindToShiftSize(elements_kind);
- if (key->IsConstantOperand()) {
- int constant_value = ToInteger32(LConstantOperand::cast(key));
- if (constant_value & 0xF0000000) {
- Abort("array index constant value too big");
+template <class T>
+inline void LCodeGen::PrepareKeyForKeyedOp(T* hydrogen_instr, LOperand* key) {
+ if (ArrayOpClobbersKey<T>(hydrogen_instr)) {
+ // Even though the HLoad/StoreKeyed (in this case) instructions force
+ // the input representation for the key to be an integer, the input
+ // gets replaced during bound check elimination with the index argument
+ // to the bounds check, which can be tagged, so that case must be
+ // handled here, too.
+ Register key_reg = ToRegister(key);
+ if (hydrogen_instr->key()->representation().IsTagged()) {
+ __ SmiToInteger64(key_reg, key_reg);
+ } else if (hydrogen_instr->IsDehoisted()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits
+ __ movsxlq(key_reg, key_reg);
}
- return Operand(elements_pointer_reg,
- ((constant_value + additional_index) << shift_size)
- + offset);
- } else {
- ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
- return Operand(elements_pointer_reg,
- ToRegister(key),
- scale_factor,
- offset + (additional_index << shift_size));
}
}
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
- LLoadKeyedSpecializedArrayElement* instr) {
+void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
ElementsKind elements_kind = instr->elements_kind();
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(),
- elements_kind,
- 0,
- instr->additional_index()));
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
- }
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp(instr->hydrogen(), key);
+ Operand operand(BuildFastArrayOperand(
+ instr->elements(),
+ key,
+ elements_kind,
+ 0,
+ instr->additional_index()));
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
XMMRegister result(ToDoubleRegister(instr->result()));
@@ -2493,17 +2683,19 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
break;
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
__ movl(result, operand);
- __ testl(result, result);
- // TODO(danno): we could be more clever here, perhaps having a special
- // version of the stub that detects if the overflow case actually
- // happens, and generate code that returns a double rather than int.
- DeoptimizeIf(negative, instr->environment());
+ if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
+ __ testl(result, result);
+ DeoptimizeIf(negative, instr->environment());
+ }
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2513,6 +2705,96 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
}
+void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
+ XMMRegister result(ToDoubleRegister(instr->result()));
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp<HLoadKeyed>(instr->hydrogen(), key);
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
+ sizeof(kHoleNanLower32);
+ Operand hole_check_operand = BuildFastArrayOperand(
+ instr->elements(),
+ key,
+ FAST_DOUBLE_ELEMENTS,
+ offset,
+ instr->additional_index());
+ __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ Operand double_load_operand = BuildFastArrayOperand(
+ instr->elements(),
+ key,
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+ __ movsd(result, double_load_operand);
+}
+
+
+void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
+ Register result = ToRegister(instr->result());
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp<HLoadKeyed>(instr->hydrogen(), key);
+
+ // Load the result.
+ __ movq(result,
+ BuildFastArrayOperand(instr->elements(),
+ key,
+ FAST_ELEMENTS,
+ FixedArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index()));
+
+ // Check for the hole value.
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ Condition smi = __ CheckSmi(result);
+ DeoptimizeIf(NegateCondition(smi), instr->environment());
+ } else {
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+ }
+}
+
+
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
+ if (instr->is_external()) {
+ DoLoadKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->representation().IsDouble()) {
+ DoLoadKeyedFixedDoubleArray(instr);
+ } else {
+ DoLoadKeyedFixedArray(instr);
+ }
+}
+
+
+Operand LCodeGen::BuildFastArrayOperand(
+ LOperand* elements_pointer,
+ LOperand* key,
+ ElementsKind elements_kind,
+ uint32_t offset,
+ uint32_t additional_index) {
+ Register elements_pointer_reg = ToRegister(elements_pointer);
+ int shift_size = ElementsKindToShiftSize(elements_kind);
+ if (key->IsConstantOperand()) {
+ int constant_value = ToInteger32(LConstantOperand::cast(key));
+ if (constant_value & 0xF0000000) {
+ Abort("array index constant value too big");
+ }
+ return Operand(elements_pointer_reg,
+ ((constant_value + additional_index) << shift_size)
+ + offset);
+ } else {
+ ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
+ return Operand(elements_pointer_reg,
+ ToRegister(key),
+ scale_factor,
+ offset + (additional_index << shift_size));
+ }
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(rdx));
ASSERT(ToRegister(instr->key()).is(rax));
@@ -2556,10 +2838,10 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Label done;
// If no arguments adaptor frame the number of arguments is fixed.
- if (instr->InputAt(0)->IsRegister()) {
- __ cmpq(rbp, ToRegister(instr->InputAt(0)));
+ if (instr->elements()->IsRegister()) {
+ __ cmpq(rbp, ToRegister(instr->elements()));
} else {
- __ cmpq(rbp, ToOperand(instr->InputAt(0)));
+ __ cmpq(rbp, ToOperand(instr->elements()));
}
__ movl(result, Immediate(scope()->num_parameters()));
__ j(equal, &done, Label::kNear);
@@ -2616,7 +2898,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
// TODO(kmillikin): We have a hydrogen value for the global object. See
// if it's better to use it than to explicitly fetch it from the context
// here.
- __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_INDEX));
+ __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
__ movq(receiver,
FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
@@ -2667,7 +2949,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
- LOperand* argument = instr->InputAt(0);
+ LOperand* argument = instr->value();
EmitPushTaggedOperand(argument);
}
@@ -2679,7 +2961,7 @@ void LCodeGen::DoDrop(LDrop* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- __ LoadHeapObject(result, instr->hydrogen()->closure());
+ __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
}
@@ -2734,14 +3016,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ LoadHeapObject(rdi, function);
}
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- }
+ // Change context.
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Set rax to arguments count if adaption is not needed. Assumes that rax
// is available to write to at this point.
@@ -2783,7 +3059,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(not_equal, instr->environment());
@@ -2835,7 +3111,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
__ testl(input_reg, input_reg);
Label is_positive;
__ j(not_sign, &is_positive);
@@ -2860,12 +3136,12 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
LUnaryMathOperation* instr_;
};
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
+ ASSERT(instr->value()->Equals(instr->result()));
Representation r = instr->hydrogen()->value()->representation();
if (r.IsDouble()) {
XMMRegister scratch = xmm0;
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
__ xorps(scratch, scratch);
__ subsd(scratch, input_reg);
__ andpd(input_reg, scratch);
@@ -2873,8 +3149,8 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
- Register input_reg = ToRegister(instr->InputAt(0));
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
+ Register input_reg = ToRegister(instr->value());
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
__ SmiToInteger32(input_reg, input_reg);
@@ -2888,8 +3164,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
- Label done;
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
if (CpuFeatures::IsSupported(SSE4_1)) {
CpuFeatures::Scope scope(SSE4_1);
@@ -2904,10 +3179,13 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
__ cmpl(output_reg, Immediate(0x80000000));
DeoptimizeIf(equal, instr->environment());
} else {
+ Label negative_sign, done;
// Deoptimize on negative inputs.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
- DeoptimizeIf(below, instr->environment());
+ DeoptimizeIf(parity_even, instr->environment());
+ __ j(below, &negative_sign, Label::kNear);
+
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// Check for negative zero.
Label positive_sign;
@@ -2922,19 +3200,30 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
// Use truncating instruction (OK because input is positive).
__ cvttsd2si(output_reg, input_reg);
-
// Overflow is signalled with minint.
__ cmpl(output_reg, Immediate(0x80000000));
DeoptimizeIf(equal, instr->environment());
+ __ jmp(&done, Label::kNear);
+
+ // Non-zero negative reaches here.
+ __ bind(&negative_sign);
+ // Truncate, then compare and compensate.
+ __ cvttsd2si(output_reg, input_reg);
+ __ cvtlsi2sd(xmm_scratch, output_reg);
+ __ ucomisd(input_reg, xmm_scratch);
+ __ j(equal, &done, Label::kNear);
+ __ subl(output_reg, Immediate(1));
+ DeoptimizeIf(overflow, instr->environment());
+
+ __ bind(&done);
}
- __ bind(&done);
}
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
const XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
Label done;
// xmm_scratch = 0.5
@@ -2979,7 +3268,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
__ sqrtsd(input_reg, input_reg);
}
@@ -2987,7 +3276,7 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
XMMRegister xmm_scratch = xmm0;
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
// Note that according to ECMA-262 15.8.2.13:
@@ -3028,11 +3317,11 @@ void LCodeGen::DoPower(LPower* instr) {
#else
Register exponent = rdi;
#endif
- ASSERT(!instr->InputAt(1)->IsRegister() ||
- ToRegister(instr->InputAt(1)).is(exponent));
- ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
- ToDoubleRegister(instr->InputAt(1)).is(xmm1));
- ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
+ ASSERT(!instr->right()->IsRegister() ||
+ ToRegister(instr->right()).is(exponent));
+ ASSERT(!instr->right()->IsDoubleRegister() ||
+ ToDoubleRegister(instr->right()).is(xmm1));
+ ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
if (exponent_type.IsTagged()) {
@@ -3065,7 +3354,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
@@ -3074,10 +3363,10 @@ void LCodeGen::DoRandom(LRandom* instr) {
// Choose the right register for the first argument depending on
// calling convention.
#ifdef _WIN64
- ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
+ ASSERT(ToRegister(instr->global_object()).is(rcx));
Register global_object = rcx;
#else
- ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
+ ASSERT(ToRegister(instr->global_object()).is(rdi));
Register global_object = rdi;
#endif
@@ -3085,11 +3374,11 @@ void LCodeGen::DoRandom(LRandom* instr) {
STATIC_ASSERT(kPointerSize == 2 * kSeedSize);
__ movq(global_object,
- FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
+ FieldOperand(global_object, GlobalObject::kNativeContextOffset));
static const int kRandomSeedOffset =
FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
__ movq(rbx, FieldOperand(global_object, kRandomSeedOffset));
- // rbx: FixedArray of the global context's random seeds
+ // rbx: FixedArray of the native context's random seeds
// Load state[0].
__ movl(rax, FieldOperand(rbx, ByteArray::kHeaderSize));
@@ -3292,7 +3581,7 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
void LCodeGen::DoCallNew(LCallNew* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
+ ASSERT(ToRegister(instr->constructor()).is(rdi));
ASSERT(ToRegister(instr->result()).is(rax));
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
@@ -3312,7 +3601,22 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = instr->offset();
if (!instr->transition().is_null()) {
- __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ __ Move(FieldOperand(object, HeapObject::kMapOffset),
+ instr->transition());
+ } else {
+ Register temp = ToRegister(instr->temp());
+ __ Move(kScratchRegister, instr->transition());
+ __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ kScratchRegister,
+ temp,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3322,7 +3626,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (instr->is_in_object()) {
__ movq(FieldOperand(object, offset), value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
// Update the write barrier for the object for in-object properties.
__ RecordWriteField(object,
offset,
@@ -3333,7 +3637,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
check_needed);
}
} else {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
__ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
__ movq(FieldOperand(temp, offset), value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
@@ -3363,21 +3667,76 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
}
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
- LStoreKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(),
- elements_kind,
- 0,
- instr->additional_index()));
+void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand) {
+ if (value->representation().IsTagged() && !value->type().IsSmi()) {
+ Condition cc;
+ if (operand->IsRegister()) {
+ cc = masm()->CheckSmi(ToRegister(operand));
+ } else {
+ cc = masm()->CheckSmi(ToOperand(operand));
+ }
+ DeoptimizeIf(NegateCondition(cc), environment);
+ }
+}
+
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
+void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->length(),
+ instr->length());
+ DeoptIfTaggedButNotSmi(instr->environment(),
+ instr->hydrogen()->index(),
+ instr->index());
+ if (instr->length()->IsRegister()) {
+ Register reg = ToRegister(instr->length());
+ if (!instr->hydrogen()->length()->representation().IsTagged()) {
+ __ AssertZeroExtended(reg);
+ }
+ if (instr->index()->IsConstantOperand()) {
+ int constant_index =
+ ToInteger32(LConstantOperand::cast(instr->index()));
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
+ __ Cmp(reg, Smi::FromInt(constant_index));
+ } else {
+ __ cmpq(reg, Immediate(constant_index));
+ }
+ } else {
+ Register reg2 = ToRegister(instr->index());
+ if (!instr->hydrogen()->index()->representation().IsTagged()) {
+ __ AssertZeroExtended(reg2);
+ }
+ __ cmpq(reg, reg2);
+ }
+ } else {
+ Operand length = ToOperand(instr->length());
+ if (instr->index()->IsConstantOperand()) {
+ int constant_index =
+ ToInteger32(LConstantOperand::cast(instr->index()));
+ if (instr->hydrogen()->length()->representation().IsTagged()) {
+ __ Cmp(length, Smi::FromInt(constant_index));
+ } else {
+ __ cmpq(length, Immediate(constant_index));
+ }
+ } else {
+ __ cmpq(length, ToRegister(instr->index()));
+ }
}
+ DeoptimizeIf(below_equal, instr->environment());
+}
+
+
+void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
+ ElementsKind elements_kind = instr->elements_kind();
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
+ Operand operand(BuildFastArrayOperand(
+ instr->elements(),
+ key,
+ elements_kind,
+ 0,
+ instr->additional_index()));
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
XMMRegister value(ToDoubleRegister(instr->value()));
@@ -3404,8 +3763,11 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3415,106 +3777,79 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
}
-void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- if (instr->length()->IsRegister()) {
- Register reg = ToRegister(instr->length());
- if (FLAG_debug_code) {
- __ AbortIfNotZeroExtended(reg);
- }
- if (instr->index()->IsConstantOperand()) {
- __ cmpq(reg,
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
- } else {
- Register reg2 = ToRegister(instr->index());
- if (FLAG_debug_code) {
- __ AbortIfNotZeroExtended(reg2);
- }
- __ cmpq(reg, reg2);
- }
- } else {
- if (instr->index()->IsConstantOperand()) {
- __ cmpq(ToOperand(instr->length()),
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
- } else {
- __ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
- }
+void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
+ XMMRegister value = ToDoubleRegister(instr->value());
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
+ if (instr->NeedsCanonicalization()) {
+ Label have_value;
+
+ __ ucomisd(value, value);
+ __ j(parity_odd, &have_value); // NaN.
+
+ __ Set(kScratchRegister, BitCast<uint64_t>(
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
+ __ movq(value, kScratchRegister);
+
+ __ bind(&have_value);
}
- DeoptimizeIf(below_equal, instr->environment());
+
+ Operand double_store_operand = BuildFastArrayOperand(
+ instr->elements(),
+ key,
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+
+ __ movsd(double_store_operand, value);
}
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
+void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
Register value = ToRegister(instr->value());
- Register elements = ToRegister(instr->object());
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
+ Register elements = ToRegister(instr->elements());
+ LOperand* key = instr->key();
+ PrepareKeyForKeyedOp<HStoreKeyed>(instr->hydrogen(), key);
Operand operand =
- BuildFastArrayOperand(instr->object(),
- instr->key(),
+ BuildFastArrayOperand(instr->elements(),
+ key,
FAST_ELEMENTS,
FixedArray::kHeaderSize - kHeapObjectTag,
instr->additional_index());
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
- }
-
- __ movq(operand, value);
-
if (instr->hydrogen()->NeedsWriteBarrier()) {
ASSERT(!instr->key()->IsConstantOperand());
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
// Compute address of modified element and store it into key register.
- __ lea(key, operand);
+ Register key_reg(ToRegister(key));
+ __ lea(key_reg, operand);
+ __ movq(Operand(key_reg, 0), value);
__ RecordWrite(elements,
- key,
+ key_reg,
value,
kSaveFPRegs,
EMIT_REMEMBERED_SET,
check_needed);
+ } else {
+ __ movq(operand, value);
}
}
-void LCodeGen::DoStoreKeyedFastDoubleElement(
- LStoreKeyedFastDoubleElement* instr) {
- XMMRegister value = ToDoubleRegister(instr->value());
-
- if (instr->NeedsCanonicalization()) {
- Label have_value;
-
- __ ucomisd(value, value);
- __ j(parity_odd, &have_value); // NaN.
-
- __ Set(kScratchRegister, BitCast<uint64_t>(
- FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
- __ movq(value, kScratchRegister);
-
- __ bind(&have_value);
- }
-
- Operand double_store_operand = BuildFastArrayOperand(
- instr->elements(),
- instr->key(),
- FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag,
- instr->additional_index());
-
- if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
- // Sign extend key because it could be a 32 bit negative value
- // and the dehoisted address computation happens in 64 bits
- Register key_reg = ToRegister(instr->key());
- __ movsxlq(key_reg, key_reg);
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+ if (instr->is_external()) {
+ DoStoreKeyedExternalArray(instr);
+ } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+ DoStoreKeyedFixedDoubleArray(instr);
+ } else {
+ DoStoreKeyedFixedArray(instr);
}
-
- __ movsd(double_store_operand, value);
}
+
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(rdx));
ASSERT(ToRegister(instr->key()).is(rcx));
@@ -3529,7 +3864,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
Register object_reg = ToRegister(instr->object());
- Register new_map_reg = ToRegister(instr->new_map_reg());
+ Register new_map_reg = ToRegister(instr->new_map_temp());
Handle<Map> from_map = instr->original_map();
Handle<Map> to_map = instr->transitioned_map();
@@ -3540,22 +3875,23 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
__ j(not_equal, &not_applicable);
__ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
__ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
// Write barrier.
- ASSERT_NE(instr->temp_reg(), NULL);
+ ASSERT_NE(instr->temp(), NULL);
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
- ToRegister(instr->temp_reg()), kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ ToRegister(instr->temp()), kDontSaveFPRegs);
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(rdx));
ASSERT(new_map_reg.is(rbx));
__ movq(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
- Register fixed_object_reg = ToRegister(instr->temp_reg());
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
+ Register fixed_object_reg = ToRegister(instr->temp());
ASSERT(fixed_object_reg.is(rdx));
ASSERT(new_map_reg.is(rbx));
__ movq(fixed_object_reg, object_reg);
@@ -3588,7 +3924,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
@@ -3622,9 +3958,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
__ push(index);
}
CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
- if (FLAG_debug_code) {
- __ AbortIfNotSmi(rax);
- }
+ __ AssertSmi(rax);
__ SmiToInteger32(rax, rax);
__ StoreToSafepointRegisterSlot(result, rax);
}
@@ -3642,7 +3976,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3686,7 +4020,7 @@ void LCodeGen::DoStringLength(LStringLength* instr) {
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
LOperand* output = instr->result();
ASSERT(output->IsDoubleRegister());
@@ -3698,8 +4032,19 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
}
+void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
+ LOperand* input = instr->value();
+ LOperand* output = instr->result();
+ LOperand* temp = instr->temp();
+
+ __ LoadUint32(ToDoubleRegister(output),
+ ToRegister(input),
+ ToDoubleRegister(temp));
+}
+
+
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister() && input->Equals(instr->result()));
Register reg = ToRegister(input);
@@ -3707,6 +4052,69 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
}
+void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
+ class DeferredNumberTagU: public LDeferredCode {
+ public:
+ DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() {
+ codegen()->DoDeferredNumberTagU(instr_);
+ }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LNumberTagU* instr_;
+ };
+
+ LOperand* input = instr->value();
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register reg = ToRegister(input);
+
+ DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
+ __ cmpl(reg, Immediate(Smi::kMaxValue));
+ __ j(above, deferred->entry());
+ __ Integer32ToSmi(reg, reg);
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredNumberTagU(LNumberTagU* instr) {
+ Label slow;
+ Register reg = ToRegister(instr->value());
+ Register tmp = reg.is(rax) ? rcx : rax;
+
+ // Preserve the value of all registers.
+ PushSafepointRegistersScope scope(this);
+
+ Label done;
+ // Load value into xmm1 which will be preserved across potential call to
+ // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable
+ // XMM registers on x64).
+ __ LoadUint32(xmm1, reg, xmm0);
+
+ if (FLAG_inline_new) {
+ __ AllocateHeapNumber(reg, tmp, &slow);
+ __ jmp(&done, Label::kNear);
+ }
+
+ // Slow case: Call the runtime system to do the number allocation.
+ __ bind(&slow);
+
+ // Put a valid pointer value in the stack slot where the result
+ // register is stored, as this register is in the pointer map, but contains an
+ // integer value.
+ __ StoreToSafepointRegisterSlot(reg, Immediate(0));
+
+ CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
+ if (!reg.is(rax)) __ movq(reg, rax);
+
+ // Done. Put the value in xmm1 into the value of the allocated heap
+ // number.
+ __ bind(&done);
+ __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), xmm1);
+ __ StoreToSafepointRegisterSlot(reg, reg);
+}
+
+
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
class DeferredNumberTagD: public LDeferredCode {
public:
@@ -3718,11 +4126,11 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
LNumberTagD* instr_;
};
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
Register reg = ToRegister(instr->result());
- Register tmp = ToRegister(instr->TempAt(0));
+ Register tmp = ToRegister(instr->temp());
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, deferred->entry());
} else {
@@ -3751,19 +4159,21 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
- Register input = ToRegister(instr->InputAt(0));
+ ASSERT(instr->value()->Equals(instr->result()));
+ Register input = ToRegister(instr->value());
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
__ Integer32ToSmi(input, input);
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
- Register input = ToRegister(instr->InputAt(0));
+ ASSERT(instr->value()->Equals(instr->result()));
+ Register input = ToRegister(instr->value());
if (instr->needs_check()) {
Condition is_smi = __ CheckSmi(input);
DeoptimizeIf(NegateCondition(is_smi), instr->environment());
+ } else {
+ __ AssertSmi(input);
}
__ SmiToInteger32(input, input);
}
@@ -3821,7 +4231,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Label done, heap_number;
- Register input_reg = ToRegister(instr->InputAt(0));
+ Register input_reg = ToRegister(instr->value());
// Heap number map check.
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3847,7 +4257,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
// Deoptimize if we don't have a heap number.
DeoptimizeIf(not_equal, instr->environment());
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
+ XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
__ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ cvttsd2si(input_reg, xmm0);
__ cvtlsi2sd(xmm_temp, input_reg);
@@ -3877,12 +4287,12 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
LTaggedToI* instr_;
};
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
ASSERT(input->Equals(instr->result()));
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
__ JumpIfNotSmi(input_reg, deferred->entry());
__ SmiToInteger32(input_reg, input_reg);
__ bind(deferred->exit());
@@ -3890,7 +4300,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
LOperand* result = instr->result();
ASSERT(result->IsDoubleRegister());
@@ -3906,7 +4316,7 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsDoubleRegister());
LOperand* result = instr->result();
ASSERT(result->IsRegister());
@@ -3946,21 +4356,21 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
DeoptimizeIf(NegateCondition(cc), instr->environment());
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
DeoptimizeIf(cc, instr->environment());
}
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
__ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
@@ -4032,7 +4442,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
@@ -4052,8 +4462,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
Register result_reg = ToRegister(instr->result());
- Register temp_reg = ToRegister(instr->TempAt(0));
- __ ClampDoubleToUint8(value_reg, xmm0, result_reg, temp_reg);
+ __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
}
@@ -4067,8 +4476,7 @@ void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
ASSERT(instr->unclamped()->Equals(instr->result()));
Register input_reg = ToRegister(instr->unclamped());
- Register temp_reg = ToRegister(instr->TempAt(0));
- XMMRegister temp_xmm_reg = ToDoubleRegister(instr->TempAt(1));
+ XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
Label is_smi, done, heap_number;
__ JumpIfSmi(input_reg, &is_smi);
@@ -4088,7 +4496,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
// Heap number
__ bind(&heap_number);
__ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
- __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg, temp_reg);
+ __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg);
__ jmp(&done, Label::kNear);
// smi
@@ -4101,7 +4509,8 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- Register reg = ToRegister(instr->TempAt(0));
+ ASSERT(instr->temp()->Equals(instr->result()));
+ Register reg = ToRegister(instr->temp());
Handle<JSObject> holder = instr->holder();
Handle<JSObject> current_prototype = instr->prototype();
@@ -4136,10 +4545,11 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
- Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch = ToRegister(instr->temp());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
Handle<Map> initial_map(constructor->initial_map());
int instance_size = initial_map->instance_size();
@@ -4172,7 +4582,7 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
if (FLAG_debug_code) {
- __ AbortIfSmi(map);
+ __ AssertNotSmi(map);
__ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
Immediate(instance_size >> kPointerSizeLog2));
__ Assert(equal, "Unexpected instance size");
@@ -4222,14 +4632,15 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Heap* heap = isolate()->heap();
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
// Load the map's "bit field 2".
@@ -4242,12 +4653,11 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
}
// Set up the parameters to the stub/runtime call.
- __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
+ __ PushHeapObject(literals);
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
- __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
+ __ Push(isolate()->factory()->empty_fixed_array());
// Pick the right runtime function or stub to call.
int length = instr->hydrogen()->length();
@@ -4375,10 +4785,11 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate()->GetElementsKind();
- // Deopt if the literal boilerplate ElementsKind is of a type different than
- // the expected one. The check isn't necessary if the boilerplate has already
- // been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
__ movq(rcx, FieldOperand(rbx, HeapObject::kMapOffset));
// Load the map's "bit field 2".
@@ -4440,7 +4851,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(rax));
+ ASSERT(ToRegister(instr->value()).is(rax));
__ push(rax);
CallRuntime(Runtime::kToFastProperties, 1, instr);
}
@@ -4449,14 +4860,12 @@ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Label materialized;
// Registers will be used as follows:
- // rdi = JS function.
// rcx = literals array.
// rbx = regexp literal.
// rax = regexp literal clone.
- __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
- int literal_offset = FixedArray::kHeaderSize +
- instr->hydrogen()->literal_index() * kPointerSize;
+ int literal_offset =
+ FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
+ __ LoadHeapObject(rcx, instr->hydrogen()->literals());
__ movq(rbx, FieldOperand(rcx, literal_offset));
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(not_equal, &materialized, Label::kNear);
@@ -4519,7 +4928,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
void LCodeGen::DoTypeof(LTypeof* instr) {
- LOperand* input = instr->InputAt(0);
+ LOperand* input = instr->value();
EmitPushTaggedOperand(input);
CallRuntime(Runtime::kTypeof, 1, instr);
}
@@ -4543,7 +4952,7 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
- Register input = ToRegister(instr->InputAt(0));
+ Register input = ToRegister(instr->value());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
@@ -4629,7 +5038,7 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
- Register temp = ToRegister(instr->TempAt(0));
+ Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -4756,7 +5165,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(below, deferred_stack_check->entry());
EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
@@ -4825,11 +5234,19 @@ void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
+ Label load_cache, done;
+ __ EnumLength(result, map);
+ __ Cmp(result, Smi::FromInt(0));
+ __ j(not_equal, &load_cache);
+ __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
+ __ jmp(&done);
+ __ bind(&load_cache);
__ LoadInstanceDescriptors(map, result);
__ movq(result,
- FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ FieldOperand(result, DescriptorArray::kEnumCacheOffset));
__ movq(result,
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
+ __ bind(&done);
Condition cc = masm()->CheckSmi(result);
DeoptimizeIf(cc, instr->environment());
}
diff --git a/src/3rdparty/v8/src/x64/lithium-codegen-x64.h b/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
index 73e1a9b..0f8a62a 100644
--- a/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
+++ b/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
@@ -46,21 +46,24 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
- : chunk_(chunk),
+ : zone_(info->zone()),
+ chunk_(static_cast<LPlatformChunk*>(chunk)),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, info->zone()),
+ jump_table_(4, info->zone()),
+ deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(info->zone()),
+ deferred_(8, info->zone()),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(info->zone()),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -72,6 +75,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
Register ToRegister(LOperand* op) const;
@@ -94,6 +98,7 @@ class LCodeGen BASE_EMBEDDED {
// Deferred code support.
void DoDeferredNumberTagD(LNumberTagD* instr);
+ void DoDeferredNumberTagU(LNumberTagU* instr);
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
@@ -112,7 +117,10 @@ class LCodeGen BASE_EMBEDDED {
void DoGap(LGap* instr);
// Emit frame translation commands for an environment.
- void WriteTranslation(LEnvironment* environment, Translation* translation);
+ void WriteTranslation(LEnvironment* environment,
+ Translation* translation,
+ int* arguments_index,
+ int* arguments_count);
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
@@ -136,7 +144,7 @@ class LCodeGen BASE_EMBEDDED {
return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -152,10 +160,10 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
int GetParameterCount() const { return scope()->num_parameters(); }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -219,7 +227,10 @@ class LCodeGen BASE_EMBEDDED {
void AddToTranslation(Translation* translation,
LOperand* op,
- bool is_tagged);
+ bool is_tagged,
+ bool is_uint32,
+ int arguments_index,
+ int arguments_count);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
@@ -267,6 +278,11 @@ class LCodeGen BASE_EMBEDDED {
bool deoptimize_on_minus_zero,
LEnvironment* env);
+
+ void DeoptIfTaggedButNotSmi(LEnvironment* environment,
+ HValue* value,
+ LOperand* operand);
+
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
// true and false label should be made, to optimize fallthrough.
@@ -296,7 +312,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits code for pushing either a tagged constant, a (non-double)
// register, or a stack slot operand.
@@ -318,8 +335,17 @@ class LCodeGen BASE_EMBEDDED {
};
void EnsureSpaceForLazyDeopt(int space_needed);
-
- LChunk* const chunk_;
+ void DoLoadKeyedExternalArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
+ void DoLoadKeyedFixedArray(LLoadKeyed* instr);
+ void DoStoreKeyedExternalArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
+ void DoStoreKeyedFixedArray(LStoreKeyed* instr);
+ template <class T>
+ void PrepareKeyForKeyedOp(T* hydrogen_instr, LOperand* key);
+
+ Zone* zone_;
+ LPlatformChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc b/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
index 877ea8c..22183a2 100644
--- a/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
@@ -36,7 +36,7 @@ namespace v8 {
namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
- : cgen_(owner), moves_(32) {}
+ : cgen_(owner), moves_(32, owner->zone()) {}
void LGapResolver::Resolve(LParallelMove* parallel_move) {
@@ -74,7 +74,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
diff --git a/src/3rdparty/v8/src/x64/lithium-x64.cc b/src/3rdparty/v8/src/x64/lithium-x64.cc
index 6d723a5..3985dc0 100644
--- a/src/3rdparty/v8/src/x64/lithium-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-x64.cc
@@ -179,6 +179,7 @@ const char* LArithmeticT::Mnemonic() const {
case Token::BIT_AND: return "bit-and-t";
case Token::BIT_OR: return "bit-or-t";
case Token::BIT_XOR: return "bit-xor-t";
+ case Token::ROR: return "ror-t";
case Token::SHL: return "sal-t";
case Token::SAR: return "sar-t";
case Token::SHR: return "shr-t";
@@ -196,22 +197,22 @@ void LGoto::PrintDataTo(StringStream* stream) {
void LBranch::PrintDataTo(StringStream* stream) {
stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LCmpIDAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ left()->PrintTo(stream);
stream->Add(" %s ", Token::String(op()));
- InputAt(1)->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
}
void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(kind() == kStrictEquality ? " === " : " == ");
stream->Add(nil() == kNullValue ? "null" : "undefined");
stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
@@ -220,57 +221,57 @@ void LIsNilAndBranch::PrintDataTo(StringStream* stream) {
void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_object(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_string(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_undetectable(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if string_compare(");
- InputAt(0)->PrintTo(stream);
- InputAt(1)->PrintTo(stream);
+ left()->PrintTo(stream);
+ right()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_cached_array_index(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
}
void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if class_of_test(");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(", \"%o\") then B%d else B%d",
*hydrogen()->class_name(),
true_block_id(),
@@ -280,7 +281,7 @@ void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if typeof ");
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
stream->Add(" == \"%s\" then B%d else B%d",
*hydrogen()->type_literal()->ToCString(),
true_block_id(), false_block_id());
@@ -294,26 +295,26 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
stream->Add("/%s ", hydrogen()->OpName());
- InputAt(0)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
}
void LStoreContextSlot::PrintDataTo(StringStream* stream) {
- InputAt(0)->PrintTo(stream);
+ context()->PrintTo(stream);
stream->Add("[%d] <- ", slot_index());
- InputAt(1)->PrintTo(stream);
+ value()->PrintTo(stream);
}
void LInvokeFunction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ function()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -342,7 +343,7 @@ void LCallKnownGlobal::PrintDataTo(StringStream* stream) {
void LCallNew::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- InputAt(0)->PrintTo(stream);
+ constructor()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
@@ -358,56 +359,20 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
}
-int LChunk::GetNextSpillIndex(bool is_double) {
+int LPlatformChunk::GetNextSpillIndex(bool is_double) {
return spill_slot_count_++;
}
-LOperand* LChunk::GetNextSpillSlot(bool is_double) {
+LOperand* LPlatformChunk::GetNextSpillSlot(bool is_double) {
// All stack slots are Double stack slots on x64.
// Alternatively, at some point, start using half-size
// stack slots for int32 values.
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
- }
-}
-
-
-void LChunk::MarkEmptyBlocks() {
- HPhase phase("L_Mark empty blocks", this);
- for (int i = 0; i < graph()->blocks()->length(); ++i) {
- HBasicBlock* block = graph()->blocks()->at(i);
- int first = block->first_instruction_index();
- int last = block->last_instruction_index();
- LInstruction* first_instr = instructions()->at(first);
- LInstruction* last_instr = instructions()->at(last);
-
- LLabel* label = LLabel::cast(first_instr);
- if (last_instr->IsGoto()) {
- LGoto* goto_instr = LGoto::cast(last_instr);
- if (label->IsRedundant() &&
- !label->is_loop_header()) {
- bool can_eliminate = true;
- for (int i = first + 1; i < last && can_eliminate; ++i) {
- LInstruction* cur = instructions()->at(i);
- if (cur->IsGap()) {
- LGap* gap = LGap::cast(cur);
- if (!gap->IsRedundant()) {
- can_eliminate = false;
- }
- } else {
- can_eliminate = false;
- }
- }
-
- if (can_eliminate) {
- label->set_replacement(GetLabel(goto_instr->block_id()));
- }
- }
- }
+ return LStackSlot::Create(index, zone());
}
}
@@ -430,16 +395,7 @@ void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
}
-void LStoreKeyedFastElement::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add("[");
- key()->PrintTo(stream);
- stream->Add("] <- ");
- value()->PrintTo(stream);
-}
-
-
-void LStoreKeyedFastDoubleElement::PrintDataTo(StringStream* stream) {
+void LStoreKeyed::PrintDataTo(StringStream* stream) {
elements()->PrintTo(stream);
stream->Add("[");
key()->PrintTo(stream);
@@ -463,83 +419,9 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
}
-void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
- int index = -1;
- if (instr->IsControl()) {
- instructions_.Add(gap);
- index = instructions_.length();
- instructions_.Add(instr);
- } else {
- index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
- }
- if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
- instr->pointer_map()->set_lithium_position(index);
- }
-}
-
-
-LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
-}
-
-
-int LChunk::GetParameterStackSlot(int index) const {
- // The receiver is at index 0, the first parameter at index 1, so we
- // shift all parameter indexes down by the number of parameters, and
- // make sure they end up negative so they are distinguishable from
- // spill slots.
- int result = index - info()->scope()->num_parameters() - 1;
- ASSERT(result < 0);
- return result;
-}
-
-// A parameter relative to ebp in the arguments stub.
-int LChunk::ParameterAt(int index) {
- ASSERT(-1 <= index); // -1 is the receiver.
- return (1 + info()->scope()->num_parameters() - index) *
- kPointerSize;
-}
-
-
-LGap* LChunk::GetGapAt(int index) const {
- return LGap::cast(instructions_[index]);
-}
-
-
-bool LChunk::IsGapAt(int index) const {
- return instructions_[index]->IsGap();
-}
-
-
-int LChunk::NearestGapPos(int index) const {
- while (!IsGapAt(index)) index--;
- return index;
-}
-
-
-void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
-}
-
-
-Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
- return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
-}
-
-
-Representation LChunk::LookupLiteralRepresentation(
- LConstantOperand* operand) const {
- return graph_->LookupValue(operand->index())->representation();
-}
-
-
-LChunk* LChunkBuilder::Build() {
+LPlatformChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new(zone()) LChunk(info(), graph());
+ chunk_ = new(zone()) LPlatformChunk(info(), graph());
HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@@ -554,17 +436,8 @@ LChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* format, ...) {
- if (FLAG_trace_bailout) {
- SmartArrayPointer<char> name(
- info()->shared_info()->DebugName()->ToCString());
- PrintF("Aborting LChunk building in @\"%s\": ", *name);
- va_list arguments;
- va_start(arguments, format);
- OS::VPrint(format, arguments);
- va_end(arguments);
- PrintF("\n");
- }
+void LCodeGen::Abort(const char* reason) {
+ info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -735,7 +608,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
@@ -757,7 +630,7 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -830,13 +703,16 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
// Shift operations can only deoptimize if we do a logical shift by 0 and
// the result cannot be truncated to int32.
- bool may_deopt = (op == Token::SHR && constant_value == 0);
bool does_deopt = false;
- if (may_deopt) {
- for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
- if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
- does_deopt = true;
- break;
+ if (op == Token::SHR && constant_value == 0) {
+ if (FLAG_opt_safe_uint32_operations) {
+ does_deopt = !instr->CheckFlag(HInstruction::kUint32);
+ } else {
+ for (HUseIterator it(instr->uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(HValue::kTruncatingToInt32)) {
+ does_deopt = true;
+ break;
+ }
}
}
}
@@ -969,8 +845,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
- int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber ||
+ BailoutId ast_id = hydrogen_env->ast_id();
+ ASSERT(!ast_id.IsNone() ||
hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result = new(zone()) LEnvironment(
@@ -980,7 +856,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ hydrogen_env->entry(),
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -994,7 +872,9 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
} else {
op = UseAny(value);
}
- result->AddValue(op, value->representation());
+ result->AddValue(op,
+ value->representation(),
+ value->CheckFlag(HInstruction::kUint32));
}
if (hydrogen_env->frame_type() == JS_FUNCTION) {
@@ -1221,6 +1101,11 @@ LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
}
+LInstruction* LChunkBuilder::DoRor(HRor* instr) {
+ return DoShift(Token::ROR, instr);
+}
+
+
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
return DoShift(Token::SHR, instr);
}
@@ -1285,12 +1170,55 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
-LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
- UNIMPLEMENTED();
+HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
+ // A value with an integer representation does not need to be transformed.
+ if (dividend->representation().IsInteger32()) {
+ return dividend;
+ // A change from an integer32 can be replaced by the integer32 value.
+ } else if (dividend->IsChange() &&
+ HChange::cast(dividend)->from().IsInteger32()) {
+ return HChange::cast(dividend)->value();
+ }
+ return NULL;
+}
+
+
+HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) {
+ if (divisor->IsConstant() &&
+ HConstant::cast(divisor)->HasInteger32Value()) {
+ HConstant* constant_val = HConstant::cast(divisor);
+ return constant_val->CopyToRepresentation(Representation::Integer32(),
+ divisor->block()->zone());
+ }
return NULL;
}
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ HValue* right = instr->right();
+ ASSERT(right->IsConstant() && HConstant::cast(right)->HasInteger32Value());
+ LOperand* divisor = chunk_->DefineConstantOperand(HConstant::cast(right));
+ int32_t divisor_si = HConstant::cast(right)->Integer32Value();
+ if (divisor_si == 0) {
+ LOperand* dividend = UseRegister(instr->left());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, NULL)));
+ } else if (IsPowerOf2(abs(divisor_si))) {
+ LOperand* dividend = UseRegisterAtStart(instr->left());
+ LInstruction* result = DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, NULL));
+ return divisor_si < 0 ? AssignEnvironment(result) : result;
+ } else {
+ // use two r64
+ LOperand* dividend = UseRegisterAtStart(instr->left());
+ LOperand* temp = TempRegister();
+ LInstruction* result = DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, temp));
+ return divisor_si < 0 ? AssignEnvironment(result) : result;
+ }
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1396,6 +1324,26 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
}
+LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
+ LOperand* left = NULL;
+ LOperand* right = NULL;
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ left = UseRegisterAtStart(instr->LeastConstantOperand());
+ right = UseOrConstantAtStart(instr->MostConstantOperand());
+ } else {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ left = UseRegisterAtStart(instr->left());
+ right = UseRegisterAtStart(instr->right());
+ }
+ LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
+ return DefineSameAsFirst(minmax);
+}
+
+
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
ASSERT(instr->representation().IsDouble());
// We call a C function for double power. It can't trigger a GC.
@@ -1578,6 +1526,12 @@ LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
}
+LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
+ LOperand* map = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LMapEnumLength(map));
+}
+
+
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LElementsKind(object));
@@ -1593,8 +1547,8 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), rax);
- LDateField* result = new LDateField(object, instr->index());
- return MarkAsCall(DefineFixed(result, rax), instr);
+ LDateField* result = new(zone()) LDateField(object, instr->index());
+ return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1642,14 +1596,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
- if (needs_check) {
+ if (instr->value()->type().IsSmi()) {
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
+ } else {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
- } else {
- return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1670,16 +1623,26 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (to.IsTagged()) {
HValue* val = instr->value();
LOperand* value = UseRegister(val);
- if (val->HasRange() && val->range()->IsInSmiRange()) {
+ if (val->CheckFlag(HInstruction::kUint32)) {
+ LOperand* temp = FixedTemp(xmm1);
+ LNumberTagU* result = new(zone()) LNumberTagU(value, temp);
+ return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ } else if (val->HasRange() && val->range()->IsInSmiRange()) {
return DefineSameAsFirst(new(zone()) LSmiTag(value));
} else {
LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
}
} else {
- ASSERT(to.IsDouble());
- LOperand* value = Use(instr->value());
- return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
+ if (instr->value()->CheckFlag(HInstruction::kUint32)) {
+ LOperand* temp = FixedTemp(xmm1);
+ return DefineAsRegister(
+ new(zone()) LUint32ToDouble(UseRegister(instr->value()), temp));
+ } else {
+ ASSERT(to.IsDouble());
+ LOperand* value = Use(instr->value());
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
+ }
}
}
UNREACHABLE();
@@ -1701,9 +1664,9 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LOperand* temp = TempRegister();
+ LUnallocated* temp = TempRegister();
LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
- return AssignEnvironment(result);
+ return AssignEnvironment(Define(result, temp));
}
@@ -1731,8 +1694,7 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
- return DefineAsRegister(new(zone()) LClampDToUint8(reg,
- TempRegister()));
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg));
} else if (input_rep.IsInteger32()) {
return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
} else {
@@ -1740,7 +1702,6 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve xmm1 explicitly.
LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
- TempRegister(),
FixedTemp(xmm1));
return AssignEnvironment(DefineSameAsFirst(result));
}
@@ -1879,50 +1840,35 @@ LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
}
-LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
- HLoadKeyedFastElement* instr) {
- ASSERT(instr->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
- if (instr->RequiresHoleCheck()) AssignEnvironment(result);
- return DefineAsRegister(result);
-}
-
-
-LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
- HLoadKeyedFastDoubleElement* instr) {
- ASSERT(instr->representation().IsDouble());
- ASSERT(instr->key()->representation().IsInteger32());
+LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
+ ASSERT(instr->key()->representation().IsInteger32() ||
+ instr->key()->representation().IsTagged());
+ ElementsKind elements_kind = instr->elements_kind();
+ bool clobbers_key = ArrayOpClobbersKey<HLoadKeyed>(instr);
+ LOperand* key = clobbers_key
+ ? UseTempRegister(instr->key())
+ : UseRegisterOrConstantAtStart(instr->key());
LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastDoubleElement* result =
- new(zone()) LLoadKeyedFastDoubleElement(elements, key);
- return AssignEnvironment(DefineAsRegister(result));
-}
+ LLoadKeyed* result = new(zone()) LLoadKeyed(elements, key);
+#ifdef DEBUG
+ if (instr->is_external()) {
+ ASSERT(
+ (instr->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ }
+#endif
-LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
- HLoadKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->key()->representation().IsInteger32());
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- LOperand* key = UseRegisterOrConstant(instr->key());
- LLoadKeyedSpecializedArrayElement* result =
- new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
- LInstruction* load_instr = DefineAsRegister(result);
+ DefineAsRegister(result);
+ bool can_deoptimize = instr->RequiresHoleCheck() ||
+ (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
- return (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) ?
- AssignEnvironment(load_instr) : load_instr;
+ return can_deoptimize ? AssignEnvironment(result) : result;
}
@@ -1935,63 +1881,36 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
- HStoreKeyedFastElement* instr) {
+LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
- ASSERT(instr->value()->representation().IsTagged());
- ASSERT(instr->object()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* obj = UseTempRegister(instr->object());
+ bool clobbers_key = ArrayOpClobbersKey<HStoreKeyed>(instr);
+ LOperand* key = (clobbers_key || needs_write_barrier)
+ ? UseTempRegister(instr->key())
+ : UseRegisterOrConstantAtStart(instr->key());
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
: UseRegisterAtStart(instr->value());
- LOperand* key = needs_write_barrier
- ? UseTempRegister(instr->key())
- : UseRegisterOrConstantAtStart(instr->key());
- return new(zone()) LStoreKeyedFastElement(obj, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
- HStoreKeyedFastDoubleElement* instr) {
- ASSERT(instr->value()->representation().IsDouble());
- ASSERT(instr->elements()->representation().IsTagged());
- ASSERT(instr->key()->representation().IsInteger32());
-
LOperand* elements = UseRegisterAtStart(instr->elements());
- LOperand* val = UseTempRegister(instr->value());
- LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
-}
-
-
-LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
- HStoreKeyedSpecializedArrayElement* instr) {
- ElementsKind elements_kind = instr->elements_kind();
- ASSERT(
- (instr->value()->representation().IsInteger32() &&
- (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
- (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (instr->value()->representation().IsDouble() &&
- ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
- (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
- ASSERT(instr->external_pointer()->representation().IsExternal());
- ASSERT(instr->key()->representation().IsInteger32());
-
- LOperand* external_pointer = UseRegister(instr->external_pointer());
- bool val_is_temp_register =
- elements_kind == EXTERNAL_PIXEL_ELEMENTS ||
- elements_kind == EXTERNAL_FLOAT_ELEMENTS;
- LOperand* val = val_is_temp_register
- ? UseTempRegister(instr->value())
- : UseRegister(instr->value());
- LOperand* key = UseRegisterOrConstant(instr->key());
+#ifdef DEBUG
+ if (!instr->is_external()) {
+ ASSERT(instr->elements()->representation().IsTagged());
+ } else {
+ ElementsKind elements_kind = instr->elements_kind();
+ ASSERT(
+ (instr->value()->representation().IsInteger32() &&
+ (elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
+ (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
+ (instr->value()->representation().IsDouble() &&
+ ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
+ (elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
+ ASSERT(instr->elements()->representation().IsExternal());
+ }
+#endif
- return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ LStoreKeyed* result = new(zone()) LStoreKeyed(elements, key, val);
+ ASSERT(result != NULL);
+ return result;
}
@@ -2012,8 +1931,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
@@ -2035,10 +1955,19 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
@@ -2046,8 +1975,8 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
// We only need a scratch register if we have a write barrier or we
// have a store into the properties array (not in-object-property).
- LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
- ? TempRegister() : NULL;
+ LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
+ needs_write_barrier_for_map) ? TempRegister() : NULL;
return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2092,7 +2021,7 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new LAllocateObject(TempRegister());
+ LAllocateObject* result = new(zone()) LAllocateObject(TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2131,6 +2060,7 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
+ ASSERT(argument_count_ == 0);
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
return AssignEnvironment(new(zone()) LOsrEntry);
@@ -2169,12 +2099,10 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
- LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* args = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = Use(instr->index());
- LAccessArgumentsAt* result =
- new(zone()) LAccessArgumentsAt(arguments, length, index);
- return AssignEnvironment(DefineAsRegister(result));
+ return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
@@ -2228,7 +2156,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
+ pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
@@ -2254,10 +2182,11 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->function(),
undefined,
instr->call_kind(),
- instr->is_construct());
+ instr->inlining_kind());
if (instr->arguments_var() != NULL) {
inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
+ inner->set_entry(instr);
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2269,7 +2198,7 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
HEnvironment* env = current_block_->last_environment();
- if (instr->arguments_pushed()) {
+ if (env->entry()->arguments_pushed()) {
int argument_count = env->arguments_environment()->parameter_count();
pop = new(zone()) LDrop(argument_count);
argument_count_ -= argument_count;
diff --git a/src/3rdparty/v8/src/x64/lithium-x64.h b/src/3rdparty/v8/src/x64/lithium-x64.h
index ac1a5db..a437a2b 100644
--- a/src/3rdparty/v8/src/x64/lithium-x64.h
+++ b/src/3rdparty/v8/src/x64/lithium-x64.h
@@ -96,6 +96,7 @@ class LCodeGen;
V(ElementsKind) \
V(FastLiteral) \
V(FixedArrayBaseLength) \
+ V(MapEnumLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
V(GlobalObject) \
@@ -108,6 +109,7 @@ class LCodeGen;
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
+ V(Uint32ToDouble) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
@@ -115,7 +117,6 @@ class LCodeGen;
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
- V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -125,17 +126,18 @@ class LCodeGen;
V(LoadFunctionPrototype) \
V(LoadGlobalCell) \
V(LoadGlobalGeneric) \
- V(LoadKeyedFastDoubleElement) \
- V(LoadKeyedFastElement) \
+ V(LoadKeyed) \
V(LoadKeyedGeneric) \
- V(LoadKeyedSpecializedArrayElement) \
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MathFloorOfDiv) \
+ V(MathMinMax) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
V(NumberTagI) \
+ V(NumberTagU) \
V(NumberUntagD) \
V(ObjectLiteral) \
V(OsrEntry) \
@@ -153,15 +155,14 @@ class LCodeGen;
V(StoreContextSlot) \
V(StoreGlobalCell) \
V(StoreGlobalGeneric) \
- V(StoreKeyedFastDoubleElement) \
- V(StoreKeyedFastElement) \
+ V(StoreKeyed) \
V(StoreKeyedGeneric) \
- V(StoreKeyedSpecializedArrayElement) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
V(StringAdd) \
V(StringCharCodeAt) \
V(StringCharFromCode) \
+ V(StringCompareAndBranch) \
V(StringLength) \
V(SubI) \
V(TaggedToI) \
@@ -257,11 +258,6 @@ class LInstruction: public ZoneObject {
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
- virtual int InputCount() = 0;
- virtual LOperand* InputAt(int i) = 0;
- virtual int TempCount() = 0;
- virtual LOperand* TempAt(int i) = 0;
-
LOperand* FirstInput() { return InputAt(0); }
LOperand* Output() { return HasResult() ? result() : NULL; }
@@ -270,6 +266,15 @@ class LInstruction: public ZoneObject {
#endif
private:
+ // Iterator support.
+ friend class InputIterator;
+ virtual int InputCount() = 0;
+ virtual LOperand* InputAt(int i) = 0;
+
+ friend class TempIterator;
+ virtual int TempCount() = 0;
+ virtual LOperand* TempAt(int i) = 0;
+
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
@@ -289,16 +294,18 @@ class LTemplateInstruction: public LInstruction {
void set_result(LOperand* operand) { results_[0] = operand; }
LOperand* result() { return results_[0]; }
- int InputCount() { return I; }
- LOperand* InputAt(int i) { return inputs_[i]; }
-
- int TempCount() { return T; }
- LOperand* TempAt(int i) { return temps_[i]; }
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
EmbeddedContainer<LOperand*, T> temps_;
+
+ private:
+ // Iterator support.
+ virtual int InputCount() { return I; }
+ virtual LOperand* InputAt(int i) { return inputs_[i]; }
+
+ virtual int TempCount() { return T; }
+ virtual LOperand* TempAt(int i) { return temps_[i]; }
};
@@ -333,8 +340,11 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos,
+ Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -462,10 +472,10 @@ class LWrapReceiver: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = function;
}
- DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
-
LOperand* receiver() { return inputs_[0]; }
LOperand* function() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
};
@@ -481,12 +491,12 @@ class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
inputs_[3] = elements;
}
- DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
-
LOperand* function() { return inputs_[0]; }
LOperand* receiver() { return inputs_[1]; }
LOperand* length() { return inputs_[2]; }
LOperand* elements() { return inputs_[3]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
};
@@ -498,12 +508,12 @@ class LAccessArgumentsAt: public LTemplateInstruction<1, 3, 0> {
inputs_[2] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(AccessArgumentsAt, "access-arguments-at")
-
LOperand* arguments() { return inputs_[0]; }
LOperand* length() { return inputs_[1]; }
LOperand* index() { return inputs_[2]; }
+ DECLARE_CONCRETE_INSTRUCTION(AccessArgumentsAt, "access-arguments-at")
+
virtual void PrintDataTo(StringStream* stream);
};
@@ -514,6 +524,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = elements;
}
+ LOperand* elements() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ArgumentsLength, "arguments-length")
};
@@ -533,6 +545,10 @@ class LModI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ModI, "mod-i")
DECLARE_HYDROGEN_ACCESSOR(Mod)
};
@@ -546,11 +562,34 @@ class LDivI: public LTemplateInstruction<1, 2, 1> {
temps_[0] = temp;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(DivI, "div-i")
DECLARE_HYDROGEN_ACCESSOR(Div)
};
+class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LMathFloorOfDiv(LOperand* left,
+ LOperand* right,
+ LOperand* temp = NULL) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ temps_[0] = temp;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div")
+ DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv)
+};
+
+
class LMulI: public LTemplateInstruction<1, 2, 0> {
public:
LMulI(LOperand* left, LOperand* right) {
@@ -558,6 +597,9 @@ class LMulI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(MulI, "mul-i")
DECLARE_HYDROGEN_ACCESSOR(Mul)
};
@@ -570,6 +612,9 @@ class LCmpIDAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpIDAndBranch, "cmp-id-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareIDAndBranch)
@@ -588,6 +633,8 @@ class LUnaryMathOperation: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(UnaryMathOperation, "unary-math-operation")
DECLARE_HYDROGEN_ACCESSOR(UnaryMathOperation)
@@ -603,6 +650,9 @@ class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
"cmp-object-eq-and-branch")
};
@@ -614,6 +664,8 @@ class LCmpConstantEqAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = left;
}
+ LOperand* left() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpConstantEqAndBranch,
"cmp-constant-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareConstantEqAndBranch)
@@ -627,6 +679,9 @@ class LIsNilAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsNilAndBranch, "is-nil-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNilAndBranch)
@@ -643,6 +698,8 @@ class LIsObjectAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsObjectAndBranch, "is-object-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsObjectAndBranch)
@@ -657,6 +714,9 @@ class LIsStringAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
@@ -670,6 +730,8 @@ class LIsSmiAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsSmiAndBranch, "is-smi-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsSmiAndBranch)
@@ -684,6 +746,9 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsUndetectableAndBranch,
"is-undetectable-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsUndetectableAndBranch)
@@ -699,6 +764,9 @@ class LStringCompareAndBranch: public LControlInstruction<2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
"string-compare-and-branch")
DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
@@ -715,6 +783,8 @@ class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasInstanceTypeAndBranch,
"has-instance-type-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInstanceTypeAndBranch)
@@ -729,6 +799,8 @@ class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
};
@@ -740,6 +812,8 @@ class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(HasCachedArrayIndexAndBranch,
"has-cached-array-index-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasCachedArrayIndexAndBranch)
@@ -756,6 +830,10 @@ class LClassOfTestAndBranch: public LControlInstruction<1, 2> {
temps_[1] = temp2;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+ LOperand* temp2() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(ClassOfTestAndBranch,
"class-of-test-and-branch")
DECLARE_HYDROGEN_ACCESSOR(ClassOfTestAndBranch)
@@ -771,6 +849,9 @@ class LCmpT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpT, "cmp-t")
DECLARE_HYDROGEN_ACCESSOR(CompareGeneric)
@@ -799,6 +880,9 @@ class LInstanceOf: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
@@ -810,6 +894,9 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
@@ -838,6 +925,7 @@ class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
LOperand* length() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck, "bounds-check")
+ DECLARE_HYDROGEN_ACCESSOR(BoundsCheck)
};
@@ -848,6 +936,9 @@ class LBitI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
Token::Value op() const { return hydrogen()->op(); }
DECLARE_CONCRETE_INSTRUCTION(BitI, "bit-i")
@@ -864,7 +955,8 @@ class LShiftI: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
-
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
bool can_deopt() const { return can_deopt_; }
DECLARE_CONCRETE_INSTRUCTION(ShiftI, "shift-i")
@@ -882,6 +974,9 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(SubI, "sub-i")
DECLARE_HYDROGEN_ACCESSOR(Sub)
};
@@ -901,6 +996,9 @@ class LConstantD: public LTemplateInstruction<1, 0, 1> {
explicit LConstantD(LOperand* temp) {
temps_[0] = temp;
}
+
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ConstantD, "constant-d")
DECLARE_HYDROGEN_ACCESSOR(Constant)
@@ -923,6 +1021,8 @@ class LBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")
DECLARE_HYDROGEN_ACCESSOR(Branch)
@@ -936,6 +1036,8 @@ class LCmpMapAndBranch: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareMap)
@@ -957,6 +1059,8 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength, "js-array-length")
DECLARE_HYDROGEN_ACCESSOR(JSArrayLength)
};
@@ -968,18 +1072,34 @@ class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
"fixed-array-base-length")
DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
+class LMapEnumLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LMapEnumLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MapEnumLength, "map-enum-length")
+};
+
+
class LElementsKind: public LTemplateInstruction<1, 1, 0> {
public:
explicit LElementsKind(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ElementsKind, "elements-kind")
DECLARE_HYDROGEN_ACCESSOR(ElementsKind)
};
@@ -991,6 +1111,8 @@ class LValueOf: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "value-of")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
};
@@ -1002,11 +1124,12 @@ class LDateField: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = date;
}
+ LOperand* date() { return inputs_[0]; }
+ Smi* index() const { return index_; }
+
DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
DECLARE_HYDROGEN_ACCESSOR(ValueOf)
- Smi* index() const { return index_; }
-
private:
Smi* index_;
};
@@ -1018,6 +1141,8 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Throw, "throw")
};
@@ -1028,6 +1153,8 @@ class LBitNotI: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
};
@@ -1039,11 +1166,29 @@ class LAddI: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(AddI, "add-i")
DECLARE_HYDROGEN_ACCESSOR(Add)
};
+class LMathMinMax: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LMathMinMax(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathMinMax, "min-max")
+ DECLARE_HYDROGEN_ACCESSOR(MathMinMax)
+};
+
+
class LPower: public LTemplateInstruction<1, 2, 0> {
public:
LPower(LOperand* left, LOperand* right) {
@@ -1051,6 +1196,9 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(Power, "power")
DECLARE_HYDROGEN_ACCESSOR(Power)
};
@@ -1062,6 +1210,8 @@ class LRandom: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
+ LOperand* global_object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Random, "random")
DECLARE_HYDROGEN_ACCESSOR(Random)
};
@@ -1076,6 +1226,8 @@ class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
}
Token::Value op() const { return op_; }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
virtual Opcode opcode() const { return LInstruction::kArithmeticD; }
virtual void CompileToNative(LCodeGen* generator);
@@ -1094,12 +1246,14 @@ class LArithmeticT: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
+ Token::Value op() const { return op_; }
+ LOperand* left() { return inputs_[0]; }
+ LOperand* right() { return inputs_[1]; }
+
virtual Opcode opcode() const { return LInstruction::kArithmeticT; }
virtual void CompileToNative(LCodeGen* generator);
virtual const char* Mnemonic() const;
- Token::Value op() const { return op_; }
-
private:
Token::Value op_;
};
@@ -1111,6 +1265,8 @@ class LReturn: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
};
@@ -1121,6 +1277,8 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedField)
};
@@ -1172,6 +1330,8 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadElements, "load-elements")
};
@@ -1182,64 +1342,43 @@ class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = object;
}
+ LOperand* object() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadExternalArrayPointer,
"load-external-array-pointer")
};
-class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyed: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
+ LLoadKeyed(LOperand* elements, LOperand* key) {
inputs_[0] = elements;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement, "load-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastElement)
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyed, "load-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(LoadKeyed)
-class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
- inputs_[0] = elements;
- inputs_[1] = key;
+ bool is_external() const {
+ return hydrogen()->is_external();
}
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement,
- "load-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedFastDoubleElement)
-
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
-class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
- public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
- "load-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)
-
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
+template <class T>
+inline static bool ArrayOpClobbersKey(T *value) {
+ CHECK(value->IsLoadKeyed() || value->IsStoreKeyed());
+ return !value->IsConstant() && (value->key()->representation().IsTagged()
+ || value->IsDehoisted());
+}
+
+
class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
@@ -1283,10 +1422,11 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
-
- LOperand* value() { return inputs_[0]; }
};
@@ -1298,12 +1438,13 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* global_object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalGeneric, "store-global-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalGeneric)
- LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
- LOperand* value() { return InputAt(1); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1314,10 +1455,11 @@ class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
+ LOperand* context() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
- LOperand* context() { return InputAt(0); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1332,11 +1474,13 @@ class LStoreContextSlot: public LTemplateInstruction<0, 2, 1> {
temps_[0] = temp;
}
+ LOperand* context() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot, "store-context-slot")
DECLARE_HYDROGEN_ACCESSOR(StoreContextSlot)
- LOperand* context() { return InputAt(0); }
- LOperand* value() { return InputAt(1); }
int slot_index() { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
@@ -1349,6 +1493,8 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(PushArgument, "push-argument")
};
@@ -1385,9 +1531,9 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = context;
}
- DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
+ LOperand* context() { return inputs_[0]; }
- LOperand* context() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(OuterContext, "outer-context")
};
@@ -1416,9 +1562,9 @@ class LGlobalReceiver: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = global_object;
}
- DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
+ LOperand* global() { return inputs_[0]; }
- LOperand* global() { return InputAt(0); }
+ DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
};
@@ -1440,11 +1586,11 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = function;
}
+ LOperand* function() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
- LOperand* function() { return inputs_[0]; }
-
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
@@ -1531,6 +1677,8 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = constructor;
}
+ LOperand* constructor() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
DECLARE_HYDROGEN_ACCESSOR(CallNew)
@@ -1556,20 +1704,52 @@ class LInteger32ToDouble: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Integer32ToDouble, "int32-to-double")
};
+class LUint32ToDouble: public LTemplateInstruction<1, 1, 1> {
+ public:
+ explicit LUint32ToDouble(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Uint32ToDouble, "uint32-to-double")
+};
+
+
class LNumberTagI: public LTemplateInstruction<1, 1, 0> {
public:
explicit LNumberTagI(LOperand* value) {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagI, "number-tag-i")
};
+class LNumberTagU: public LTemplateInstruction<1, 1, 1> {
+ public:
+ explicit LNumberTagU(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(NumberTagU, "number-tag-u")
+};
+
+
class LNumberTagD: public LTemplateInstruction<1, 1, 1> {
public:
explicit LNumberTagD(LOperand* value, LOperand* temp) {
@@ -1577,6 +1757,9 @@ class LNumberTagD: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberTagD, "number-tag-d")
};
@@ -1588,6 +1771,8 @@ class LDoubleToI: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(DoubleToI, "double-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1603,6 +1788,9 @@ class LTaggedToI: public LTemplateInstruction<1, 1, 1> {
temps_[0] = temp;
}
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TaggedToI, "tagged-to-i")
DECLARE_HYDROGEN_ACCESSOR(UnaryOperation)
@@ -1616,6 +1804,8 @@ class LSmiTag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(SmiTag, "smi-tag")
};
@@ -1626,6 +1816,8 @@ class LNumberUntagD: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
DECLARE_HYDROGEN_ACCESSOR(Change);
};
@@ -1638,10 +1830,11 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
-
+ LOperand* value() { return inputs_[0]; }
bool needs_check() const { return needs_check_; }
+ DECLARE_CONCRETE_INSTRUCTION(SmiUntag, "smi-untag")
+
private:
bool needs_check_;
};
@@ -1655,14 +1848,15 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
temps_[0] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedField)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
-
Handle<Object> name() const { return hydrogen()->name(); }
bool is_in_object() { return hydrogen()->is_in_object(); }
int offset() { return hydrogen()->offset(); }
@@ -1677,88 +1871,42 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
inputs_[1] = value;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* value() { return inputs_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
-class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyed: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreKeyedFastElement(LOperand* obj, LOperand* key, LOperand* val) {
- inputs_[0] = obj;
+ LStoreKeyed(LOperand* object, LOperand* key, LOperand* value) {
+ inputs_[0] = object;
inputs_[1] = key;
- inputs_[2] = val;
+ inputs_[2] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement,
- "store-keyed-fast-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastElement)
-
- virtual void PrintDataTo(StringStream* stream);
-
- LOperand* object() { return inputs_[0]; }
+ bool is_external() const { return hydrogen()->is_external(); }
+ LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
+ ElementsKind elements_kind() const { return hydrogen()->elements_kind(); }
-
-class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedFastDoubleElement(LOperand* elements,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = elements;
- inputs_[1] = key;
- inputs_[2] = val;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement,
- "store-keyed-fast-double-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedFastDoubleElement)
+ DECLARE_CONCRETE_INSTRUCTION(StoreKeyed, "store-keyed")
+ DECLARE_HYDROGEN_ACCESSOR(StoreKeyed)
virtual void PrintDataTo(StringStream* stream);
-
- LOperand* elements() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
-
bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
-class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
- public:
- LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key,
- LOperand* val) {
- inputs_[0] = external_pointer;
- inputs_[1] = key;
- inputs_[2] = val;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
- "store-keyed-specialized-array-element")
- DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)
-
- LOperand* external_pointer() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
- ElementsKind elements_kind() const {
- return hydrogen()->elements_kind();
- }
- uint32_t additional_index() const { return hydrogen()->index_offset(); }
-};
-
-
class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
public:
LStoreKeyedGeneric(LOperand* object, LOperand* key, LOperand* value) {
@@ -1767,14 +1915,15 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
inputs_[2] = value;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* key() { return inputs_[1]; }
+ LOperand* value() { return inputs_[2]; }
+
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* key() { return inputs_[1]; }
- LOperand* value() { return inputs_[2]; }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1783,21 +1932,22 @@ class LTransitionElementsKind: public LTemplateInstruction<1, 1, 2> {
public:
LTransitionElementsKind(LOperand* object,
LOperand* new_map_temp,
- LOperand* temp_reg) {
+ LOperand* temp) {
inputs_[0] = object;
temps_[0] = new_map_temp;
- temps_[1] = temp_reg;
+ temps_[1] = temp;
}
+ LOperand* object() { return inputs_[0]; }
+ LOperand* new_map_temp() { return temps_[0]; }
+ LOperand* temp() { return temps_[1]; }
+
DECLARE_CONCRETE_INSTRUCTION(TransitionElementsKind,
"transition-elements-kind")
DECLARE_HYDROGEN_ACCESSOR(TransitionElementsKind)
virtual void PrintDataTo(StringStream* stream);
- LOperand* object() { return inputs_[0]; }
- LOperand* new_map_reg() { return temps_[0]; }
- LOperand* temp_reg() { return temps_[1]; }
Handle<Map> original_map() { return hydrogen()->original_map(); }
Handle<Map> transitioned_map() { return hydrogen()->transitioned_map(); }
};
@@ -1810,11 +1960,11 @@ class LStringAdd: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = right;
}
- DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
- DECLARE_HYDROGEN_ACCESSOR(StringAdd)
-
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+ DECLARE_HYDROGEN_ACCESSOR(StringAdd)
};
@@ -1825,11 +1975,11 @@ class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = index;
}
- DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
- DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
-
LOperand* string() { return inputs_[0]; }
LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCharCodeAt, "string-char-code-at")
+ DECLARE_HYDROGEN_ACCESSOR(StringCharCodeAt)
};
@@ -1839,10 +1989,10 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = char_code;
}
+ LOperand* char_code() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
-
- LOperand* char_code() { return inputs_[0]; }
};
@@ -1852,10 +2002,10 @@ class LStringLength: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = string;
}
+ LOperand* string() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
DECLARE_HYDROGEN_ACCESSOR(StringLength)
-
- LOperand* string() { return inputs_[0]; }
};
@@ -1865,7 +2015,7 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
- LOperand* value() { return InputAt(0); }
+ LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
@@ -1878,6 +2028,8 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckInstanceType, "check-instance-type")
DECLARE_HYDROGEN_ACCESSOR(CheckInstanceType)
};
@@ -1889,17 +2041,21 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
+class LCheckPrototypeMaps: public LTemplateInstruction<1, 0, 1> {
public:
explicit LCheckPrototypeMaps(LOperand* temp) {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
@@ -1914,15 +2070,16 @@ class LCheckSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckSmi, "check-smi")
};
-class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
+class LClampDToUint8: public LTemplateInstruction<1, 1, 0> {
public:
- LClampDToUint8(LOperand* value, LOperand* temp) {
- inputs_[0] = value;
- temps_[0] = temp;
+ explicit LClampDToUint8(LOperand* unclamped) {
+ inputs_[0] = unclamped;
}
LOperand* unclamped() { return inputs_[0]; }
@@ -1933,8 +2090,8 @@ class LClampDToUint8: public LTemplateInstruction<1, 1, 1> {
class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LClampIToUint8(LOperand* value) {
- inputs_[0] = value;
+ explicit LClampIToUint8(LOperand* unclamped) {
+ inputs_[0] = unclamped;
}
LOperand* unclamped() { return inputs_[0]; }
@@ -1943,17 +2100,16 @@ class LClampIToUint8: public LTemplateInstruction<1, 1, 0> {
};
-class LClampTToUint8: public LTemplateInstruction<1, 1, 2> {
+class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
public:
- LClampTToUint8(LOperand* value,
- LOperand* temp,
- LOperand* temp2) {
- inputs_[0] = value;
- temps_[0] = temp;
- temps_[1] = temp2;
+ LClampTToUint8(LOperand* unclamped,
+ LOperand* temp_xmm) {
+ inputs_[0] = unclamped;
+ temps_[0] = temp_xmm;
}
LOperand* unclamped() { return inputs_[0]; }
+ LOperand* temp_xmm() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(ClampTToUint8, "clamp-t-to-uint8")
};
@@ -1965,6 +2121,8 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check-non-smi")
};
@@ -1975,6 +2133,8 @@ class LAllocateObject: public LTemplateInstruction<1, 0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
};
@@ -2023,6 +2183,8 @@ class LToFastProperties: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(ToFastProperties, "to-fast-properties")
DECLARE_HYDROGEN_ACCESSOR(ToFastProperties)
};
@@ -2034,6 +2196,8 @@ class LTypeof: public LTemplateInstruction<1, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(Typeof, "typeof")
};
@@ -2044,6 +2208,8 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return inputs_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(TypeofIsAndBranch, "typeof-is-and-branch")
DECLARE_HYDROGEN_ACCESSOR(TypeofIsAndBranch)
@@ -2059,6 +2225,8 @@ class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
temps_[0] = temp;
}
+ LOperand* temp() { return temps_[0]; }
+
DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
"is-construct-call-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsConstructCallAndBranch)
@@ -2072,10 +2240,10 @@ class LDeleteProperty: public LTemplateInstruction<1, 2, 0> {
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
-
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(DeleteProperty, "delete-property")
};
@@ -2171,69 +2339,13 @@ class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
class LChunkBuilder;
-class LChunk: public ZoneObject {
+class LPlatformChunk: public LChunk {
public:
- explicit LChunk(CompilationInfo* info, HGraph* graph)
- : spill_slot_count_(0),
- info_(info),
- graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) { }
-
- void AddInstruction(LInstruction* instruction, HBasicBlock* block);
- LConstantOperand* DefineConstantOperand(HConstant* constant);
- Handle<Object> LookupLiteral(LConstantOperand* operand) const;
- Representation LookupLiteralRepresentation(LConstantOperand* operand) const;
+ LPlatformChunk(CompilationInfo* info, HGraph* graph)
+ : LChunk(info, graph) { }
int GetNextSpillIndex(bool is_double);
LOperand* GetNextSpillSlot(bool is_double);
-
- int ParameterAt(int index);
- int GetParameterStackSlot(int index) const;
- int spill_slot_count() const { return spill_slot_count_; }
- CompilationInfo* info() const { return info_; }
- HGraph* graph() const { return graph_; }
- const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
- void AddGapMove(int index, LOperand* from, LOperand* to);
- LGap* GetGapAt(int index) const;
- bool IsGapAt(int index) const;
- int NearestGapPos(int index) const;
- void MarkEmptyBlocks();
- const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
- LLabel* GetLabel(int block_id) const {
- HBasicBlock* block = graph_->blocks()->at(block_id);
- int first_instruction = block->first_instruction_index();
- return LLabel::cast(instructions_[first_instruction]);
- }
- int LookupDestination(int block_id) const {
- LLabel* cur = GetLabel(block_id);
- while (cur->replacement() != NULL) {
- cur = cur->replacement();
- }
- return cur->block_id();
- }
- Label* GetAssemblyLabel(int block_id) const {
- LLabel* label = GetLabel(block_id);
- ASSERT(!label->HasReplacement());
- return label->label();
- }
-
- const ZoneList<Handle<JSFunction> >* inlined_closures() const {
- return &inlined_closures_;
- }
-
- void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
- }
-
- private:
- int spill_slot_count_;
- CompilationInfo* info_;
- HGraph* const graph_;
- ZoneList<LInstruction*> instructions_;
- ZoneList<LPointerMap*> pointer_maps_;
- ZoneList<Handle<JSFunction> > inlined_closures_;
};
@@ -2243,7 +2355,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2252,16 +2364,19 @@ class LChunkBuilder BASE_EMBEDDED {
allocator_(allocator),
position_(RelocInfo::kNoPosition),
instruction_pending_deoptimization_environment_(NULL),
- pending_deoptimization_ast_id_(AstNode::kNoNumber) { }
+ pending_deoptimization_ast_id_(BailoutId::None()) { }
// Build the sequence for the graph.
- LChunk* Build();
+ LPlatformChunk* Build();
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) LInstruction* Do##type(H##type* node);
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
+ static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* val);
+ static HValue* SimplifiedDivisorForMathFloorOfDiv(HValue* val);
+
private:
enum Status {
UNUSED,
@@ -2270,7 +2385,7 @@ class LChunkBuilder BASE_EMBEDDED {
ABORTED
};
- LChunk* chunk() const { return chunk_; }
+ LPlatformChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
Zone* zone() const { return zone_; }
@@ -2280,7 +2395,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* format, ...);
+ void Abort(const char* reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
@@ -2374,7 +2489,7 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* DoArithmeticT(Token::Value op,
HArithmeticBinaryOperation* instr);
- LChunk* chunk_;
+ LPlatformChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Zone* zone_;
@@ -2386,7 +2501,7 @@ class LChunkBuilder BASE_EMBEDDED {
LAllocator* allocator_;
int position_;
LInstruction* instruction_pending_deoptimization_environment_;
- int pending_deoptimization_ast_id_;
+ BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};
diff --git a/src/3rdparty/v8/src/x64/macro-assembler-x64.cc b/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
index 3d380a2..962c2e8 100644
--- a/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
@@ -53,9 +53,17 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
}
-static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
+static const int kInvalidRootRegisterDelta = -1;
+
+
+intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
+ if (predictable_code_size() &&
+ (other.address() < reinterpret_cast<Address>(isolate()) ||
+ other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
+ return kInvalidRootRegisterDelta;
+ }
Address roots_register_value = kRootRegisterBias +
- reinterpret_cast<Address>(isolate->heap()->roots_array_start());
+ reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
intptr_t delta = other.address() - roots_register_value;
return delta;
}
@@ -64,8 +72,8 @@ static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
Operand MacroAssembler::ExternalOperand(ExternalReference target,
Register scratch) {
if (root_array_available_ && !Serializer::enabled()) {
- intptr_t delta = RootRegisterDelta(target, isolate());
- if (is_int32(delta)) {
+ intptr_t delta = RootRegisterDelta(target);
+ if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow();
return Operand(kRootRegister, static_cast<int32_t>(delta));
}
@@ -77,8 +85,8 @@ Operand MacroAssembler::ExternalOperand(ExternalReference target,
void MacroAssembler::Load(Register destination, ExternalReference source) {
if (root_array_available_ && !Serializer::enabled()) {
- intptr_t delta = RootRegisterDelta(source, isolate());
- if (is_int32(delta)) {
+ intptr_t delta = RootRegisterDelta(source);
+ if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow();
movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
return;
@@ -96,8 +104,8 @@ void MacroAssembler::Load(Register destination, ExternalReference source) {
void MacroAssembler::Store(ExternalReference destination, Register source) {
if (root_array_available_ && !Serializer::enabled()) {
- intptr_t delta = RootRegisterDelta(destination, isolate());
- if (is_int32(delta)) {
+ intptr_t delta = RootRegisterDelta(destination);
+ if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow();
movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
return;
@@ -116,8 +124,8 @@ void MacroAssembler::Store(ExternalReference destination, Register source) {
void MacroAssembler::LoadAddress(Register destination,
ExternalReference source) {
if (root_array_available_ && !Serializer::enabled()) {
- intptr_t delta = RootRegisterDelta(source, isolate());
- if (is_int32(delta)) {
+ intptr_t delta = RootRegisterDelta(source);
+ if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow();
lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
return;
@@ -133,8 +141,8 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
// This calculation depends on the internals of LoadAddress.
// It's correctness is ensured by the asserts in the Call
// instruction below.
- intptr_t delta = RootRegisterDelta(source, isolate());
- if (is_int32(delta)) {
+ intptr_t delta = RootRegisterDelta(source);
+ if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow();
// Operand is lea(scratch, Operand(kRootRegister, delta));
// Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
@@ -216,7 +224,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
Register scratch,
SaveFPRegsMode save_fp,
RememberedSetFinalAction and_then) {
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
int3();
@@ -388,16 +396,14 @@ void MacroAssembler::RecordWrite(Register object,
ASSERT(!object.is(value));
ASSERT(!object.is(address));
ASSERT(!value.is(address));
- if (emit_debug_code()) {
- AbortIfSmi(object);
- }
+ AssertNotSmi(object);
if (remembered_set_action == OMIT_REMEMBERED_SET &&
!FLAG_incremental_marking) {
return;
}
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
cmpq(value, Operand(address, 0));
j(equal, &ok, Label::kNear);
@@ -538,7 +544,7 @@ void MacroAssembler::Abort(const char* msg) {
}
-void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
+void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs
Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
@@ -743,17 +749,52 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
Cmp(Operand(rsi, 0), factory->the_hole_value());
j(not_equal, &promote_scheduled_exception);
+#if ENABLE_EXTRA_CHECKS
+ // Check if the function returned a valid JavaScript value.
+ Label ok;
+ Register return_value = rax;
+ Register map = rcx;
+
+ JumpIfSmi(return_value, &ok, Label::kNear);
+ movq(map, FieldOperand(return_value, HeapObject::kMapOffset));
+
+ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ j(below, &ok, Label::kNear);
+
+ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ j(above_equal, &ok, Label::kNear);
+
+ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ j(equal, &ok, Label::kNear);
+
+ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
+ j(equal, &ok, Label::kNear);
+
+ CompareRoot(return_value, Heap::kTrueValueRootIndex);
+ j(equal, &ok, Label::kNear);
+
+ CompareRoot(return_value, Heap::kFalseValueRootIndex);
+ j(equal, &ok, Label::kNear);
+
+ CompareRoot(return_value, Heap::kNullValueRootIndex);
+ j(equal, &ok, Label::kNear);
+
+ Abort("API call returned invalid object");
+
+ bind(&ok);
+#endif
+
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
- bind(&promote_scheduled_exception);
- TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
-
bind(&empty_result);
// It was zero; the result is undefined.
- Move(rax, factory->undefined_value());
+ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
jmp(&prologue);
+ bind(&promote_scheduled_exception);
+ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+
// HandleScope limit has changed. Delete allocated extensions.
bind(&delete_allocated_handles);
movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
@@ -798,7 +839,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the builtins object into target register.
- movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
movq(target, FieldOperand(target,
JSBuiltinsObject::OffsetOfFunctionWithId(id)));
@@ -892,6 +933,38 @@ void MacroAssembler::Set(const Operand& dst, int64_t x) {
}
}
+
+bool MacroAssembler::IsUnsafeInt(const int x) {
+ static const int kMaxBits = 17;
+ return !is_intn(x, kMaxBits);
+}
+
+
+void MacroAssembler::SafeMove(Register dst, Smi* src) {
+ ASSERT(!dst.is(kScratchRegister));
+ ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi.
+ if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+ Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
+ Move(kScratchRegister, Smi::FromInt(jit_cookie()));
+ xor_(dst, kScratchRegister);
+ } else {
+ Move(dst, src);
+ }
+}
+
+
+void MacroAssembler::SafePush(Smi* src) {
+ ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi.
+ if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+ Push(Smi::FromInt(src->value() ^ jit_cookie()));
+ Move(kScratchRegister, Smi::FromInt(jit_cookie()));
+ xor_(Operand(rsp, 0), kScratchRegister);
+ } else {
+ Push(src);
+ }
+}
+
+
// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.
@@ -1040,18 +1113,14 @@ void MacroAssembler::SmiTest(Register src) {
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
- if (emit_debug_code()) {
- AbortIfNotSmi(smi1);
- AbortIfNotSmi(smi2);
- }
+ AssertSmi(smi1);
+ AssertSmi(smi2);
cmpq(smi1, smi2);
}
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
- if (emit_debug_code()) {
- AbortIfNotSmi(dst);
- }
+ AssertSmi(dst);
Cmp(dst, src);
}
@@ -1068,27 +1137,21 @@ void MacroAssembler::Cmp(Register dst, Smi* src) {
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
- if (emit_debug_code()) {
- AbortIfNotSmi(dst);
- AbortIfNotSmi(src);
- }
+ AssertSmi(dst);
+ AssertSmi(src);
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
- if (emit_debug_code()) {
- AbortIfNotSmi(dst);
- AbortIfNotSmi(src);
- }
+ AssertSmi(dst);
+ AssertSmi(src);
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
- if (emit_debug_code()) {
- AbortIfNotSmi(dst);
- }
+ AssertSmi(dst);
cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}
@@ -2165,7 +2228,7 @@ void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
andl(scratch, Immediate(kFlatAsciiStringMask));
- cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
+ cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
j(not_equal, failure, near_jump);
}
@@ -2377,7 +2440,7 @@ void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
void MacroAssembler::Call(Handle<Code> code_object,
RelocInfo::Mode rmode,
- unsigned ast_id) {
+ TypeFeedbackId ast_id) {
#ifdef DEBUG
int end_position = pc_offset() + CallSize(code_object);
#endif
@@ -2460,6 +2523,12 @@ MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
};
+void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
+ const Immediate& imm) {
+ movq(SafepointRegisterSlot(dst), imm);
+}
+
+
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
movq(SafepointRegisterSlot(dst), src);
}
@@ -2658,10 +2727,12 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
void MacroAssembler::CheckFastElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
@@ -2669,23 +2740,26 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(above, fail, distance);
}
@@ -2749,24 +2823,18 @@ void MacroAssembler::CompareMap(Register obj,
CompareMapMode mode) {
Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- j(equal, early_success, Label::kNear);
- Cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- j(equal, early_success, Label::kNear);
- Cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ j(equal, early_success, Label::kNear);
+ Cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(current_map));
+ }
}
}
}
@@ -2800,33 +2868,66 @@ void MacroAssembler::ClampUint8(Register reg) {
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
XMMRegister temp_xmm_reg,
- Register result_reg,
- Register temp_reg) {
+ Register result_reg) {
Label done;
- Set(result_reg, 0);
+ Label conv_failure;
xorps(temp_xmm_reg, temp_xmm_reg);
- ucomisd(input_reg, temp_xmm_reg);
- j(below, &done, Label::kNear);
- uint64_t one_half = BitCast<uint64_t, double>(0.5);
- Set(temp_reg, one_half);
- movq(temp_xmm_reg, temp_reg);
- addsd(temp_xmm_reg, input_reg);
- cvttsd2si(result_reg, temp_xmm_reg);
+ cvtsd2si(result_reg, input_reg);
testl(result_reg, Immediate(0xFFFFFF00));
j(zero, &done, Label::kNear);
+ cmpl(result_reg, Immediate(0x80000000));
+ j(equal, &conv_failure, Label::kNear);
+ movl(result_reg, Immediate(0));
+ setcc(above, result_reg);
+ subl(result_reg, Immediate(1));
+ andl(result_reg, Immediate(255));
+ jmp(&done, Label::kNear);
+ bind(&conv_failure);
+ Set(result_reg, 0);
+ ucomisd(input_reg, temp_xmm_reg);
+ j(below, &done, Label::kNear);
Set(result_reg, 255);
bind(&done);
}
+static double kUint32Bias =
+ static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
+
+
+void MacroAssembler::LoadUint32(XMMRegister dst,
+ Register src,
+ XMMRegister scratch) {
+ Label done;
+ cmpl(src, Immediate(0));
+ movq(kScratchRegister,
+ reinterpret_cast<int64_t>(&kUint32Bias),
+ RelocInfo::NONE);
+ movsd(scratch, Operand(kScratchRegister, 0));
+ cvtlsi2sd(dst, src);
+ j(not_sign, &done, Label::kNear);
+ addsd(dst, scratch);
+ bind(&done);
+}
+
+
void MacroAssembler::LoadInstanceDescriptors(Register map,
Register descriptors) {
- movq(descriptors, FieldOperand(map,
- Map::kInstanceDescriptorsOrBitField3Offset));
- Label not_smi;
- JumpIfNotSmi(descriptors, &not_smi, Label::kNear);
- Move(descriptors, isolate()->factory()->empty_descriptor_array());
- bind(&not_smi);
+ movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
+}
+
+
+void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
+ movq(dst, FieldOperand(map, Map::kBitField3Offset));
+ DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
+}
+
+
+void MacroAssembler::EnumLength(Register dst, Register map) {
+ STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
+ movq(dst, FieldOperand(map, Map::kBitField3Offset));
+ Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask));
+ and_(dst, kScratchRegister);
}
@@ -2845,61 +2946,75 @@ void MacroAssembler::DispatchMap(Register obj,
}
-void MacroAssembler::AbortIfNotNumber(Register object) {
- Label ok;
- Condition is_smi = CheckSmi(object);
- j(is_smi, &ok, Label::kNear);
- Cmp(FieldOperand(object, HeapObject::kMapOffset),
- isolate()->factory()->heap_number_map());
- Assert(equal, "Operand not a number");
- bind(&ok);
+void MacroAssembler::AssertNumber(Register object) {
+ if (emit_debug_code()) {
+ Label ok;
+ Condition is_smi = CheckSmi(object);
+ j(is_smi, &ok, Label::kNear);
+ Cmp(FieldOperand(object, HeapObject::kMapOffset),
+ isolate()->factory()->heap_number_map());
+ Check(equal, "Operand is not a number");
+ bind(&ok);
+ }
}
-void MacroAssembler::AbortIfSmi(Register object) {
- Condition is_smi = CheckSmi(object);
- Assert(NegateCondition(is_smi), "Operand is a smi");
+void MacroAssembler::AssertNotSmi(Register object) {
+ if (emit_debug_code()) {
+ Condition is_smi = CheckSmi(object);
+ Check(NegateCondition(is_smi), "Operand is a smi");
+ }
}
-void MacroAssembler::AbortIfNotSmi(Register object) {
- Condition is_smi = CheckSmi(object);
- Assert(is_smi, "Operand is not a smi");
+void MacroAssembler::AssertSmi(Register object) {
+ if (emit_debug_code()) {
+ Condition is_smi = CheckSmi(object);
+ Check(is_smi, "Operand is not a smi");
+ }
}
-void MacroAssembler::AbortIfNotSmi(const Operand& object) {
- Condition is_smi = CheckSmi(object);
- Assert(is_smi, "Operand is not a smi");
+void MacroAssembler::AssertSmi(const Operand& object) {
+ if (emit_debug_code()) {
+ Condition is_smi = CheckSmi(object);
+ Check(is_smi, "Operand is not a smi");
+ }
}
-void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) {
- ASSERT(!int32_register.is(kScratchRegister));
- movq(kScratchRegister, 0x100000000l, RelocInfo::NONE);
- cmpq(kScratchRegister, int32_register);
- Assert(above_equal, "32 bit value in register is not zero-extended");
+void MacroAssembler::AssertZeroExtended(Register int32_register) {
+ if (emit_debug_code()) {
+ ASSERT(!int32_register.is(kScratchRegister));
+ movq(kScratchRegister, 0x100000000l, RelocInfo::NONE);
+ cmpq(kScratchRegister, int32_register);
+ Check(above_equal, "32 bit value in register is not zero-extended");
+ }
}
-void MacroAssembler::AbortIfNotString(Register object) {
- testb(object, Immediate(kSmiTagMask));
- Assert(not_equal, "Operand is not a string");
- push(object);
- movq(object, FieldOperand(object, HeapObject::kMapOffset));
- CmpInstanceType(object, FIRST_NONSTRING_TYPE);
- pop(object);
- Assert(below, "Operand is not a string");
+void MacroAssembler::AssertString(Register object) {
+ if (emit_debug_code()) {
+ testb(object, Immediate(kSmiTagMask));
+ Check(not_equal, "Operand is a smi and not a string");
+ push(object);
+ movq(object, FieldOperand(object, HeapObject::kMapOffset));
+ CmpInstanceType(object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Check(below, "Operand is not a string");
+ }
}
-void MacroAssembler::AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message) {
- ASSERT(!src.is(kScratchRegister));
- LoadRoot(kScratchRegister, root_value_index);
- cmpq(src, kScratchRegister);
- Check(equal, message);
+void MacroAssembler::AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message) {
+ if (emit_debug_code()) {
+ ASSERT(!src.is(kScratchRegister));
+ LoadRoot(kScratchRegister, root_value_index);
+ cmpq(src, kScratchRegister);
+ Check(equal, message);
+ }
}
@@ -3396,20 +3511,21 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
cmpq(scratch, Immediate(0));
Check(not_equal, "we should not have an empty lexical context");
}
- // Load the global context of the current context.
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ // Load the native context of the current context.
+ int offset =
+ Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
movq(scratch, FieldOperand(scratch, offset));
- movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
- isolate()->factory()->global_context_map());
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ isolate()->factory()->native_context_map());
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
}
// Check if both contexts are the same.
- cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
j(equal, &same_contexts);
// Compare security tokens.
@@ -3417,23 +3533,24 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// compatible with the security token in the receiving global
// object.
- // Check the context is a global context.
+ // Check the context is a native context.
if (emit_debug_code()) {
// Preserve original value of holder_reg.
push(holder_reg);
- movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ movq(holder_reg,
+ FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
CompareRoot(holder_reg, Heap::kNullValueRootIndex);
Check(not_equal, "JSGlobalProxy::context() should not be null.");
- // Read the first word and compare to global_context_map(),
+ // Read the first word and compare to native_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
- CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
- Check(equal, "JSGlobalObject::global_context should be a global context.");
+ CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
+ Check(equal, "JSGlobalObject::native_context should be a native context.");
pop(holder_reg);
}
movq(kScratchRegister,
- FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
+ FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
int token_offset =
Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
movq(scratch, FieldOperand(scratch, token_offset));
@@ -3955,7 +4072,7 @@ void MacroAssembler::CopyBytes(Register destination,
int min_length,
Register scratch) {
ASSERT(min_length >= 0);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
cmpl(length, Immediate(min_length));
Assert(greater_equal, "Invalid min_length");
}
@@ -4053,31 +4170,43 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
- movq(scratch, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+ movq(scratch,
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- cmpq(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ movq(scratch, Operand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+
+ int offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmpq(map_in_out, FieldOperand(scratch, offset));
j(not_equal, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- movq(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ movq(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
movq(map_out, FieldOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -4093,10 +4222,11 @@ static const int kRegisterPassedArguments = 6;
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
- movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
- movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ movq(function,
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
+ movq(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
movq(function, Operand(function, Context::SlotOffset(index)));
}
@@ -4321,7 +4451,7 @@ void MacroAssembler::EnsureNotWhite(
testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
j(not_zero, &done, Label::kNear);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
// Check for impossible bit pattern.
Label ok;
push(mask_scratch);
@@ -4373,7 +4503,7 @@ void MacroAssembler::EnsureNotWhite(
bind(&not_external);
// Sequential string, either ASCII or UC16.
- ASSERT(kAsciiStringTag == 0x04);
+ ASSERT(kOneByteStringTag == 0x04);
and_(length, Immediate(kStringEncodingMask));
xor_(length, Immediate(kStringEncodingMask));
addq(length, Immediate(0x04));
@@ -4396,44 +4526,38 @@ void MacroAssembler::EnsureNotWhite(
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
- Label next;
+ Label next, start;
Register empty_fixed_array_value = r8;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = r9;
- LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
movq(rcx, rax);
- bind(&next);
-
- // Check that there are no elements. Register rcx contains the
- // current JS object we've reached through the prototype chain.
- cmpq(empty_fixed_array_value,
- FieldOperand(rcx, JSObject::kElementsOffset));
- j(not_equal, call_runtime);
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in rbx for the subsequent
- // prototype load.
+ // Check if the enum length field is properly initialized, indicating that
+ // there is an enum cache.
movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
- movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
- JumpIfSmi(rdx, call_runtime);
- // Check that there is an enum cache in the non-empty instance
- // descriptors (rdx). This is the case if the next enumeration
- // index field does not contain a smi.
- movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
- JumpIfSmi(rdx, call_runtime);
+ EnumLength(rdx, rbx);
+ Cmp(rdx, Smi::FromInt(Map::kInvalidEnumCache));
+ j(equal, call_runtime);
+
+ jmp(&start);
+
+ bind(&next);
+
+ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
// For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- cmpq(rcx, rax);
- j(equal, &check_prototype, Label::kNear);
- movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- cmpq(rdx, empty_fixed_array_value);
+ EnumLength(rdx, rbx);
+ Cmp(rdx, Smi::FromInt(0));
+ j(not_equal, call_runtime);
+
+ bind(&start);
+
+ // Check that there are no elements. Register rcx contains the current JS
+ // object we've reached through the prototype chain.
+ cmpq(empty_fixed_array_value,
+ FieldOperand(rcx, JSObject::kElementsOffset));
j(not_equal, call_runtime);
- // Load the prototype from the map and loop if non-null.
- bind(&check_prototype);
movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
cmpq(rcx, null_value);
j(not_equal, &next);
diff --git a/src/3rdparty/v8/src/x64/macro-assembler-x64.h b/src/3rdparty/v8/src/x64/macro-assembler-x64.h
index 5ec8873..fdddc13 100644
--- a/src/3rdparty/v8/src/x64/macro-assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/macro-assembler-x64.h
@@ -317,6 +317,7 @@ class MacroAssembler: public Assembler {
void PopSafepointRegisters() { Popad(); }
// Store the value in register src in the safepoint register stack
// slot for register dst.
+ void StoreToSafepointRegisterSlot(Register dst, const Immediate& imm);
void StoreToSafepointRegisterSlot(Register dst, Register src);
void LoadFromSafepointRegisterSlot(Register dst, Register src);
@@ -774,6 +775,11 @@ class MacroAssembler: public Assembler {
// Move if the registers are not identical.
void Move(Register target, Register source);
+ // Support for constant splitting.
+ bool IsUnsafeInt(const int x);
+ void SafeMove(Register dst, Smi* src);
+ void SafePush(Smi* src);
+
// Bit-field support.
void TestBit(const Operand& dst, int bit_index);
@@ -817,7 +823,7 @@ class MacroAssembler: public Assembler {
void Call(ExternalReference ext);
void Call(Handle<Code> code_object,
RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ TypeFeedbackId ast_id = TypeFeedbackId::None());
// The size of the code generated for different call instructions.
int CallSize(Address destination, RelocInfo::Mode rmode) {
@@ -877,9 +883,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance = Label::kFar);
+ void CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance = Label::kFar);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by index in
@@ -936,32 +942,45 @@ class MacroAssembler: public Assembler {
void ClampDoubleToUint8(XMMRegister input_reg,
XMMRegister temp_xmm_reg,
- Register result_reg,
- Register temp_reg);
+ Register result_reg);
+
+ void LoadUint32(XMMRegister dst, Register src, XMMRegister scratch);
void LoadInstanceDescriptors(Register map, Register descriptors);
+ void EnumLength(Register dst, Register map);
+ void NumberOfOwnDescriptors(Register dst, Register map);
+
+ template<typename Field>
+ void DecodeField(Register reg) {
+ static const int shift = Field::kShift + kSmiShift;
+ static const int mask = Field::kMask >> Field::kShift;
+ shr(reg, Immediate(shift));
+ and_(reg, Immediate(mask));
+ shl(reg, Immediate(kSmiShift));
+ }
- // Abort execution if argument is not a number. Used in debug code.
- void AbortIfNotNumber(Register object);
+ // Abort execution if argument is not a number, enabled via --debug-code.
+ void AssertNumber(Register object);
- // Abort execution if argument is a smi. Used in debug code.
- void AbortIfSmi(Register object);
+ // Abort execution if argument is a smi, enabled via --debug-code.
+ void AssertNotSmi(Register object);
- // Abort execution if argument is not a smi. Used in debug code.
- void AbortIfNotSmi(Register object);
- void AbortIfNotSmi(const Operand& object);
+ // Abort execution if argument is not a smi, enabled via --debug-code.
+ void AssertSmi(Register object);
+ void AssertSmi(const Operand& object);
// Abort execution if a 64 bit register containing a 32 bit payload does not
- // have zeros in the top 32 bits.
- void AbortIfNotZeroExtended(Register reg);
+ // have zeros in the top 32 bits, enabled via --debug-code.
+ void AssertZeroExtended(Register reg);
- // Abort execution if argument is a string. Used in debug code.
- void AbortIfNotString(Register object);
+ // Abort execution if argument is not a string, enabled via --debug-code.
+ void AssertString(Register object);
- // Abort execution if argument is not the root value with the given index.
- void AbortIfNotRootValue(Register src,
- Heap::RootListIndex root_value_index,
- const char* message);
+ // Abort execution if argument is not the root value with the given index,
+ // enabled via --debug-code.
+ void AssertRootValue(Register src,
+ Heap::RootListIndex root_value_index,
+ const char* message);
// ---------------------------------------------------------------------------
// Exception handling
@@ -1128,8 +1147,8 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
// Conditionally load the cached Array transitioned map of type
- // transitioned_kind from the global context if the map in register
- // map_in_out is the cached Array map in the global context of
+ // transitioned_kind from the native context if the map in register
+ // map_in_out is the cached Array map in the native context of
// expected_kind.
void LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
@@ -1141,7 +1160,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
@@ -1154,7 +1174,7 @@ class MacroAssembler: public Assembler {
// Runtime calls
// Call a code stub.
- void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
+ void CallStub(CodeStub* stub, TypeFeedbackId ast_id = TypeFeedbackId::None());
// Tail call a code stub (jump).
void TailCallStub(CodeStub* stub);
@@ -1322,6 +1342,8 @@ class MacroAssembler: public Assembler {
// modified. It may be the "smi 1 constant" register.
Register GetSmiConstant(Smi* value);
+ intptr_t RootRegisterDelta(ExternalReference other);
+
// Moves the smi value to the destination register.
void LoadSmiConstant(Register dst, Smi* value);
@@ -1441,12 +1463,12 @@ inline Operand ContextOperand(Register context, int index) {
inline Operand GlobalObjectOperand() {
- return ContextOperand(rsi, Context::GLOBAL_INDEX);
+ return ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX);
}
static inline Operand QmlGlobalObjectOperand() {
- return ContextOperand(rsi, Context::QML_GLOBAL_INDEX);
+ return ContextOperand(rsi, Context::QML_GLOBAL_OBJECT_INDEX);
}
diff --git a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
index bf232bf..86f7bfe 100644
--- a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,21 +44,23 @@ namespace internal {
/*
* This assembler uses the following register assignment convention
- * - rdx : currently loaded character(s) as ASCII or UC16. Must be loaded using
- * LoadCurrentCharacter before using any of the dispatch methods.
- * - rdi : current position in input, as negative offset from end of string.
+ * - rdx : Currently loaded character(s) as ASCII or UC16. Must be loaded
+ * using LoadCurrentCharacter before using any of the dispatch methods.
+ * Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
+ * - rdi : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character
- * offset! Is always a 32-bit signed (negative) offset, but must be
+ * offset! Is always a 32-bit signed (negative) offset, but must be
* maintained sign-extended to 64 bits, since it is used as index.
- * - rsi : end of input (points to byte after last character in input),
+ * - rsi : End of input (points to byte after last character in input),
* so that rsi+rdi points to the current character.
- * - rbp : frame pointer. Used to access arguments, local variables and
+ * - rbp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - rsp : points to tip of C stack.
- * - rcx : points to tip of backtrack stack. The backtrack stack contains
- * only 32-bit values. Most are offsets from some base (e.g., character
+ * - rsp : Points to tip of C stack.
+ * - rcx : Points to tip of backtrack stack. The backtrack stack contains
+ * only 32-bit values. Most are offsets from some base (e.g., character
* positions from end of string or code location from Code* pointer).
- * - r8 : code object pointer. Used to convert between absolute and
+ * - r8 : Code object pointer. Used to convert between absolute and
* code-object-relative addresses.
*
* The registers rax, rbx, r9 and r11 are free to use for computations.
@@ -72,20 +74,22 @@ namespace internal {
*
* The stack will have the following content, in some order, indexable from the
* frame pointer (see, e.g., kStackHighEnd):
- * - Isolate* isolate (Address of the current isolate)
+ * - Isolate* isolate (address of the current isolate)
* - direct_call (if 1, direct call from JavaScript code, if 0 call
* through the runtime system)
- * - stack_area_base (High end of the memory area to use as
+ * - stack_area_base (high end of the memory area to use as
* backtracking stack)
+ * - capture array size (may fit multiple sets of matches)
* - int* capture_array (int[num_saved_registers_], for output).
- * - end of input (Address of end of string)
- * - start of input (Address of first character in string)
+ * - end of input (address of end of string)
+ * - start of input (address of first character in string)
* - start index (character index of start)
* - String* input_string (input string)
* - return address
* - backup of callee save registers (rbx, possibly rsi and rdi).
+ * - success counter (only useful for global regexp to count matches)
* - Offset of location before start of input (effectively character
- * position -1). Used to initialize capture registers to a non-position.
+ * position -1). Used to initialize capture registers to a non-position.
* - At start of string (if 1, we are starting at the start of the
* string, otherwise 0)
* - register 0 rbp[-n] (Only positions must be stored in the first
@@ -94,7 +98,7 @@ namespace internal {
*
* The first num_saved_registers_ registers are initialized to point to
* "character -1" in the string (i.e., char_size() bytes before the first
- * character of the string). The remaining registers starts out uninitialized.
+ * character of the string). The remaining registers starts out uninitialized.
*
* The first seven values must be provided by the calling code by
* calling the code's entry address cast to a function pointer with the
@@ -113,10 +117,12 @@ namespace internal {
RegExpMacroAssemblerX64::RegExpMacroAssemblerX64(
Mode mode,
- int registers_to_save)
- : masm_(Isolate::Current(), NULL, kRegExpCodeSize),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(Isolate::Current(), NULL, kRegExpCodeSize),
no_root_array_scope_(&masm_),
- code_relative_fixup_positions_(4),
+ code_relative_fixup_positions_(4, zone),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -347,6 +353,14 @@ void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase(
// In either case succeed immediately.
__ j(equal, &fallthrough);
+ // -----------------------
+ // rdx - Start of capture
+ // rbx - length of capture
+ // Check that there are sufficient characters left in the input.
+ __ movl(rax, rdi);
+ __ addl(rax, rbx);
+ BranchOrBacktrack(greater, on_no_match);
+
if (mode_ == ASCII) {
Label loop_increment;
if (on_no_match == NULL) {
@@ -523,15 +537,6 @@ void RegExpMacroAssemblerX64::CheckNotBackReference(
}
-void RegExpMacroAssemblerX64::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ movq(rax, register_location(reg1));
- __ cmpq(rax, register_location(reg2));
- BranchOrBacktrack(not_equal, on_not_equal);
-}
-
-
void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
__ cmpl(current_character(), Immediate(c));
@@ -744,13 +749,16 @@ bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type,
void RegExpMacroAssemblerX64::Fail() {
- ASSERT(FAILURE == 0); // Return value for failure is zero.
- __ Set(rax, 0);
+ STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
+ if (!global()) {
+ __ Set(rax, FAILURE);
+ }
__ jmp(&exit_label_);
}
Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
+ Label return_rax;
// Finalize code - write the entry point code now we know how many
// registers we need.
// Entry code:
@@ -784,7 +792,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
ASSERT_EQ(kInputStart, -3 * kPointerSize);
ASSERT_EQ(kInputEnd, -4 * kPointerSize);
ASSERT_EQ(kRegisterOutput, -5 * kPointerSize);
- ASSERT_EQ(kStackHighEnd, -6 * kPointerSize);
+ ASSERT_EQ(kNumOutputRegisters, -6 * kPointerSize);
__ push(rdi);
__ push(rsi);
__ push(rdx);
@@ -795,7 +803,8 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ push(rbx); // Callee-save
#endif
- __ push(Immediate(0)); // Make room for "at start" constant.
+ __ push(Immediate(0)); // Number of successful matches in a global regexp.
+ __ push(Immediate(0)); // Make room for "input start - 1" constant.
// Check if we have space on the stack for registers.
Label stack_limit_hit;
@@ -815,14 +824,14 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ Set(rax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_rax);
__ bind(&stack_limit_hit);
__ Move(code_object_pointer(), masm_.CodeObject());
CallCheckStackGuardState(); // Preserves no registers beside rbp and rsp.
__ testq(rax, rax);
// If returned value is non-zero, we exit with the returned value as result.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_rax);
__ bind(&stack_ok);
@@ -847,19 +856,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// position registers.
__ movq(Operand(rbp, kInputStartMinusOne), rax);
- if (num_saved_registers_ > 0) {
- // Fill saved registers with initial value = start offset - 1
- // Fill in stack push order, to avoid accessing across an unwritten
- // page (a problem on Windows).
- __ Set(rcx, kRegisterZero);
- Label init_loop;
- __ bind(&init_loop);
- __ movq(Operand(rbp, rcx, times_1, 0), rax);
- __ subq(rcx, Immediate(kPointerSize));
- __ cmpq(rcx,
- Immediate(kRegisterZero - num_saved_registers_ * kPointerSize));
- __ j(greater, &init_loop);
- }
+#ifdef WIN32
// Ensure that we have written to each stack page, in order. Skipping a page
// on Windows can cause segmentation faults. Assuming page size is 4k.
const int kPageSize = 4096;
@@ -869,21 +866,49 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
i += kRegistersPerPage) {
__ movq(register_location(i), rax); // One write every page.
}
+#endif // WIN32
- // Initialize backtrack stack pointer.
- __ movq(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));
// Initialize code object pointer.
__ Move(code_object_pointer(), masm_.CodeObject());
- // Load previous char as initial value of current-character.
- Label at_start;
- __ cmpb(Operand(rbp, kStartIndex), Immediate(0));
- __ j(equal, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmpl(Operand(rbp, kStartIndex), Immediate(0));
+ __ j(not_equal, &load_char_start_regexp, Label::kNear);
__ Set(current_character(), '\n');
- __ jmp(&start_label_);
+ __ jmp(&start_regexp, Label::kNear);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+
+ // Initialize on-stack registers.
+ if (num_saved_registers_ > 0) {
+ // Fill saved registers with initial value = start offset - 1
+ // Fill in stack push order, to avoid accessing across an unwritten
+ // page (a problem on Windows).
+ if (num_saved_registers_ > 8) {
+ __ Set(rcx, kRegisterZero);
+ Label init_loop;
+ __ bind(&init_loop);
+ __ movq(Operand(rbp, rcx, times_1, 0), rax);
+ __ subq(rcx, Immediate(kPointerSize));
+ __ cmpq(rcx,
+ Immediate(kRegisterZero - num_saved_registers_ * kPointerSize));
+ __ j(greater, &init_loop);
+ } else { // Unroll the loop.
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ movq(register_location(i), rax);
+ }
+ }
+ }
+ // Initialize backtrack stack pointer.
+ __ movq(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));
+
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -902,6 +927,10 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
}
for (int i = 0; i < num_saved_registers_; i++) {
__ movq(rax, register_location(i));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in rdx for the zero-length check later.
+ __ movq(rdx, rax);
+ }
__ addq(rax, rcx); // Convert to index from start, not end.
if (mode_ == UC16) {
__ sar(rax, Immediate(1)); // Convert byte index to character index.
@@ -909,12 +938,57 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ movl(Operand(rbx, i * kIntSize), rax);
}
}
- __ Set(rax, SUCCESS);
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ // Increment success counter.
+ __ incq(Operand(rbp, kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ movsxlq(rcx, Operand(rbp, kNumOutputRegisters));
+ __ subq(rcx, Immediate(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmpq(rcx, Immediate(num_saved_registers_));
+ __ j(less, &exit_label_);
+
+ __ movq(Operand(rbp, kNumOutputRegisters), rcx);
+ // Advance the location for output.
+ __ addq(Operand(rbp, kRegisterOutput),
+ Immediate(num_saved_registers_ * kIntSize));
+
+ // Prepare rax to initialize registers with its value in the next run.
+ __ movq(rax, Operand(rbp, kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // rdx: capture start index
+ __ cmpq(rdi, rdx);
+ // Not a zero-length match, restart.
+ __ j(not_equal, &load_char_start_regexp);
+ // rdi (offset from the end) is zero if we already reached the end.
+ __ testq(rdi, rdi);
+ __ j(zero, &exit_label_, Label::kNear);
+ // Advance current position after a zero-length match.
+ if (mode_ == UC16) {
+ __ addq(rdi, Immediate(2));
+ } else {
+ __ incq(rdi);
+ }
+ }
+
+ __ jmp(&load_char_start_regexp);
+ } else {
+ __ movq(rax, Immediate(SUCCESS));
+ }
}
- // Exit and return rax
__ bind(&exit_label_);
+ if (global()) {
+ // Return the number of successful captures.
+ __ movq(rax, Operand(rbp, kSuccessfulCaptures));
+ }
+ __ bind(&return_rax);
#ifdef _WIN64
// Restore callee save registers.
__ lea(rsp, Operand(rbp, kLastCalleeSaveRegister));
@@ -951,7 +1025,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ testq(rax, rax);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_rax);
// Restore registers.
__ Move(code_object_pointer(), masm_.CodeObject());
@@ -1012,7 +1086,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ Set(rax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_rax);
}
FixupCodeRelativePositions();
@@ -1135,8 +1209,9 @@ void RegExpMacroAssemblerX64::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerX64::Succeed() {
+bool RegExpMacroAssemblerX64::Succeed() {
__ jmp(&success_label_);
+ return global();
}
diff --git a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
index cd24b60..a082cf2 100644
--- a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,7 +41,7 @@ namespace internal {
class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerX64(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerX64(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerX64();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -66,7 +66,6 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -109,7 +108,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -154,7 +153,12 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ // For the case of global regular expression, we have room to store at least
+ // one set of capture results. For the case of non-global regexp, we ignore
+ // this value. NumOutputRegisters is passed as 32-bit value. The upper
+ // 32 bit of this 64-bit stack slot may contain garbage.
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
// DirectCall is passed as 32 bit int (values 0 or 1).
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -167,8 +171,12 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex - kPointerSize;
static const int kInputEnd = kInputStart - kPointerSize;
static const int kRegisterOutput = kInputEnd - kPointerSize;
- static const int kStackHighEnd = kRegisterOutput - kPointerSize;
- static const int kDirectCall = kFrameAlign;
+ // For the case of global regular expression, we have room to store at least
+ // one set of capture results. For the case of non-global regexp, we ignore
+ // this value.
+ static const int kNumOutputRegisters = kRegisterOutput - kPointerSize;
+ static const int kStackHighEnd = kFrameAlign;
+ static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
#endif
@@ -183,14 +191,14 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
// AMD64 Calling Convention has only one callee-save register that
// we use. We push this after the frame pointer (and after the
// parameters).
- static const int kBackup_rbx = kStackHighEnd - kPointerSize;
+ static const int kBackup_rbx = kNumOutputRegisters - kPointerSize;
static const int kLastCalleeSaveRegister = kBackup_rbx;
#endif
+ static const int kSuccessfulCaptures = kLastCalleeSaveRegister - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne =
- kLastCalleeSaveRegister - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
@@ -232,7 +240,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
void BranchOrBacktrack(Condition condition, Label* to);
void MarkPositionForCodeRelativeFixup() {
- code_relative_fixup_positions_.Add(masm_.pc_offset());
+ code_relative_fixup_positions_.Add(masm_.pc_offset(), zone());
}
void FixupCodeRelativePositions();
diff --git a/src/3rdparty/v8/src/x64/simulator-x64.h b/src/3rdparty/v8/src/x64/simulator-x64.h
index df8423a..8aba701 100644
--- a/src/3rdparty/v8/src/x64/simulator-x64.h
+++ b/src/3rdparty/v8/src/x64/simulator-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,12 +40,12 @@ namespace internal {
(entry(p0, p1, p2, p3, p4))
typedef int (*regexp_matcher)(String*, int, const byte*,
- const byte*, int*, Address, int, Isolate*);
+ const byte*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address should
// expect eight int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
- (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
(reinterpret_cast<TryCatch*>(try_catch_address))
diff --git a/src/3rdparty/v8/src/x64/stub-cache-x64.cc b/src/3rdparty/v8/src/x64/stub-cache-x64.cc
index 5721e9b..b120efb 100644
--- a/src/3rdparty/v8/src/x64/stub-cache-x64.cc
+++ b/src/3rdparty/v8/src/x64/stub-cache-x64.cc
@@ -228,15 +228,15 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
Register prototype) {
// Load the global or builtins object from the current context.
__ movq(prototype,
- Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
- // Load the global context from the global or builtins object.
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+ // Load the native context from the global or builtins object.
__ movq(prototype,
- FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
- // Load the function from the global context.
+ FieldOperand(prototype, GlobalObject::kNativeContextOffset));
+ // Load the function from the native context.
__ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
// Load the initial map. The global functions all have initial maps.
__ movq(prototype,
- FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
+ FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
// Load the prototype from the initial map.
__ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
@@ -249,13 +249,13 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
Label* miss) {
Isolate* isolate = masm->isolate();
// Check we're still in the same context.
- __ Move(prototype, isolate->global());
- __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
+ __ Move(prototype, isolate->global_object());
+ __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
prototype);
__ j(not_equal, miss);
// Get the global function with the given index.
Handle<JSFunction> function(
- JSFunction::cast(isolate->global_context()->get(index)));
+ JSFunction::cast(isolate->native_context()->get(index)));
// Load its initial map. The global functions all have initial maps.
__ Move(prototype, Handle<Map>(function->initial_map()));
// Load the prototype from the initial map.
@@ -731,10 +731,22 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
@@ -743,7 +755,32 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register, push
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -754,11 +791,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ pop(scratch); // Return address.
+ __ pop(scratch1); // Return address.
__ push(receiver_reg);
__ Push(transition);
__ push(rax);
- __ push(scratch);
+ __ push(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
@@ -768,9 +805,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), transition);
+ // Update the map of the object.
+ __ Move(scratch1, transition);
+ __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -787,19 +834,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Pass the value being stored in the now unused name_reg.
__ movq(name_reg, rax);
__ RecordWriteField(
- receiver_reg, offset, name_reg, scratch, kDontSaveFPRegs);
+ receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array (optimistically).
- __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ movq(FieldOperand(scratch, offset), rax);
+ __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ movq(FieldOperand(scratch1, offset), rax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ movq(name_reg, rax);
__ RecordWriteField(
- scratch, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+ scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
}
// Return the value (register rax).
@@ -982,6 +1029,49 @@ void StubCompiler::GenerateLoadField(Handle<JSObject> object,
}
+void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Handle<AccessorInfo> callback,
+ Handle<String> name,
+ Label* miss) {
+ ASSERT(!receiver.is(scratch1));
+ ASSERT(!receiver.is(scratch2));
+ ASSERT(!receiver.is(scratch3));
+
+ // Load the properties dictionary.
+ Register dictionary = scratch1;
+ __ movq(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
+
+ // Probe the dictionary.
+ Label probe_done;
+ StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
+ miss,
+ &probe_done,
+ dictionary,
+ name_reg,
+ scratch2,
+ scratch3);
+ __ bind(&probe_done);
+
+ // If probing finds an entry in the dictionary, scratch3 contains the
+ // index into the dictionary. Check that the value is the callback.
+ Register index = scratch3;
+ const int kElementsStartOffset =
+ StringDictionary::kHeaderSize +
+ StringDictionary::kElementsStartIndex * kPointerSize;
+ const int kValueOffset = kElementsStartOffset + kPointerSize;
+ __ movq(scratch2,
+ Operand(dictionary, index, times_pointer_size,
+ kValueOffset - kHeapObjectTag));
+ __ movq(scratch3, callback, RelocInfo::EMBEDDED_OBJECT);
+ __ cmpq(scratch2, scratch3);
+ __ j(not_equal, miss);
+}
+
+
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Handle<JSObject> holder,
Register receiver,
@@ -989,6 +1079,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
+ Register scratch4,
Handle<AccessorInfo> callback,
Handle<String> name,
Label* miss) {
@@ -999,6 +1090,11 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Register reg = CheckPrototypes(object, receiver, holder, scratch1,
scratch2, scratch3, name, miss);
+ if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
+ GenerateDictionaryLoadCallback(
+ reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
+ }
+
// Insert additional parameters into the stack frame above return address.
ASSERT(!scratch2.is(reg));
__ pop(scratch2); // Get return address to place it below.
@@ -1096,12 +1192,13 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// later.
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1173,7 +1270,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
miss);
}
- if (lookup->type() == FIELD) {
+ if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), rax, holder_reg,
@@ -1343,7 +1440,7 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -1434,17 +1531,32 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(rbx, &call_builtin);
+ __ CheckFastSmiElements(rbx, &call_builtin);
// rdx: receiver
// rbx: map
- __ movq(r9, rdi); // Backup rdi as it is going to be trashed.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
rbx,
rdi,
+ &try_holey_map);
+
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ // Restore edi.
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ rbx,
+ rdi,
&call_builtin);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
- __ movq(rdi, r9);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(rbx, &call_builtin);
@@ -1852,7 +1964,7 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -1967,7 +2079,7 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
GenerateMissBranch();
// Return the generated code.
- return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
+ return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
@@ -2216,7 +2328,7 @@ Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
GenerateMissBranch();
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2279,7 +2391,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
GenerateMissBranch();
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2296,7 +2408,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Generate store field code. Preserves receiver and name on jump to miss.
- GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ rdx, rcx, rbx, rdi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2304,14 +2422,17 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<JSObject> object,
- Handle<AccessorInfo> callback,
- Handle<String> name) {
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : name
@@ -2319,19 +2440,12 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -- rsp[0] : return address
// -----------------------------------
Label miss;
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(rdx, &miss);
+ CheckPrototypes(receiver, rdx, holder, rbx, r8, rdi, name, &miss);
- // Check that the map of the object hasn't changed.
- __ CheckMap(rdx, Handle<Map>(object->map()), &miss,
- DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(rdx, rbx, &miss);
- }
-
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+ // Stub never generated for non-global objects that require access checks.
+ ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
__ pop(rbx); // remove the return address
__ push(rdx); // receiver
@@ -2351,7 +2465,81 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void StoreStubCompiler::GenerateStoreViaSetter(
+ MacroAssembler* masm,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(rax);
+
+ if (!setter.is_null()) {
+ // Call the JavaScript setter with receiver and value on the stack.
+ __ push(rdx);
+ __ push(rax);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(rax);
+
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> setter) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(rdx, &miss);
+ CheckPrototypes(receiver, rdx, holder, rbx, r8, rdi, name, &miss);
+
+ GenerateStoreViaSetter(masm(), setter);
+
+ __ bind(&miss);
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2397,7 +2585,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2445,7 +2633,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2469,7 +2657,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ j(not_equal, &miss);
// Generate store field code. Preserves receiver and name on jump to miss.
- GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ rdx, rcx, rbx, rdi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2478,7 +2672,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
+ return GetCode(transition.is_null()
+ ? Code::FIELD
+ : Code::MAP_TRANSITION, name);
}
@@ -2502,7 +2698,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -2540,7 +2736,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -2578,7 +2774,7 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NONEXISTENT, factory()->empty_string());
+ return GetCode(Code::NONEXISTENT, factory()->empty_string());
}
@@ -2598,7 +2794,7 @@ Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2613,13 +2809,76 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
// -- rsp[0] : return address
// -----------------------------------
Label miss;
- GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, callback,
+ GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, r8, callback,
name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
+void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+
+ if (!getter.is_null()) {
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(rax);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+ } else {
+ // If we generate a global code snippet for deoptimization only, remember
+ // the place to continue after deoptimization.
+ masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
+ }
+
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(rax, &miss);
+ CheckPrototypes(receiver, rax, holder, rbx, rdx, rdi, name, &miss);
+
+ GenerateLoadViaGetter(masm(), getter),
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2639,7 +2898,7 @@ Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2663,7 +2922,7 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2707,7 +2966,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, name);
+ return GetCode(Code::NORMAL, name);
}
@@ -2736,7 +2995,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(FIELD, name);
+ return GetCode(Code::FIELD, name);
}
@@ -2758,14 +3017,14 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
__ Cmp(rax, name);
__ j(not_equal, &miss);
- GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, callback,
+ GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, r8, callback,
name, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_callback(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2795,7 +3054,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
+ return GetCode(Code::CONSTANT_FUNCTION, name);
}
@@ -2825,7 +3084,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(INTERCEPTOR, name);
+ return GetCode(Code::INTERCEPTOR, name);
}
@@ -2851,7 +3110,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2877,7 +3136,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2903,7 +3162,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(CALLBACKS, name);
+ return GetCode(Code::CALLBACKS, name);
}
@@ -2923,7 +3182,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string());
+ return GetCode(Code::NORMAL, factory()->empty_string());
}
@@ -2951,7 +3210,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
- return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
+ return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
@@ -2981,6 +3240,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
#endif
// Load the initial map and verify that it is in fact a map.
+ // rdi: constructor
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
@@ -2990,18 +3250,22 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
#ifdef DEBUG
// Cannot construct functions this way.
- // rdi: constructor
// rbx: initial map
__ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
- __ Assert(not_equal, "Function constructed by construct stub.");
+ __ Check(not_equal, "Function constructed by construct stub.");
#endif
// Now allocate the JSObject in new space.
- // rdi: constructor
// rbx: initial map
+ ASSERT(function->has_initial_map());
+ int instance_size = function->initial_map()->instance_size();
+#ifdef DEBUG
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ shl(rcx, Immediate(kPointerSizeLog2));
- __ AllocateInNewSpace(rcx, rdx, rcx, no_reg,
+ __ cmpq(rcx, Immediate(instance_size));
+ __ Check(equal, "Instance size of initial map changed.");
+#endif
+ __ AllocateInNewSpace(instance_size, rdx, rcx, no_reg,
&generic_stub_call, NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
@@ -3047,7 +3311,6 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
}
// Fill the unused in-object property fields with undefined.
- ASSERT(function->has_initial_map());
for (int i = shared->this_property_assignments_count();
i < function->initial_map()->inobject_properties();
i++) {
@@ -3369,8 +3632,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3435,8 +3701,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3587,7 +3856,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi or a heap number convertible to a smi.
GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(rax, &transition_elements_kind);
}
@@ -3611,13 +3880,13 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &miss_force_generic);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ SmiToInteger32(rcx, rcx);
__ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
rax);
} else {
// Do the store and update the write barrier.
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ SmiToInteger32(rcx, rcx);
__ lea(rcx,
FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
diff --git a/src/3rdparty/v8/src/zone-inl.h b/src/3rdparty/v8/src/zone-inl.h
index ee96ec0..e312b20 100644
--- a/src/3rdparty/v8/src/zone-inl.h
+++ b/src/3rdparty/v8/src/zone-inl.h
@@ -40,7 +40,7 @@ namespace internal {
inline void* Zone::New(int size) {
- ASSERT(ZoneScope::nesting() > 0);
+ ASSERT(scope_nesting_ > 0);
// Round up the requested size to fit the alignment.
size = RoundUp(size, kAlignment);
@@ -90,30 +90,17 @@ ZoneSplayTree<Config>::~ZoneSplayTree() {
// Reset the root to avoid unneeded iteration over all tree nodes
// in the destructor. For a zone-allocated tree, nodes will be
// freed by the Zone.
- SplayTree<Config, ZoneListAllocationPolicy>::ResetRoot();
+ SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
}
-// TODO(isolates): for performance reasons, this should be replaced with a new
-// operator that takes the zone in which the object should be
-// allocated.
-void* ZoneObject::operator new(size_t size) {
- return ZONE->New(static_cast<int>(size));
-}
-
void* ZoneObject::operator new(size_t size, Zone* zone) {
return zone->New(static_cast<int>(size));
}
-
-inline void* ZoneListAllocationPolicy::New(int size) {
- return ZONE->New(size);
-}
-
-
-template <typename T>
-void* ZoneList<T>::operator new(size_t size) {
- return ZONE->New(static_cast<int>(size));
+inline void* ZoneAllocationPolicy::New(size_t size) {
+ ASSERT(zone_);
+ return zone_->New(static_cast<int>(size));
}
@@ -123,19 +110,14 @@ void* ZoneList<T>::operator new(size_t size, Zone* zone) {
}
-ZoneScope::ZoneScope(Isolate* isolate, ZoneScopeMode mode)
- : isolate_(isolate), mode_(mode) {
- isolate_->zone()->scope_nesting_++;
+ZoneScope::ZoneScope(Zone* zone, ZoneScopeMode mode)
+ : zone_(zone), mode_(mode) {
+ zone_->scope_nesting_++;
}
bool ZoneScope::ShouldDeleteOnExit() {
- return isolate_->zone()->scope_nesting_ == 1 && mode_ == DELETE_ON_EXIT;
-}
-
-
-int ZoneScope::nesting() {
- return Isolate::Current()->zone()->scope_nesting_;
+ return zone_->scope_nesting_ == 1 && mode_ == DELETE_ON_EXIT;
}
diff --git a/src/3rdparty/v8/src/zone.cc b/src/3rdparty/v8/src/zone.cc
index d5d05ab..51b8113 100644
--- a/src/3rdparty/v8/src/zone.cc
+++ b/src/3rdparty/v8/src/zone.cc
@@ -67,20 +67,20 @@ class Segment {
};
-Zone::Zone()
+Zone::Zone(Isolate* isolate)
: zone_excess_limit_(256 * MB),
segment_bytes_allocated_(0),
position_(0),
limit_(0),
scope_nesting_(0),
- segment_head_(NULL) {
+ segment_head_(NULL),
+ isolate_(isolate) {
}
unsigned Zone::allocation_size_ = 0;
ZoneScope::~ZoneScope() {
- ASSERT_EQ(Isolate::Current(), isolate_);
- if (ShouldDeleteOnExit()) isolate_->zone()->DeleteAll();
- isolate_->zone()->scope_nesting_--;
+ if (ShouldDeleteOnExit()) zone_->DeleteAll();
+ zone_->scope_nesting_--;
}
diff --git a/src/3rdparty/v8/src/zone.h b/src/3rdparty/v8/src/zone.h
index 8648465..01e887e 100644
--- a/src/3rdparty/v8/src/zone.h
+++ b/src/3rdparty/v8/src/zone.h
@@ -64,6 +64,8 @@ class Isolate;
class Zone {
public:
+ explicit Zone(Isolate* isolate);
+ ~Zone() { DeleteKeptSegment(); }
// Allocate 'size' bytes of memory in the Zone; expands the Zone by
// allocating new segments of memory on demand using malloc().
inline void* New(int size);
@@ -114,9 +116,6 @@ class Zone {
// the zone.
int segment_bytes_allocated_;
- // Each isolate gets its own zone.
- Zone();
-
// Expand the Zone to hold at least 'size' more bytes and allocate
// the bytes. Returns the address of the newly allocated chunk of
// memory in the Zone. Should only be called if there isn't enough
@@ -148,7 +147,6 @@ class Zone {
class ZoneObject {
public:
// Allocate a new ZoneObject of 'size' bytes in the Zone.
- INLINE(void* operator new(size_t size));
INLINE(void* operator new(size_t size, Zone* zone));
// Ideally, the delete operator should be private instead of
@@ -164,16 +162,16 @@ class ZoneObject {
};
-// The ZoneListAllocationPolicy is used to specialize the GenericList
-// implementation to allocate ZoneLists and their elements in the
-// Zone.
-class ZoneListAllocationPolicy {
+// The ZoneAllocationPolicy is used to specialize generic data
+// structures to allocate themselves and their elements in the Zone.
+struct ZoneAllocationPolicy {
public:
- // Allocate 'size' bytes of memory in the zone.
- static void* New(int size);
+ explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) { }
+ INLINE(void* New(size_t size));
+ INLINE(static void Delete(void *pointer)) { }
- // De-allocation attempts are silently ignored.
- static void Delete(void* p) { }
+ private:
+ Zone* zone_;
};
@@ -182,20 +180,48 @@ class ZoneListAllocationPolicy {
// Zone. ZoneLists cannot be deleted individually; you can delete all
// objects in the Zone by calling Zone::DeleteAll().
template<typename T>
-class ZoneList: public List<T, ZoneListAllocationPolicy> {
+class ZoneList: public List<T, ZoneAllocationPolicy> {
public:
- INLINE(void* operator new(size_t size));
- INLINE(void* operator new(size_t size, Zone* zone));
-
// Construct a new ZoneList with the given capacity; the length is
// always zero. The capacity must be non-negative.
- explicit ZoneList(int capacity)
- : List<T, ZoneListAllocationPolicy>(capacity) { }
+ ZoneList(int capacity, Zone* zone)
+ : List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) { }
+
+ INLINE(void* operator new(size_t size, Zone* zone));
// Construct a new ZoneList by copying the elements of the given ZoneList.
- explicit ZoneList(const ZoneList<T>& other)
- : List<T, ZoneListAllocationPolicy>(other.length()) {
- AddAll(other);
+ ZoneList(const ZoneList<T>& other, Zone* zone)
+ : List<T, ZoneAllocationPolicy>(other.length(),
+ ZoneAllocationPolicy(zone)) {
+ AddAll(other, ZoneAllocationPolicy(zone));
+ }
+
+ // We add some convenience wrappers so that we can pass in a Zone
+ // instead of a (less convenient) ZoneAllocationPolicy.
+ INLINE(void Add(const T& element, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Add(element, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void AddAll(const List<T, ZoneAllocationPolicy>& other,
+ Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void AddAll(const Vector<T>& other, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void InsertAt(int index, const T& element, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::InsertAt(index, element,
+ ZoneAllocationPolicy(zone));
+ }
+ INLINE(Vector<T> AddBlock(T value, int count, Zone* zone)) {
+ return List<T, ZoneAllocationPolicy>::AddBlock(value, count,
+ ZoneAllocationPolicy(zone));
+ }
+ INLINE(void Allocate(int length, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Allocate(length, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void Initialize(int capacity, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Initialize(capacity,
+ ZoneAllocationPolicy(zone));
}
void operator delete(void* pointer) { UNREACHABLE(); }
@@ -208,7 +234,7 @@ class ZoneList: public List<T, ZoneListAllocationPolicy> {
// outer-most scope.
class ZoneScope BASE_EMBEDDED {
public:
- INLINE(ZoneScope(Isolate* isolate, ZoneScopeMode mode));
+ INLINE(ZoneScope(Zone* zone, ZoneScopeMode mode));
virtual ~ZoneScope();
@@ -223,7 +249,7 @@ class ZoneScope BASE_EMBEDDED {
inline static int nesting();
private:
- Isolate* isolate_;
+ Zone* zone_;
ZoneScopeMode mode_;
};
@@ -232,15 +258,15 @@ class ZoneScope BASE_EMBEDDED {
// different configurations of a concrete splay tree (see splay-tree.h).
// The tree itself and all its elements are allocated in the Zone.
template <typename Config>
-class ZoneSplayTree: public SplayTree<Config, ZoneListAllocationPolicy> {
+class ZoneSplayTree: public SplayTree<Config, ZoneAllocationPolicy> {
public:
- ZoneSplayTree()
- : SplayTree<Config, ZoneListAllocationPolicy>() {}
+ explicit ZoneSplayTree(Zone* zone)
+ : SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
~ZoneSplayTree();
};
-typedef TemplateHashMapImpl<ZoneListAllocationPolicy> ZoneHashMap;
+typedef TemplateHashMapImpl<ZoneAllocationPolicy> ZoneHashMap;
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/test/benchmarks/testcfg.py b/src/3rdparty/v8/test/benchmarks/testcfg.py
index ab9d40f..5bbad7a 100644
--- a/src/3rdparty/v8/test/benchmarks/testcfg.py
+++ b/src/3rdparty/v8/test/benchmarks/testcfg.py
@@ -30,6 +30,11 @@ import test
import os
from os.path import join, split
+def GetSuite(name, root):
+ # Not implemented.
+ return None
+
+
def IsNumber(string):
try:
float(string)
diff --git a/src/3rdparty/v8/test/cctest/cctest.gyp b/src/3rdparty/v8/test/cctest/cctest.gyp
index a242fe3..80eecfd 100644
--- a/src/3rdparty/v8/test/cctest/cctest.gyp
+++ b/src/3rdparty/v8/test/cctest/cctest.gyp
@@ -79,6 +79,7 @@
'test-lockers.cc',
'test-log.cc',
'test-mark-compact.cc',
+ 'test-object-observe.cc',
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
@@ -118,7 +119,7 @@
'test-disasm-arm.cc'
],
}],
- ['v8_target_arch=="mips"', {
+ ['v8_target_arch=="mipsel"', {
'sources': [
'test-assembler-mips.cc',
'test-disasm-mips.cc',
diff --git a/src/3rdparty/v8/test/cctest/cctest.h b/src/3rdparty/v8/test/cctest/cctest.h
index 0b93562..f3961a4 100644
--- a/src/3rdparty/v8/test/cctest/cctest.h
+++ b/src/3rdparty/v8/test/cctest/cctest.h
@@ -214,4 +214,23 @@ static inline v8::Local<v8::Value> CompileRun(const char* source) {
}
+// Helper function that compiles and runs the source with given origin.
+static inline v8::Local<v8::Value> CompileRunWithOrigin(const char* source,
+ const char* origin_url,
+ int line_number,
+ int column_number) {
+ v8::ScriptOrigin origin(v8::String::New(origin_url),
+ v8::Integer::New(line_number),
+ v8::Integer::New(column_number));
+ return v8::Script::Compile(v8::String::New(source), &origin)->Run();
+}
+
+
+// Pick a slightly different port to allow tests to be run in parallel.
+static inline int FlagDependentPortOffset() {
+ return ::v8::internal::FLAG_crankshaft == false ? 100 :
+ ::v8::internal::FLAG_always_opt ? 200 : 0;
+}
+
+
#endif // ifndef CCTEST_H_
diff --git a/src/3rdparty/v8/test/cctest/cctest.status b/src/3rdparty/v8/test/cctest/cctest.status
index af28be1..ab59e33 100644
--- a/src/3rdparty/v8/test/cctest/cctest.status
+++ b/src/3rdparty/v8/test/cctest/cctest.status
@@ -27,6 +27,7 @@
prefix cctest
+# All tests prefixed with 'Bug' are expected to fail.
test-api/Bug*: FAIL
##############################################################################
@@ -43,6 +44,9 @@ test-heap-profiler/HeapSnapshotsDiff: PASS || FAIL
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
+# This test always fails. It tests that LiveEdit causes abort when turned off.
+test-debug/LiveEditDisabled: FAIL
+
# TODO(gc): Temporarily disabled in the GC branch.
test-log/EquivalenceOfLoggingAndTraversal: PASS || FAIL
@@ -64,11 +68,6 @@ test-api/OutOfMemoryNested: SKIP
# BUG(355): Test crashes on ARM.
test-log/ProfLazyMode: SKIP
-# BUG(945): Socket connect fails on ARM
-test-debug/DebuggerAgent: SKIP
-test-debug/DebuggerAgentProtocolOverflowHeader: SKIP
-test-sockets/Socket: SKIP
-
# BUG(1075): Unresolved crashes.
test-serialize/Deserialize: SKIP
test-serialize/DeserializeFromSecondSerializationAndRunScript2: SKIP
@@ -76,16 +75,15 @@ test-serialize/DeserializeAndRunScript2: SKIP
test-serialize/DeserializeFromSecondSerialization: SKIP
##############################################################################
-[ $arch == arm && $crankshaft ]
-
-# Tests that time out with crankshaft.
-test-debug/ThreadedDebugging: SKIP
-test-debug/DebugBreakLoop: SKIP
+[ $arch == android_arm || $arch == android_ia32 ]
+# Tests crash as there is no /tmp directory in Android.
+test-log/LogAccessorCallbacks: SKIP
+test-log/LogCallbacks: SKIP
+test-log/ProfLazyMode: SKIP
-##############################################################################
-[ $arch == mips && $crankshaft ]
+# platform-tls.h does not contain an ANDROID-related header.
+test-platform-tls/FastTLS: SKIP
-# Tests that time out with crankshaft.
-test-debug/ThreadedDebugging: SKIP
-test-debug/DebugBreakLoop: SKIP
+# This test times out.
+test-threads/ThreadJoinSelf: SKIP
diff --git a/src/3rdparty/v8/test/cctest/test-alloc.cc b/src/3rdparty/v8/test/cctest/test-alloc.cc
index e195d14..7ba2583 100644
--- a/src/3rdparty/v8/test/cctest/test-alloc.cc
+++ b/src/3rdparty/v8/test/cctest/test-alloc.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,7 +34,8 @@
using namespace v8::internal;
-static inline void SimulateFullSpace(PagedSpace* space) {
+// Also used in test-heap.cc test cases.
+void SimulateFullSpace(PagedSpace* space) {
int old_linear_size = static_cast<int>(space->limit() - space->top());
space->Free(space->top(), old_linear_size);
space->SetTop(space->limit(), space->limit());
@@ -150,12 +151,21 @@ TEST(StressJS) {
Handle<Map> map(function->initial_map());
Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
Handle<Foreign> foreign = FACTORY->NewForeign(&kDescriptor);
- instance_descriptors = FACTORY->CopyAppendForeignDescriptor(
- instance_descriptors,
- FACTORY->NewStringFromAscii(Vector<const char>("get", 3)),
- foreign,
- static_cast<PropertyAttributes>(0));
- map->set_instance_descriptors(*instance_descriptors);
+ Handle<String> name =
+ FACTORY->NewStringFromAscii(Vector<const char>("get", 3));
+ ASSERT(instance_descriptors->IsEmpty());
+
+ Handle<DescriptorArray> new_descriptors = FACTORY->NewDescriptorArray(0, 1);
+
+ v8::internal::DescriptorArray::WhitenessWitness witness(*new_descriptors);
+ map->set_instance_descriptors(*new_descriptors);
+
+ CallbacksDescriptor d(*name,
+ *foreign,
+ static_cast<PropertyAttributes>(0),
+ v8::internal::PropertyDetails::kInitialIndex);
+ map->AppendDescriptor(&d, witness);
+
// Add the Foo constructor the global object.
env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
// Call the accessor through JavaScript.
diff --git a/src/3rdparty/v8/test/cctest/test-api.cc b/src/3rdparty/v8/test/cctest/test-api.cc
index 9122781..f7325df 100644
--- a/src/3rdparty/v8/test/cctest/test-api.cc
+++ b/src/3rdparty/v8/test/cctest/test-api.cc
@@ -27,12 +27,18 @@
#include <limits.h>
+#ifndef WIN32
+#include <signal.h> // kill
+#include <unistd.h> // getpid
+#endif // WIN32
+
#include "v8.h"
#include "api.h"
#include "isolate.h"
#include "compilation-cache.h"
#include "execution.h"
+#include "objects.h"
#include "snapshot.h"
#include "platform.h"
#include "utils.h"
@@ -398,6 +404,10 @@ THREADED_TEST(ScriptUsingStringResource) {
CHECK(source->IsExternal());
CHECK_EQ(resource,
static_cast<TestResource*>(source->GetExternalStringResource()));
+ String::Encoding encoding = String::UNKNOWN_ENCODING;
+ CHECK_EQ(static_cast<const String::ExternalStringResourceBase*>(resource),
+ source->GetExternalStringResourceBase(&encoding));
+ CHECK_EQ(String::TWO_BYTE_ENCODING, encoding);
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
CHECK_EQ(0, dispose_count);
}
@@ -413,9 +423,16 @@ THREADED_TEST(ScriptUsingAsciiStringResource) {
{
v8::HandleScope scope;
LocalContext env;
- Local<String> source =
- String::NewExternal(new TestAsciiResource(i::StrDup(c_source),
- &dispose_count));
+ TestAsciiResource* resource = new TestAsciiResource(i::StrDup(c_source),
+ &dispose_count);
+ Local<String> source = String::NewExternal(resource);
+ CHECK(source->IsExternalAscii());
+ CHECK_EQ(static_cast<const String::ExternalStringResourceBase*>(resource),
+ source->GetExternalAsciiStringResource());
+ String::Encoding encoding = String::UNKNOWN_ENCODING;
+ CHECK_EQ(static_cast<const String::ExternalStringResourceBase*>(resource),
+ source->GetExternalStringResourceBase(&encoding));
+ CHECK_EQ(String::ASCII_ENCODING, encoding);
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
CHECK(value->IsNumber());
@@ -439,6 +456,11 @@ THREADED_TEST(ScriptMakingExternalString) {
// Trigger GCs so that the newly allocated string moves to old gen.
HEAP->CollectGarbage(i::NEW_SPACE); // in survivor space now
HEAP->CollectGarbage(i::NEW_SPACE); // in old gen now
+ CHECK_EQ(source->IsExternal(), false);
+ CHECK_EQ(source->IsExternalAscii(), false);
+ String::Encoding encoding = String::UNKNOWN_ENCODING;
+ CHECK_EQ(NULL, source->GetExternalStringResourceBase(&encoding));
+ CHECK_EQ(String::ASCII_ENCODING, encoding);
bool success = source->MakeExternal(new TestResource(two_byte_source,
&dispose_count));
CHECK(success);
@@ -947,22 +969,33 @@ THREADED_TEST(FindInstanceInPrototypeChain) {
THREADED_TEST(TinyInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
int32_t value = 239;
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::New(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigSmiInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
int32_t value = i::Smi::kMaxValue;
// We cannot add one to a Smi::kMaxValue without wrapping.
if (i::kSmiValueSize < 32) {
CHECK(i::Smi::IsValid(value));
CHECK(!i::Smi::IsValid(value + 1));
+
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::New(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
}
@@ -970,6 +1003,8 @@ THREADED_TEST(BigSmiInteger) {
THREADED_TEST(BigInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
// We cannot add one to a Smi::kMaxValue without wrapping.
if (i::kSmiValueSize < 32) {
// The casts allow this to compile, even if Smi::kMaxValue is 2^31-1.
@@ -978,8 +1013,12 @@ THREADED_TEST(BigInteger) {
static_cast<int32_t>(static_cast<uint32_t>(i::Smi::kMaxValue) + 1);
CHECK(value > i::Smi::kMaxValue);
CHECK(!i::Smi::IsValid(value));
+
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::New(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
}
@@ -987,42 +1026,66 @@ THREADED_TEST(BigInteger) {
THREADED_TEST(TinyUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
uint32_t value = 239;
+
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::NewFromUnsigned(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigUnsignedSmiInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
uint32_t value = static_cast<uint32_t>(i::Smi::kMaxValue);
CHECK(i::Smi::IsValid(value));
CHECK(!i::Smi::IsValid(value + 1));
+
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::NewFromUnsigned(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
uint32_t value = static_cast<uint32_t>(i::Smi::kMaxValue) + 1;
CHECK(value > static_cast<uint32_t>(i::Smi::kMaxValue));
CHECK(!i::Smi::IsValid(value));
+
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::NewFromUnsigned(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(OutOfSignedRangeUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+
uint32_t INT32_MAX_AS_UINT = (1U << 31) - 1;
uint32_t value = INT32_MAX_AS_UINT + 1;
CHECK(value > INT32_MAX_AS_UINT); // No overflow.
+
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
+
+ value_obj = v8::Integer::NewFromUnsigned(value, isolate);
+ CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
@@ -2080,6 +2143,10 @@ THREADED_TEST(HiddenProperties) {
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+ CHECK(obj->SetHiddenValue(key, Handle<Value>()));
+ CHECK(obj->GetHiddenValue(key).IsEmpty());
+
+ CHECK(obj->SetHiddenValue(key, v8::Integer::New(2002)));
CHECK(obj->DeleteHiddenValue(key));
CHECK(obj->GetHiddenValue(key).IsEmpty());
}
@@ -2182,6 +2249,14 @@ THREADED_TEST(GlobalHandle) {
}
CHECK_EQ(global->Length(), 3);
global.Dispose();
+
+ {
+ v8::HandleScope scope;
+ Local<String> str = v8_str("str");
+ global = v8::Persistent<String>::New(str);
+ }
+ CHECK_EQ(global->Length(), 3);
+ global.Dispose(v8::Isolate::GetCurrent());
}
@@ -2372,6 +2447,100 @@ THREADED_TEST(ApiObjectGroupsCycle) {
}
+// TODO(mstarzinger): This should be a THREADED_TEST but causes failures
+// on the buildbots, so was made non-threaded for the time being.
+TEST(ApiObjectGroupsCycleForScavenger) {
+ HandleScope scope;
+ LocalContext env;
+
+ WeakCallCounter counter(1234);
+
+ Persistent<Object> g1s1;
+ Persistent<Object> g1s2;
+ Persistent<Object> g2s1;
+ Persistent<Object> g2s2;
+ Persistent<Object> g3s1;
+ Persistent<Object> g3s2;
+
+ {
+ HandleScope scope;
+ g1s1 = Persistent<Object>::New(Object::New());
+ g1s2 = Persistent<Object>::New(Object::New());
+ g1s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+ g1s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+
+ g2s1 = Persistent<Object>::New(Object::New());
+ g2s2 = Persistent<Object>::New(Object::New());
+ g2s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+ g2s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+
+ g3s1 = Persistent<Object>::New(Object::New());
+ g3s2 = Persistent<Object>::New(Object::New());
+ g3s1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+ g3s2.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+ }
+
+ // Make a root.
+ Persistent<Object> root = Persistent<Object>::New(g1s1);
+ root.MarkPartiallyDependent();
+
+ // Connect groups. We're building the following cycle:
+ // G1: { g1s1, g2s1 }, g1s1 implicitly references g2s1, ditto for other
+ // groups.
+ {
+ g1s1.MarkPartiallyDependent();
+ g1s2.MarkPartiallyDependent();
+ g2s1.MarkPartiallyDependent();
+ g2s2.MarkPartiallyDependent();
+ g3s1.MarkPartiallyDependent();
+ g3s2.MarkPartiallyDependent();
+ Persistent<Value> g1_objects[] = { g1s1, g1s2 };
+ Persistent<Value> g2_objects[] = { g2s1, g2s2 };
+ Persistent<Value> g3_objects[] = { g3s1, g3s2 };
+ V8::AddObjectGroup(g1_objects, 2);
+ g1s1->Set(v8_str("x"), g2s1);
+ V8::AddObjectGroup(g2_objects, 2);
+ g2s1->Set(v8_str("x"), g3s1);
+ V8::AddObjectGroup(g3_objects, 2);
+ g3s1->Set(v8_str("x"), g1s1);
+ }
+
+ HEAP->CollectGarbage(i::NEW_SPACE);
+
+ // All objects should be alive.
+ CHECK_EQ(0, counter.NumberOfWeakCalls());
+
+ // Weaken the root.
+ root.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
+ root.MarkPartiallyDependent();
+
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ // Groups are deleted, rebuild groups.
+ {
+ g1s1.MarkPartiallyDependent(isolate);
+ g1s2.MarkPartiallyDependent(isolate);
+ g2s1.MarkPartiallyDependent(isolate);
+ g2s2.MarkPartiallyDependent(isolate);
+ g3s1.MarkPartiallyDependent(isolate);
+ g3s2.MarkPartiallyDependent(isolate);
+ Persistent<Value> g1_objects[] = { g1s1, g1s2 };
+ Persistent<Value> g2_objects[] = { g2s1, g2s2 };
+ Persistent<Value> g3_objects[] = { g3s1, g3s2 };
+ V8::AddObjectGroup(g1_objects, 2);
+ g1s1->Set(v8_str("x"), g2s1);
+ V8::AddObjectGroup(g2_objects, 2);
+ g2s1->Set(v8_str("x"), g3s1);
+ V8::AddObjectGroup(g3_objects, 2);
+ g3s1->Set(v8_str("x"), g1s1);
+ }
+
+ HEAP->CollectGarbage(i::NEW_SPACE);
+
+ // All objects should be gone. 7 global handles in total.
+ CHECK_EQ(7, counter.NumberOfWeakCalls());
+}
+
+
THREADED_TEST(ScriptException) {
v8::HandleScope scope;
LocalContext env;
@@ -2388,20 +2557,19 @@ THREADED_TEST(ScriptException) {
bool message_received;
-static void check_message(v8::Handle<v8::Message> message,
- v8::Handle<Value> data) {
- CHECK_EQ(5.76, data->NumberValue());
+static void check_message_0(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
CHECK_EQ(7.56, message->GetScriptData()->NumberValue());
message_received = true;
}
-THREADED_TEST(MessageHandlerData) {
+THREADED_TEST(MessageHandler0) {
message_received = false;
v8::HandleScope scope;
CHECK(!message_received);
- v8::V8::AddMessageListener(check_message, v8_num(5.76));
+ v8::V8::AddMessageListener(check_message_0);
LocalContext context;
v8::ScriptOrigin origin =
v8::ScriptOrigin(v8_str("6.75"));
@@ -2411,7 +2579,56 @@ THREADED_TEST(MessageHandlerData) {
script->Run();
CHECK(message_received);
// clear out the message listener
- v8::V8::RemoveMessageListeners(check_message);
+ v8::V8::RemoveMessageListeners(check_message_0);
+}
+
+
+static void check_message_1(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ CHECK(data->IsNumber());
+ CHECK_EQ(1337, data->Int32Value());
+ message_received = true;
+}
+
+
+TEST(MessageHandler1) {
+ message_received = false;
+ v8::HandleScope scope;
+ CHECK(!message_received);
+ v8::V8::AddMessageListener(check_message_1);
+ LocalContext context;
+ CompileRun("throw 1337;");
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_1);
+}
+
+
+static void check_message_2(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ LocalContext context;
+ CHECK(data->IsObject());
+ v8::Local<v8::Value> hidden_property =
+ v8::Object::Cast(*data)->GetHiddenValue(v8_str("hidden key"));
+ CHECK(v8_str("hidden value")->Equals(hidden_property));
+ message_received = true;
+}
+
+
+TEST(MessageHandler2) {
+ message_received = false;
+ v8::HandleScope scope;
+ CHECK(!message_received);
+ v8::V8::AddMessageListener(check_message_2);
+ LocalContext context;
+ v8::Local<v8::Value> error = v8::Exception::Error(v8_str("custom error"));
+ v8::Object::Cast(*error)->SetHiddenValue(v8_str("hidden key"),
+ v8_str("hidden value"));
+ context->Global()->Set(v8_str("error"), error);
+ CompileRun("throw error;");
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_2);
}
@@ -2697,7 +2914,7 @@ TEST(HugeConsStringOutOfMemory) {
static const int K = 1024;
v8::ResourceConstraints constraints;
constraints.set_max_young_space_size(256 * K);
- constraints.set_max_old_space_size(2 * K * K);
+ constraints.set_max_old_space_size(3 * K * K);
v8::SetResourceConstraints(&constraints);
// Execute a script that causes out of memory.
@@ -3052,7 +3269,33 @@ TEST(APIThrowMessageOverwrittenToString) {
"Number.prototype.toString = function() { return 'Whoops'; };"
"ReferenceError.prototype.toString = Object.prototype.toString;");
CompileRun("asdf;");
- v8::V8::RemoveMessageListeners(check_message);
+ v8::V8::RemoveMessageListeners(check_reference_error_message);
+}
+
+
+static void check_custom_error_message(
+ v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ const char* uncaught_error = "Uncaught MyError toString";
+ CHECK(message->Get()->Equals(v8_str(uncaught_error)));
+}
+
+
+TEST(CustomErrorToString) {
+ v8::HandleScope scope;
+ v8::V8::AddMessageListener(check_custom_error_message);
+ LocalContext context;
+ CompileRun(
+ "function MyError(name, message) { "
+ " this.name = name; "
+ " this.message = message; "
+ "} "
+ "MyError.prototype = Object.create(Error.prototype); "
+ "MyError.prototype.toString = function() { "
+ " return 'MyError toString'; "
+ "}; "
+ "throw new MyError('my name', 'my message'); ");
+ v8::V8::RemoveMessageListeners(check_custom_error_message);
}
@@ -3073,7 +3316,7 @@ TEST(APIThrowMessage) {
LocalContext context(0, templ);
CompileRun("ThrowFromC();");
CHECK(message_received);
- v8::V8::RemoveMessageListeners(check_message);
+ v8::V8::RemoveMessageListeners(receive_message);
}
@@ -3091,7 +3334,7 @@ TEST(APIThrowMessageAndVerboseTryCatch) {
CHECK(try_catch.HasCaught());
CHECK(result.IsEmpty());
CHECK(message_received);
- v8::V8::RemoveMessageListeners(check_message);
+ v8::V8::RemoveMessageListeners(receive_message);
}
@@ -3710,6 +3953,36 @@ THREADED_TEST(SimplePropertyWrite) {
}
+THREADED_TEST(SetterOnly) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetAccessor(v8_str("x"), NULL, SetXValue, v8_str("donut"));
+ LocalContext context;
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+ Local<Script> script = Script::Compile(v8_str("obj.x = 4; obj.x"));
+ for (int i = 0; i < 10; i++) {
+ CHECK(xValue.IsEmpty());
+ script->Run();
+ CHECK_EQ(v8_num(4), xValue);
+ xValue.Dispose();
+ xValue = v8::Persistent<Value>();
+ }
+}
+
+
+THREADED_TEST(NoAccessors) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetAccessor(v8_str("x"), NULL, NULL, v8_str("donut"));
+ LocalContext context;
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+ Local<Script> script = Script::Compile(v8_str("obj.x = 4; obj.x"));
+ for (int i = 0; i < 10; i++) {
+ script->Run();
+ }
+}
+
+
static v8::Handle<Value> XPropertyGetter(Local<String> property,
const AccessorInfo& info) {
ApiTestFuzzer::Fuzz();
@@ -4605,6 +4878,18 @@ THREADED_TEST(SimpleExtensions) {
}
+THREADED_TEST(NullExtensions) {
+ v8::HandleScope handle_scope;
+ v8::RegisterExtension(new Extension("nulltest", NULL));
+ const char* extension_names[] = { "nulltest" };
+ v8::ExtensionConfiguration extensions(1, extension_names);
+ v8::Handle<Context> context = Context::New(&extensions);
+ Context::Scope lock(context);
+ v8::Handle<Value> result = Script::Compile(v8_str("1+3"))->Run();
+ CHECK_EQ(result, v8::Integer::New(4));
+}
+
+
static const char* kEmbeddedExtensionSource =
"function Ret54321(){return 54321;}~~@@$"
"$%% THIS IS A SERIES OF NON-NULL-TERMINATED STRINGS.";
@@ -5014,7 +5299,6 @@ TEST(RegexpOutOfMemory) {
static void MissingScriptInfoMessageListener(v8::Handle<v8::Message> message,
v8::Handle<Value> data) {
- CHECK_EQ(v8::Undefined(), data);
CHECK(message->GetScriptResourceName()->IsUndefined());
CHECK_EQ(v8::Undefined(), message->GetScriptResourceName());
message->GetLineNumber();
@@ -5126,18 +5410,28 @@ THREADED_TEST(IndependentWeakHandle) {
v8::Persistent<Context> context = Context::New();
Context::Scope context_scope(context);
- v8::Persistent<v8::Object> object_a;
+ v8::Persistent<v8::Object> object_a, object_b;
{
v8::HandleScope handle_scope;
object_a = v8::Persistent<v8::Object>::New(v8::Object::New());
+ object_b = v8::Persistent<v8::Object>::New(v8::Object::New());
}
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
bool object_a_disposed = false;
+ bool object_b_disposed = false;
object_a.MakeWeak(&object_a_disposed, &DisposeAndSetFlag);
+ object_b.MakeWeak(&object_b_disposed, &DisposeAndSetFlag);
+ CHECK(!object_a.IsIndependent());
+ CHECK(!object_b.IsIndependent(isolate));
object_a.MarkIndependent();
+ object_b.MarkIndependent(isolate);
+ CHECK(object_a.IsIndependent());
+ CHECK(object_b.IsIndependent(isolate));
HEAP->PerformScavenge();
CHECK(object_a_disposed);
+ CHECK(object_b_disposed);
}
@@ -5218,7 +5512,7 @@ THREADED_TEST(IndependentHandleRevival) {
object.MarkIndependent();
HEAP->PerformScavenge();
CHECK(revived);
- HEAP->CollectAllGarbage(true);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
{
v8::HandleScope handle_scope;
v8::Local<String> y_str = v8_str("y");
@@ -5543,6 +5837,7 @@ THREADED_TEST(StringWrite) {
v8::Handle<String> str = v8_str("abcde");
// abc<Icelandic eth><Unicode snowman>.
v8::Handle<String> str2 = v8_str("abc\303\260\342\230\203");
+ v8::Handle<String> str3 = v8::String::New("abc\0def", 7);
const int kStride = 4; // Must match stride in for loops in JS below.
CompileRun(
"var left = '';"
@@ -5753,6 +6048,28 @@ THREADED_TEST(StringWrite) {
CHECK_NE(0, strcmp(utf8buf, "abc\303\260\342\230\203"));
utf8buf[8] = '\0';
CHECK_EQ(0, strcmp(utf8buf, "abc\303\260\342\230\203"));
+
+ memset(utf8buf, 0x1, sizeof(utf8buf));
+ utf8buf[5] = 'X';
+ len = str->WriteUtf8(utf8buf, sizeof(utf8buf), &charlen,
+ String::NO_NULL_TERMINATION);
+ CHECK_EQ(5, len);
+ CHECK_EQ('X', utf8buf[5]); // Test that the sixth character is untouched.
+ CHECK_EQ(5, charlen);
+ utf8buf[5] = '\0';
+ CHECK_EQ(0, strcmp(utf8buf, "abcde"));
+
+ memset(buf, 0x1, sizeof(buf));
+ len = str3->WriteAscii(buf);
+ CHECK_EQ(7, len);
+ CHECK_EQ(0, strcmp("abc def", buf));
+
+ memset(buf, 0x1, sizeof(buf));
+ len = str3->WriteAscii(buf, 0, -1, String::PRESERVE_ASCII_NULL);
+ CHECK_EQ(7, len);
+ CHECK_EQ(0, strcmp("abc", buf));
+ CHECK_EQ(0, buf[3]);
+ CHECK_EQ(0, strcmp("def", buf + 4));
}
@@ -7662,7 +7979,7 @@ THREADED_TEST(ShadowObject) {
value = Script::Compile(v8_str("f()"))->Run();
CHECK_EQ(42, value->Int32Value());
- Script::Compile(v8_str("y = 42"))->Run();
+ Script::Compile(v8_str("y = 43"))->Run();
CHECK_EQ(1, shadow_y_setter_call_count);
value = Script::Compile(v8_str("y"))->Run();
CHECK_EQ(1, shadow_y_getter_call_count);
@@ -9370,7 +9687,8 @@ static void GenerateSomeGarbage() {
v8::Handle<v8::Value> DirectApiCallback(const v8::Arguments& args) {
static int count = 0;
if (count++ % 3 == 0) {
- HEAP-> CollectAllGarbage(true); // This should move the stub
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
+ // This should move the stub
GenerateSomeGarbage(); // This should ensure the old stub memory is flushed
}
return v8::Handle<v8::Value>();
@@ -9425,7 +9743,7 @@ THREADED_TEST(CallICFastApi_DirectCall_Throw) {
v8::Handle<v8::Value> DirectGetterCallback(Local<String> name,
const v8::AccessorInfo& info) {
if (++p_getter_count % 3 == 0) {
- HEAP->CollectAllGarbage(true);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
GenerateSomeGarbage();
}
return v8::Handle<v8::Value>();
@@ -10260,6 +10578,7 @@ static v8::Handle<Value> ChildGetter(Local<String> name,
THREADED_TEST(Overriding) {
+ i::FLAG_es5_readonly = true;
v8::HandleScope scope;
LocalContext context;
@@ -10306,11 +10625,11 @@ THREADED_TEST(Overriding) {
value = v8_compile("o.g")->Run();
CHECK_EQ(42, value->Int32Value());
- // Check 'h' can be shadowed.
+ // Check that 'h' cannot be shadowed.
value = v8_compile("o.h = 3; o.h")->Run();
- CHECK_EQ(3, value->Int32Value());
+ CHECK_EQ(1, value->Int32Value());
- // Check 'i' is cannot be shadowed or changed.
+ // Check that 'i' cannot be shadowed or changed.
value = v8_compile("o.i = 3; o.i")->Run();
CHECK_EQ(42, value->Int32Value());
}
@@ -10721,18 +11040,21 @@ TEST(DontLeakGlobalObjects) {
{ v8::HandleScope scope;
LocalContext context;
}
+ v8::V8::ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(0);
{ v8::HandleScope scope;
LocalContext context;
v8_compile("Date")->Run();
}
+ v8::V8::ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(0);
{ v8::HandleScope scope;
LocalContext context;
v8_compile("/aaa/")->Run();
}
+ v8::V8::ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(0);
{ v8::HandleScope scope;
@@ -10741,6 +11063,7 @@ TEST(DontLeakGlobalObjects) {
LocalContext context(&extensions);
v8_compile("gc();")->Run();
}
+ v8::V8::ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(0);
}
}
@@ -10871,6 +11194,307 @@ THREADED_TEST(NestedHandleScopeAndContexts) {
}
+static i::Handle<i::JSFunction>* foo_ptr = NULL;
+static int foo_count = 0;
+static i::Handle<i::JSFunction>* bar_ptr = NULL;
+static int bar_count = 0;
+
+
+static void entry_hook(uintptr_t function,
+ uintptr_t return_addr_location) {
+ i::Code* code = i::Code::GetCodeFromTargetAddress(
+ reinterpret_cast<i::Address>(function));
+ CHECK(code != NULL);
+
+ if (bar_ptr != NULL && code == (*bar_ptr)->code())
+ ++bar_count;
+
+ if (foo_ptr != NULL && code == (*foo_ptr)->code())
+ ++foo_count;
+
+ // TODO(siggi): Verify return_addr_location.
+ // This can be done by capturing JitCodeEvents, but requires an ordered
+ // collection.
+}
+
+
+static void RunLoopInNewEnv() {
+ bar_ptr = NULL;
+ foo_ptr = NULL;
+
+ v8::HandleScope outer;
+ v8::Persistent<Context> env = Context::New();
+ env->Enter();
+
+ const char* script =
+ "function bar() {"
+ " var sum = 0;"
+ " for (i = 0; i < 100; ++i)"
+ " sum = foo(i);"
+ " return sum;"
+ "}"
+ "function foo(i) { return i * i; }";
+ CompileRun(script);
+ i::Handle<i::JSFunction> bar =
+ i::Handle<i::JSFunction>::cast(
+ v8::Utils::OpenHandle(*env->Global()->Get(v8_str("bar"))));
+ ASSERT(*bar);
+
+ i::Handle<i::JSFunction> foo =
+ i::Handle<i::JSFunction>::cast(
+ v8::Utils::OpenHandle(*env->Global()->Get(v8_str("foo"))));
+ ASSERT(*foo);
+
+ bar_ptr = &bar;
+ foo_ptr = &foo;
+
+ v8::Handle<v8::Value> value = CompileRun("bar();");
+ CHECK(value->IsNumber());
+ CHECK_EQ(9801.0, v8::Number::Cast(*value)->Value());
+
+ // Test the optimized codegen path.
+ value = CompileRun("%OptimizeFunctionOnNextCall(foo);"
+ "bar();");
+ CHECK(value->IsNumber());
+ CHECK_EQ(9801.0, v8::Number::Cast(*value)->Value());
+
+ env->Exit();
+}
+
+
+TEST(SetFunctionEntryHook) {
+ i::FLAG_allow_natives_syntax = true;
+
+ // Test setting and resetting the entry hook.
+ // Nulling it should always succeed.
+ CHECK(v8::V8::SetFunctionEntryHook(NULL));
+
+ CHECK(v8::V8::SetFunctionEntryHook(entry_hook));
+ // Setting a hook while one's active should fail.
+ CHECK_EQ(false, v8::V8::SetFunctionEntryHook(entry_hook));
+
+ CHECK(v8::V8::SetFunctionEntryHook(NULL));
+
+ // Reset the entry count to zero and set the entry hook.
+ bar_count = 0;
+ foo_count = 0;
+ CHECK(v8::V8::SetFunctionEntryHook(entry_hook));
+ RunLoopInNewEnv();
+
+ CHECK_EQ(2, bar_count);
+ CHECK_EQ(200, foo_count);
+
+ // Clear the entry hook and count.
+ bar_count = 0;
+ foo_count = 0;
+ v8::V8::SetFunctionEntryHook(NULL);
+
+ // Clear the compilation cache to make sure we don't reuse the
+ // functions from the previous invocation.
+ v8::internal::Isolate::Current()->compilation_cache()->Clear();
+
+ // Verify that entry hooking is now disabled.
+ RunLoopInNewEnv();
+ CHECK_EQ(0u, bar_count);
+ CHECK_EQ(0u, foo_count);
+}
+
+
+static i::HashMap* code_map = NULL;
+static int saw_bar = 0;
+static int move_events = 0;
+
+
+static bool FunctionNameIs(const char* expected,
+ const v8::JitCodeEvent* event) {
+ // Log lines for functions are of the general form:
+ // "LazyCompile:<type><function_name>", where the type is one of
+ // "*", "~" or "".
+ static const char kPreamble[] = "LazyCompile:";
+ static size_t kPreambleLen = sizeof(kPreamble) - 1;
+
+ if (event->name.len < sizeof(kPreamble) - 1 ||
+ strncmp(kPreamble, event->name.str, kPreambleLen) != 0) {
+ return false;
+ }
+
+ const char* tail = event->name.str + kPreambleLen;
+ size_t tail_len = event->name.len - kPreambleLen;
+ size_t expected_len = strlen(expected);
+ if (tail_len == expected_len + 1) {
+ if (*tail == '*' || *tail == '~') {
+ --tail_len;
+ ++tail;
+ } else {
+ return false;
+ }
+ }
+
+ if (tail_len != expected_len)
+ return false;
+
+ return strncmp(tail, expected, expected_len) == 0;
+}
+
+
+static void event_handler(const v8::JitCodeEvent* event) {
+ CHECK(event != NULL);
+ CHECK(code_map != NULL);
+
+ switch (event->type) {
+ case v8::JitCodeEvent::CODE_ADDED: {
+ CHECK(event->code_start != NULL);
+ CHECK_NE(0, static_cast<int>(event->code_len));
+ CHECK(event->name.str != NULL);
+ i::HashMap::Entry* entry =
+ code_map->Lookup(event->code_start,
+ i::ComputePointerHash(event->code_start),
+ true);
+ entry->value = reinterpret_cast<void*>(event->code_len);
+
+ if (FunctionNameIs("bar", event)) {
+ ++saw_bar;
+ }
+ }
+ break;
+
+ case v8::JitCodeEvent::CODE_MOVED: {
+ uint32_t hash = i::ComputePointerHash(event->code_start);
+ // We would like to never see code move that we haven't seen before,
+ // but the code creation event does not happen until the line endings
+ // have been calculated (this is so that we can report the line in the
+ // script at which the function source is found, see
+ // Compiler::RecordFunctionCompilation) and the line endings
+ // calculations can cause a GC, which can move the newly created code
+ // before its existence can be logged.
+ i::HashMap::Entry* entry =
+ code_map->Lookup(event->code_start, hash, false);
+ if (entry != NULL) {
+ ++move_events;
+
+ CHECK_EQ(reinterpret_cast<void*>(event->code_len), entry->value);
+ code_map->Remove(event->code_start, hash);
+
+ entry = code_map->Lookup(event->new_code_start,
+ i::ComputePointerHash(event->new_code_start),
+ true);
+ CHECK(entry != NULL);
+ entry->value = reinterpret_cast<void*>(event->code_len);
+ }
+ }
+ break;
+
+ case v8::JitCodeEvent::CODE_REMOVED:
+ // Object/code removal events are currently not dispatched from the GC.
+ CHECK(false);
+ break;
+ default:
+ // Impossible event.
+ CHECK(false);
+ break;
+ }
+}
+
+
+// Implemented in the test-alloc.cc test suite.
+void SimulateFullSpace(i::PagedSpace* space);
+
+
+static bool MatchPointers(void* key1, void* key2) {
+ return key1 == key2;
+}
+
+
+TEST(SetJitCodeEventHandler) {
+ const char* script =
+ "function bar() {"
+ " var sum = 0;"
+ " for (i = 0; i < 100; ++i)"
+ " sum = foo(i);"
+ " return sum;"
+ "}"
+ "function foo(i) { return i * i; };"
+ "bar();";
+
+ // Run this test in a new isolate to make sure we don't
+ // have remnants of state from other code.
+ v8::Isolate* isolate = v8::Isolate::New();
+ isolate->Enter();
+
+ {
+ i::HashMap code(MatchPointers);
+ code_map = &code;
+
+ saw_bar = 0;
+ move_events = 0;
+
+ i::FLAG_stress_compaction = true;
+ V8::SetJitCodeEventHandler(v8::kJitCodeEventDefault, event_handler);
+
+ v8::HandleScope scope;
+ // Generate new code objects sparsely distributed across several
+ // different fragmented code-space pages.
+ const int kIterations = 10;
+ for (int i = 0; i < kIterations; ++i) {
+ LocalContext env;
+
+ v8::Handle<v8::Script> compiled_script;
+ {
+ i::AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(HEAP->code_space());
+ compiled_script = v8_compile(script);
+ }
+ compiled_script->Run();
+
+ // Clear the compilation cache to get more wastage.
+ ISOLATE->compilation_cache()->Clear();
+ }
+
+ // Force code movement.
+ HEAP->CollectAllAvailableGarbage("TestSetJitCodeEventHandler");
+
+ CHECK_LE(kIterations, saw_bar);
+ CHECK_NE(0, move_events);
+
+ code_map = NULL;
+ V8::SetJitCodeEventHandler(v8::kJitCodeEventDefault, NULL);
+ }
+
+ isolate->Exit();
+ isolate->Dispose();
+
+ // Do this in a new isolate.
+ isolate = v8::Isolate::New();
+ isolate->Enter();
+
+ // Verify that we get callbacks for existing code objects when we
+ // request enumeration of existing code.
+ {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun(script);
+
+ // Now get code through initial iteration.
+ i::HashMap code(MatchPointers);
+ code_map = &code;
+
+ V8::SetJitCodeEventHandler(v8::kJitCodeEventEnumExisting, event_handler);
+ V8::SetJitCodeEventHandler(v8::kJitCodeEventDefault, NULL);
+
+ code_map = NULL;
+
+ // We expect that we got some events. Note that if we could get code removal
+ // notifications, we could compare two collections, one created by listening
+ // from the time of creation of an isolate, and the other by subscribing
+ // with EnumExisting.
+ CHECK_NE(0, code.occupancy());
+ }
+
+ isolate->Exit();
+ isolate->Dispose();
+}
+
+
static int64_t cast(intptr_t x) { return static_cast<int64_t>(x); }
@@ -12040,7 +12664,7 @@ class RegExpStringModificationTest {
// Inject the input as a global variable.
i::Handle<i::String> input_name =
FACTORY->NewStringFromAscii(i::Vector<const char>("input", 5));
- i::Isolate::Current()->global_context()->global()->SetProperty(
+ i::Isolate::Current()->native_context()->global_object()->SetProperty(
*input_name,
*input_,
NONE,
@@ -12148,9 +12772,10 @@ TEST(RegExpStringModification) {
}
-// Test that we can set a property on the global object even if there
+// Test that we cannot set a property on the global object if there
// is a read-only property in the prototype chain.
TEST(ReadOnlyPropertyInGlobalProto) {
+ i::FLAG_es5_readonly = true;
v8::HandleScope scope;
v8::Handle<v8::ObjectTemplate> templ = v8::ObjectTemplate::New();
LocalContext context(0, templ);
@@ -12162,12 +12787,13 @@ TEST(ReadOnlyPropertyInGlobalProto) {
// Check without 'eval' or 'with'.
v8::Handle<v8::Value> res =
CompileRun("function f() { x = 42; return x; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
// Check with 'eval'.
- res = CompileRun("function f() { eval('1'); y = 42; return y; }; f()");
- CHECK_EQ(v8::Integer::New(42), res);
+ res = CompileRun("function f() { eval('1'); y = 43; return y; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
// Check with 'with'.
- res = CompileRun("function f() { with (this) { y = 42 }; return y; }; f()");
- CHECK_EQ(v8::Integer::New(42), res);
+ res = CompileRun("function f() { with (this) { y = 44 }; return y; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
}
static int force_set_set_count = 0;
@@ -13617,6 +14243,41 @@ THREADED_TEST(ExternalArrayInfo) {
}
+void ExternalArrayLimitTestHelper(v8::ExternalArrayType array_type, int size) {
+ v8::Handle<v8::Object> obj = v8::Object::New();
+ v8::V8::SetFatalErrorHandler(StoringErrorCallback);
+ last_location = last_message = NULL;
+ obj->SetIndexedPropertiesToExternalArrayData(NULL, array_type, size);
+ CHECK(!obj->HasIndexedPropertiesInExternalArrayData());
+ CHECK_NE(NULL, last_location);
+ CHECK_NE(NULL, last_message);
+}
+
+
+TEST(ExternalArrayLimits) {
+ v8::HandleScope scope;
+ LocalContext context;
+ ExternalArrayLimitTestHelper(v8::kExternalByteArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalByteArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedByteArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedByteArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalShortArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalShortArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedShortArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedShortArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalIntArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalIntArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedIntArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalUnsignedIntArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalFloatArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalFloatArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalDoubleArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalDoubleArray, 0xffffffff);
+ ExternalArrayLimitTestHelper(v8::kExternalPixelArray, 0x40000000);
+ ExternalArrayLimitTestHelper(v8::kExternalPixelArray, 0xffffffff);
+}
+
+
THREADED_TEST(ScriptContextDependence) {
v8::HandleScope scope;
LocalContext c1;
@@ -13995,6 +14656,89 @@ TEST(SourceURLInStackTrace) {
}
+v8::Handle<Value> AnalyzeStackOfInlineScriptWithSourceURL(
+ const v8::Arguments& args) {
+ v8::HandleScope scope;
+ v8::Handle<v8::StackTrace> stackTrace =
+ v8::StackTrace::CurrentStackTrace(10, v8::StackTrace::kDetailed);
+ CHECK_EQ(4, stackTrace->GetFrameCount());
+ v8::Handle<v8::String> url = v8_str("url");
+ for (int i = 0; i < 3; i++) {
+ v8::Handle<v8::String> name =
+ stackTrace->GetFrame(i)->GetScriptNameOrSourceURL();
+ CHECK(!name.IsEmpty());
+ CHECK_EQ(url, name);
+ }
+ return v8::Undefined();
+}
+
+
+TEST(InlineScriptWithSourceURLInStackTrace) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->Set(v8_str("AnalyzeStackOfInlineScriptWithSourceURL"),
+ v8::FunctionTemplate::New(
+ AnalyzeStackOfInlineScriptWithSourceURL));
+ LocalContext context(0, templ);
+
+ const char *source =
+ "function outer() {\n"
+ "function bar() {\n"
+ " AnalyzeStackOfInlineScriptWithSourceURL();\n"
+ "}\n"
+ "function foo() {\n"
+ "\n"
+ " bar();\n"
+ "}\n"
+ "foo();\n"
+ "}\n"
+ "outer()\n"
+ "//@ sourceURL=source_url";
+ CHECK(CompileRunWithOrigin(source, "url", 0, 1)->IsUndefined());
+}
+
+
+v8::Handle<Value> AnalyzeStackOfDynamicScriptWithSourceURL(
+ const v8::Arguments& args) {
+ v8::HandleScope scope;
+ v8::Handle<v8::StackTrace> stackTrace =
+ v8::StackTrace::CurrentStackTrace(10, v8::StackTrace::kDetailed);
+ CHECK_EQ(4, stackTrace->GetFrameCount());
+ v8::Handle<v8::String> url = v8_str("source_url");
+ for (int i = 0; i < 3; i++) {
+ v8::Handle<v8::String> name =
+ stackTrace->GetFrame(i)->GetScriptNameOrSourceURL();
+ CHECK(!name.IsEmpty());
+ CHECK_EQ(url, name);
+ }
+ return v8::Undefined();
+}
+
+
+TEST(DynamicWithSourceURLInStackTrace) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->Set(v8_str("AnalyzeStackOfDynamicScriptWithSourceURL"),
+ v8::FunctionTemplate::New(
+ AnalyzeStackOfDynamicScriptWithSourceURL));
+ LocalContext context(0, templ);
+
+ const char *source =
+ "function outer() {\n"
+ "function bar() {\n"
+ " AnalyzeStackOfDynamicScriptWithSourceURL();\n"
+ "}\n"
+ "function foo() {\n"
+ "\n"
+ " bar();\n"
+ "}\n"
+ "foo();\n"
+ "}\n"
+ "outer()\n"
+ "//@ sourceURL=source_url";
+ CHECK(CompileRunWithOrigin(source, "url", 0, 0)->IsUndefined());
+}
+
static void CreateGarbageInOldSpace() {
v8::HandleScope scope;
i::AlwaysAllocateScope always_allocate;
@@ -14182,11 +14926,12 @@ THREADED_TEST(GetHeapStatistics) {
class VisitorImpl : public v8::ExternalResourceVisitor {
public:
- VisitorImpl(TestResource* r1, TestResource* r2)
- : resource1_(r1),
- resource2_(r2),
- found_resource1_(false),
- found_resource2_(false) {}
+ explicit VisitorImpl(TestResource** resource) {
+ for (int i = 0; i < 4; i++) {
+ resource_[i] = resource[i];
+ found_resource_[i] = false;
+ }
+ }
virtual ~VisitorImpl() {}
virtual void VisitExternalString(v8::Handle<v8::String> string) {
if (!string->IsExternal()) {
@@ -14196,25 +14941,22 @@ class VisitorImpl : public v8::ExternalResourceVisitor {
v8::String::ExternalStringResource* resource =
string->GetExternalStringResource();
CHECK(resource);
- if (resource1_ == resource) {
- CHECK(!found_resource1_);
- found_resource1_ = true;
- }
- if (resource2_ == resource) {
- CHECK(!found_resource2_);
- found_resource2_ = true;
+ for (int i = 0; i < 4; i++) {
+ if (resource_[i] == resource) {
+ CHECK(!found_resource_[i]);
+ found_resource_[i] = true;
+ }
}
}
void CheckVisitedResources() {
- CHECK(found_resource1_);
- CHECK(found_resource2_);
+ for (int i = 0; i < 4; i++) {
+ CHECK(found_resource_[i]);
+ }
}
private:
- v8::String::ExternalStringResource* resource1_;
- v8::String::ExternalStringResource* resource2_;
- bool found_resource1_;
- bool found_resource2_;
+ v8::String::ExternalStringResource* resource_[4];
+ bool found_resource_[4];
};
TEST(VisitExternalStrings) {
@@ -14222,16 +14964,33 @@ TEST(VisitExternalStrings) {
LocalContext env;
const char* string = "Some string";
uint16_t* two_byte_string = AsciiToTwoByteString(string);
- TestResource* resource1 = new TestResource(two_byte_string);
- v8::Local<v8::String> string1 = v8::String::NewExternal(resource1);
- TestResource* resource2 = new TestResource(two_byte_string);
- v8::Local<v8::String> string2 = v8::String::NewExternal(resource2);
-
- // We need to add usages for string1 and string2 to avoid warnings in GCC 4.7
+ TestResource* resource[4];
+ resource[0] = new TestResource(two_byte_string);
+ v8::Local<v8::String> string0 = v8::String::NewExternal(resource[0]);
+ resource[1] = new TestResource(two_byte_string);
+ v8::Local<v8::String> string1 = v8::String::NewExternal(resource[1]);
+
+ // Externalized symbol.
+ resource[2] = new TestResource(two_byte_string);
+ v8::Local<v8::String> string2 = v8::String::NewSymbol(string);
+ CHECK(string2->MakeExternal(resource[2]));
+
+ // Symbolized External.
+ resource[3] = new TestResource(AsciiToTwoByteString("Some other string"));
+ v8::Local<v8::String> string3 = v8::String::NewExternal(resource[3]);
+ HEAP->CollectAllAvailableGarbage(); // Tenure string.
+ // Turn into a symbol.
+ i::Handle<i::String> string3_i = v8::Utils::OpenHandle(*string3);
+ CHECK(!HEAP->LookupSymbol(*string3_i)->IsFailure());
+ CHECK(string3_i->IsSymbol());
+
+ // We need to add usages for string* to avoid warnings in GCC 4.7
+ CHECK(string0->IsExternal());
CHECK(string1->IsExternal());
CHECK(string2->IsExternal());
+ CHECK(string3->IsExternal());
- VisitorImpl visitor(resource1, resource2);
+ VisitorImpl visitor(resource);
v8::V8::VisitExternalResources(&visitor);
visitor.CheckVisitedResources();
}
@@ -14416,6 +15175,7 @@ TEST(Regress528) {
context->Exit();
}
context.Dispose();
+ v8::V8::ContextDisposedNotification();
for (gc_count = 1; gc_count < 10; gc_count++) {
other_context->Enter();
CompileRun(source_simple);
@@ -14438,6 +15198,7 @@ TEST(Regress528) {
context->Exit();
}
context.Dispose();
+ v8::V8::ContextDisposedNotification();
for (gc_count = 1; gc_count < 10; gc_count++) {
other_context->Enter();
CompileRun(source_eval);
@@ -14465,6 +15226,7 @@ TEST(Regress528) {
context->Exit();
}
context.Dispose();
+ v8::V8::ContextDisposedNotification();
for (gc_count = 1; gc_count < 10; gc_count++) {
other_context->Enter();
CompileRun(source_exception);
@@ -14476,6 +15238,7 @@ TEST(Regress528) {
CHECK_EQ((snapshot_enabled ? 2 : 1), GetGlobalObjectsCount());
other_context.Dispose();
+ v8::V8::ContextDisposedNotification();
}
@@ -14565,6 +15328,8 @@ THREADED_TEST(FunctionGetScriptId) {
static v8::Handle<Value> GetterWhichReturns42(Local<String> name,
const AccessorInfo& info) {
+ CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject());
+ CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject());
return v8_num(42);
}
@@ -14572,7 +15337,29 @@ static v8::Handle<Value> GetterWhichReturns42(Local<String> name,
static void SetterWhichSetsYOnThisTo23(Local<String> name,
Local<Value> value,
const AccessorInfo& info) {
+ CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject());
+ CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject());
+ info.This()->Set(v8_str("y"), v8_num(23));
+}
+
+
+Handle<Value> FooGetInterceptor(Local<String> name,
+ const AccessorInfo& info) {
+ CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject());
+ CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject());
+ if (!name->Equals(v8_str("foo"))) return Handle<Value>();
+ return v8_num(42);
+}
+
+
+Handle<Value> FooSetInterceptor(Local<String> name,
+ Local<Value> value,
+ const AccessorInfo& info) {
+ CHECK(v8::Utils::OpenHandle(*info.This())->IsJSObject());
+ CHECK(v8::Utils::OpenHandle(*info.Holder())->IsJSObject());
+ if (!name->Equals(v8_str("foo"))) return Handle<Value>();
info.This()->Set(v8_str("y"), v8_num(23));
+ return v8_num(23);
}
@@ -15606,6 +16393,45 @@ TEST(DontDeleteCellLoadICAPI) {
}
+class Visitor42 : public v8::PersistentHandleVisitor {
+ public:
+ explicit Visitor42(v8::Persistent<v8::Object> object)
+ : counter_(0), object_(object) { }
+
+ virtual void VisitPersistentHandle(Persistent<Value> value,
+ uint16_t class_id) {
+ if (class_id == 42) {
+ CHECK(value->IsObject());
+ v8::Persistent<v8::Object> visited =
+ v8::Persistent<v8::Object>::Cast(value);
+ CHECK_EQ(42, visited.WrapperClassId());
+ CHECK_EQ(object_, visited);
+ ++counter_;
+ }
+ }
+
+ int counter_;
+ v8::Persistent<v8::Object> object_;
+};
+
+
+TEST(PersistentHandleVisitor) {
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Persistent<v8::Object> object =
+ v8::Persistent<v8::Object>::New(v8::Object::New());
+ CHECK_EQ(0, object.WrapperClassId());
+ object.SetWrapperClassId(42);
+ CHECK_EQ(42, object.WrapperClassId());
+
+ Visitor42 visitor(object);
+ v8::V8::VisitHandlesWithClassIds(&visitor);
+ CHECK_EQ(1, visitor.counter_);
+
+ object.Dispose();
+}
+
+
TEST(RegExp) {
v8::HandleScope scope;
LocalContext context;
@@ -16041,6 +16867,24 @@ THREADED_TEST(AllowCodeGenFromStrings) {
}
+TEST(SetErrorMessageForCodeGenFromStrings) {
+ v8::HandleScope scope;
+ LocalContext context;
+ TryCatch try_catch;
+
+ Handle<String> message = v8_str("Message") ;
+ Handle<String> expected_message = v8_str("Uncaught EvalError: Message");
+ V8::SetAllowCodeGenerationFromStringsCallback(&CodeGenerationDisallowed);
+ context->AllowCodeGenerationFromStrings(false);
+ context->SetErrorMessageForCodeGenerationFromStrings(message);
+ Handle<Value> result = CompileRun("eval('42')");
+ CHECK(result.IsEmpty());
+ CHECK(try_catch.HasCaught());
+ Handle<String> actual_message = try_catch.Message()->Get();
+ CHECK(expected_message->Equals(actual_message));
+}
+
+
static v8::Handle<Value> NonObjectThis(const v8::Arguments& args) {
return v8::Undefined();
}
@@ -16094,7 +16938,8 @@ THREADED_TEST(Regress1516) {
CHECK_LE(1, elements);
}
- i::Isolate::Current()->heap()->CollectAllGarbage(true);
+ i::Isolate::Current()->heap()->CollectAllGarbage(
+ i::Heap::kAbortIncrementalMarkingMask);
{ i::Object* raw_map_cache = i::Isolate::Current()->context()->map_cache();
if (raw_map_cache != i::Isolate::Current()->heap()->undefined_value()) {
i::MapCache* map_cache = i::MapCache::cast(raw_map_cache);
@@ -16594,3 +17439,459 @@ TEST(StringEmpty) {
CHECK(v8::String::Empty(isolate).IsEmpty());
CHECK_EQ(3, fatal_error_callback_counter);
}
+
+
+static int instance_checked_getter_count = 0;
+static Handle<Value> InstanceCheckedGetter(Local<String> name,
+ const AccessorInfo& info) {
+ CHECK_EQ(name, v8_str("foo"));
+ instance_checked_getter_count++;
+ return v8_num(11);
+}
+
+
+static int instance_checked_setter_count = 0;
+static void InstanceCheckedSetter(Local<String> name,
+ Local<Value> value,
+ const AccessorInfo& info) {
+ CHECK_EQ(name, v8_str("foo"));
+ CHECK_EQ(value, v8_num(23));
+ instance_checked_setter_count++;
+}
+
+
+static void CheckInstanceCheckedResult(int getters,
+ int setters,
+ bool expects_callbacks,
+ TryCatch* try_catch) {
+ if (expects_callbacks) {
+ CHECK(!try_catch->HasCaught());
+ CHECK_EQ(getters, instance_checked_getter_count);
+ CHECK_EQ(setters, instance_checked_setter_count);
+ } else {
+ CHECK(try_catch->HasCaught());
+ CHECK_EQ(0, instance_checked_getter_count);
+ CHECK_EQ(0, instance_checked_setter_count);
+ }
+ try_catch->Reset();
+}
+
+
+static void CheckInstanceCheckedAccessors(bool expects_callbacks) {
+ instance_checked_getter_count = 0;
+ instance_checked_setter_count = 0;
+ TryCatch try_catch;
+
+ // Test path through generic runtime code.
+ CompileRun("obj.foo");
+ CheckInstanceCheckedResult(1, 0, expects_callbacks, &try_catch);
+ CompileRun("obj.foo = 23");
+ CheckInstanceCheckedResult(1, 1, expects_callbacks, &try_catch);
+
+ // Test path through generated LoadIC and StoredIC.
+ CompileRun("function test_get(o) { o.foo; }"
+ "test_get(obj);");
+ CheckInstanceCheckedResult(2, 1, expects_callbacks, &try_catch);
+ CompileRun("test_get(obj);");
+ CheckInstanceCheckedResult(3, 1, expects_callbacks, &try_catch);
+ CompileRun("test_get(obj);");
+ CheckInstanceCheckedResult(4, 1, expects_callbacks, &try_catch);
+ CompileRun("function test_set(o) { o.foo = 23; }"
+ "test_set(obj);");
+ CheckInstanceCheckedResult(4, 2, expects_callbacks, &try_catch);
+ CompileRun("test_set(obj);");
+ CheckInstanceCheckedResult(4, 3, expects_callbacks, &try_catch);
+ CompileRun("test_set(obj);");
+ CheckInstanceCheckedResult(4, 4, expects_callbacks, &try_catch);
+
+ // Test path through optimized code.
+ CompileRun("%OptimizeFunctionOnNextCall(test_get);"
+ "test_get(obj);");
+ CheckInstanceCheckedResult(5, 4, expects_callbacks, &try_catch);
+ CompileRun("%OptimizeFunctionOnNextCall(test_set);"
+ "test_set(obj);");
+ CheckInstanceCheckedResult(5, 5, expects_callbacks, &try_catch);
+
+ // Cleanup so that closures start out fresh in next check.
+ CompileRun("%DeoptimizeFunction(test_get);"
+ "%ClearFunctionTypeFeedback(test_get);"
+ "%DeoptimizeFunction(test_set);"
+ "%ClearFunctionTypeFeedback(test_set);");
+}
+
+
+THREADED_TEST(InstanceCheckOnInstanceAccessor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> inst = templ->InstanceTemplate();
+ inst->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+}
+
+
+THREADED_TEST(InstanceCheckOnInstanceAccessorWithInterceptor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> inst = templ->InstanceTemplate();
+ AddInterceptor(templ, EmptyInterceptorGetter, EmptyInterceptorSetter);
+ inst->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+}
+
+
+THREADED_TEST(InstanceCheckOnPrototypeAccessor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> proto = templ->PrototypeTemplate();
+ proto->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+
+ printf("Testing positive with modified prototype chain ...\n");
+ CompileRun("var obj = new f();"
+ "var pro = {};"
+ "pro.__proto__ = obj.__proto__;"
+ "obj.__proto__ = pro;");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+}
+
+
+TEST(TryFinallyMessage) {
+ v8::HandleScope scope;
+ LocalContext context;
+ {
+ // Test that the original error message is not lost if there is a
+ // recursive call into Javascript is done in the finally block, e.g. to
+ // initialize an IC. (crbug.com/129171)
+ TryCatch try_catch;
+ const char* trigger_ic =
+ "try { \n"
+ " throw new Error('test'); \n"
+ "} finally { \n"
+ " var x = 0; \n"
+ " x++; \n" // Trigger an IC initialization here.
+ "} \n";
+ CompileRun(trigger_ic);
+ CHECK(try_catch.HasCaught());
+ Local<Message> message = try_catch.Message();
+ CHECK(!message.IsEmpty());
+ CHECK_EQ(2, message->GetLineNumber());
+ }
+
+ {
+ // Test that the original exception message is indeed overwritten if
+ // a new error is thrown in the finally block.
+ TryCatch try_catch;
+ const char* throw_again =
+ "try { \n"
+ " throw new Error('test'); \n"
+ "} finally { \n"
+ " var x = 0; \n"
+ " x++; \n"
+ " throw new Error('again'); \n" // This is the new uncaught error.
+ "} \n";
+ CompileRun(throw_again);
+ CHECK(try_catch.HasCaught());
+ Local<Message> message = try_catch.Message();
+ CHECK(!message.IsEmpty());
+ CHECK_EQ(6, message->GetLineNumber());
+ }
+}
+
+
+static void Helper137002(bool do_store,
+ bool polymorphic,
+ bool remove_accessor,
+ bool interceptor) {
+ LocalContext context;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ if (interceptor) {
+ templ->SetNamedPropertyHandler(FooGetInterceptor, FooSetInterceptor);
+ } else {
+ templ->SetAccessor(v8_str("foo"),
+ GetterWhichReturns42,
+ SetterWhichSetsYOnThisTo23);
+ }
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+
+ // Turn monomorphic on slow object with native accessor, then turn
+ // polymorphic, finally optimize to create negative lookup and fail.
+ CompileRun(do_store ?
+ "function f(x) { x.foo = void 0; }" :
+ "function f(x) { return x.foo; }");
+ CompileRun("obj.y = void 0;");
+ if (!interceptor) {
+ CompileRun("%OptimizeObjectForAddingMultipleProperties(obj, 1);");
+ }
+ CompileRun("obj.__proto__ = null;"
+ "f(obj); f(obj); f(obj);");
+ if (polymorphic) {
+ CompileRun("f({});");
+ }
+ CompileRun("obj.y = void 0;"
+ "%OptimizeFunctionOnNextCall(f);");
+ if (remove_accessor) {
+ CompileRun("delete obj.foo;");
+ }
+ CompileRun("var result = f(obj);");
+ if (do_store) {
+ CompileRun("result = obj.y;");
+ }
+ if (remove_accessor && !interceptor) {
+ CHECK(context->Global()->Get(v8_str("result"))->IsUndefined());
+ } else {
+ CHECK_EQ(do_store ? 23 : 42,
+ context->Global()->Get(v8_str("result"))->Int32Value());
+ }
+}
+
+
+THREADED_TEST(Regress137002a) {
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_compilation_cache = false;
+ v8::HandleScope scope;
+ for (int i = 0; i < 16; i++) {
+ Helper137002(i & 8, i & 4, i & 2, i & 1);
+ }
+}
+
+
+THREADED_TEST(Regress137002b) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetAccessor(v8_str("foo"),
+ GetterWhichReturns42,
+ SetterWhichSetsYOnThisTo23);
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+
+ // Turn monomorphic on slow object with native accessor, then just
+ // delete the property and fail.
+ CompileRun("function load(x) { return x.foo; }"
+ "function store(x) { x.foo = void 0; }"
+ "function keyed_load(x, key) { return x[key]; }"
+ // Second version of function has a different source (add void 0)
+ // so that it does not share code with the first version. This
+ // ensures that the ICs are monomorphic.
+ "function load2(x) { void 0; return x.foo; }"
+ "function store2(x) { void 0; x.foo = void 0; }"
+ "function keyed_load2(x, key) { void 0; return x[key]; }"
+
+ "obj.y = void 0;"
+ "obj.__proto__ = null;"
+ "var subobj = {};"
+ "subobj.y = void 0;"
+ "subobj.__proto__ = obj;"
+ "%OptimizeObjectForAddingMultipleProperties(obj, 1);"
+
+ // Make the ICs monomorphic.
+ "load(obj); load(obj);"
+ "load2(subobj); load2(subobj);"
+ "store(obj); store(obj);"
+ "store2(subobj); store2(subobj);"
+ "keyed_load(obj, 'foo'); keyed_load(obj, 'foo');"
+ "keyed_load2(subobj, 'foo'); keyed_load2(subobj, 'foo');"
+
+ // Actually test the shiny new ICs and better not crash. This
+ // serves as a regression test for issue 142088 as well.
+ "load(obj);"
+ "load2(subobj);"
+ "store(obj);"
+ "store2(subobj);"
+ "keyed_load(obj, 'foo');"
+ "keyed_load2(subobj, 'foo');"
+
+ // Delete the accessor. It better not be called any more now.
+ "delete obj.foo;"
+ "obj.y = void 0;"
+ "subobj.y = void 0;"
+
+ "var load_result = load(obj);"
+ "var load_result2 = load2(subobj);"
+ "var keyed_load_result = keyed_load(obj, 'foo');"
+ "var keyed_load_result2 = keyed_load2(subobj, 'foo');"
+ "store(obj);"
+ "store2(subobj);"
+ "var y_from_obj = obj.y;"
+ "var y_from_subobj = subobj.y;");
+ CHECK(context->Global()->Get(v8_str("load_result"))->IsUndefined());
+ CHECK(context->Global()->Get(v8_str("load_result2"))->IsUndefined());
+ CHECK(context->Global()->Get(v8_str("keyed_load_result"))->IsUndefined());
+ CHECK(context->Global()->Get(v8_str("keyed_load_result2"))->IsUndefined());
+ CHECK(context->Global()->Get(v8_str("y_from_obj"))->IsUndefined());
+ CHECK(context->Global()->Get(v8_str("y_from_subobj"))->IsUndefined());
+}
+
+
+THREADED_TEST(Regress142088) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetAccessor(v8_str("foo"),
+ GetterWhichReturns42,
+ SetterWhichSetsYOnThisTo23);
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+
+ CompileRun("function load(x) { return x.foo; }"
+ "var o = Object.create(obj);"
+ "%OptimizeObjectForAddingMultipleProperties(obj, 1);"
+ "load(o); load(o); load(o); load(o);");
+}
+
+
+THREADED_TEST(Regress137496) {
+ i::FLAG_expose_gc = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ // Compile a try-finally clause where the finally block causes a GC
+ // while there still is a message pending for external reporting.
+ TryCatch try_catch;
+ try_catch.SetVerbose(true);
+ CompileRun("try { throw new Error(); } finally { gc(); }");
+ CHECK(try_catch.HasCaught());
+}
+
+
+THREADED_TEST(Regress149912) {
+ v8::HandleScope scope;
+ LocalContext context;
+ Handle<FunctionTemplate> templ = FunctionTemplate::New();
+ AddInterceptor(templ, EmptyInterceptorGetter, EmptyInterceptorSetter);
+ context->Global()->Set(v8_str("Bug"), templ->GetFunction());
+ CompileRun("Number.prototype.__proto__ = new Bug; var x = 0; x.foo();");
+}
+
+
+THREADED_TEST(Regress157124) {
+ v8::HandleScope scope;
+ LocalContext context;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ Local<Object> obj = templ->NewInstance();
+ obj->GetIdentityHash();
+ obj->DeleteHiddenValue(v8_str("Bug"));
+}
+
+
+#ifndef WIN32
+class ThreadInterruptTest {
+ public:
+ ThreadInterruptTest() : sem_(NULL), sem_value_(0) { }
+ ~ThreadInterruptTest() { delete sem_; }
+
+ void RunTest() {
+ sem_ = i::OS::CreateSemaphore(0);
+
+ InterruptThread i_thread(this);
+ i_thread.Start();
+
+ sem_->Wait();
+ CHECK_EQ(kExpectedValue, sem_value_);
+ }
+
+ private:
+ static const int kExpectedValue = 1;
+
+ class InterruptThread : public i::Thread {
+ public:
+ explicit InterruptThread(ThreadInterruptTest* test)
+ : Thread("InterruptThread"), test_(test) {}
+
+ virtual void Run() {
+ struct sigaction action;
+
+ // Ensure that we'll enter waiting condition
+ i::OS::Sleep(100);
+
+ // Setup signal handler
+ memset(&action, 0, sizeof(action));
+ action.sa_handler = SignalHandler;
+ sigaction(SIGCHLD, &action, NULL);
+
+ // Send signal
+ kill(getpid(), SIGCHLD);
+
+ // Ensure that if wait has returned because of error
+ i::OS::Sleep(100);
+
+ // Set value and signal semaphore
+ test_->sem_value_ = 1;
+ test_->sem_->Signal();
+ }
+
+ static void SignalHandler(int signal) {
+ }
+
+ private:
+ ThreadInterruptTest* test_;
+ struct sigaction sa_;
+ };
+
+ i::Semaphore* sem_;
+ volatile int sem_value_;
+};
+
+
+THREADED_TEST(SemaphoreInterruption) {
+ ThreadInterruptTest().RunTest();
+}
+#endif // WIN32
diff --git a/src/3rdparty/v8/test/cctest/test-assembler-arm.cc b/src/3rdparty/v8/test/cctest/test-assembler-arm.cc
index ecbf956..cdab1b9 100644
--- a/src/3rdparty/v8/test/cctest/test-assembler-arm.cc
+++ b/src/3rdparty/v8/test/cctest/test-assembler-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -642,8 +642,8 @@ TEST(8) {
// single precision values around in memory.
Assembler assm(Isolate::Current(), NULL, 0);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ mov(ip, Operand(sp));
__ stm(db_w, sp, r4.bit() | fp.bit() | lr.bit());
@@ -753,8 +753,8 @@ TEST(9) {
// single precision values around in memory.
Assembler assm(Isolate::Current(), NULL, 0);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ mov(ip, Operand(sp));
__ stm(db_w, sp, r4.bit() | fp.bit() | lr.bit());
@@ -868,8 +868,8 @@ TEST(10) {
// single precision values around in memory.
Assembler assm(Isolate::Current(), NULL, 0);
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+ if (CpuFeatures::IsSupported(VFP2)) {
+ CpuFeatures::Scope scope(VFP2);
__ mov(ip, Operand(sp));
__ stm(db_w, sp, r4.bit() | fp.bit() | lr.bit());
diff --git a/src/3rdparty/v8/test/cctest/test-ast.cc b/src/3rdparty/v8/test/cctest/test-ast.cc
index 80c7fdf..c72f87e 100644
--- a/src/3rdparty/v8/test/cctest/test-ast.cc
+++ b/src/3rdparty/v8/test/cctest/test-ast.cc
@@ -39,8 +39,10 @@ TEST(List) {
List<AstNode*>* list = new List<AstNode*>(0);
CHECK_EQ(0, list->length());
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- AstNodeFactory<AstNullVisitor> factory(Isolate::Current());
+ Isolate* isolate = Isolate::Current();
+ Zone* zone = isolate->runtime_zone();
+ ZoneScope zone_scope(zone, DELETE_ON_EXIT);
+ AstNodeFactory<AstNullVisitor> factory(isolate, zone);
AstNode* node = factory.NewEmptyStatement();
list->Add(node);
CHECK_EQ(1, list->length());
diff --git a/src/3rdparty/v8/test/cctest/test-compiler.cc b/src/3rdparty/v8/test/cctest/test-compiler.cc
index 9ca0b0a..7700a98 100644
--- a/src/3rdparty/v8/test/cctest/test-compiler.cc
+++ b/src/3rdparty/v8/test/cctest/test-compiler.cc
@@ -68,15 +68,9 @@ v8::Handle<v8::Value> PrintExtension::Print(const v8::Arguments& args) {
for (int i = 0; i < args.Length(); i++) {
if (i != 0) printf(" ");
v8::HandleScope scope;
- v8::Handle<v8::Value> arg = args[i];
- v8::Handle<v8::String> string_obj = arg->ToString();
- if (string_obj.IsEmpty()) return string_obj;
- int length = string_obj->Length();
- uint16_t* string = NewArray<uint16_t>(length + 1);
- string_obj->Write(string);
- for (int j = 0; j < length; j++)
- printf("%lc", static_cast<wchar_t>(string[j]));
- DeleteArray(string);
+ v8::String::Utf8Value str(args[i]);
+ if (*str == NULL) return v8::Undefined();
+ printf("%s", *str);
}
printf("\n");
return v8::Undefined();
@@ -101,14 +95,14 @@ static void InitializeVM() {
static MaybeObject* GetGlobalProperty(const char* name) {
Handle<String> symbol = FACTORY->LookupAsciiSymbol(name);
- return Isolate::Current()->context()->global()->GetProperty(*symbol);
+ return Isolate::Current()->context()->global_object()->GetProperty(*symbol);
}
static void SetGlobalProperty(const char* name, Object* value) {
Handle<Object> object(value);
Handle<String> symbol = FACTORY->LookupAsciiSymbol(name);
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
SetProperty(global, symbol, object, NONE, kNonStrictMode);
}
@@ -120,12 +114,13 @@ static Handle<JSFunction> Compile(const char* source) {
Handle<String>(),
0,
0,
+ Handle<Context>(Isolate::Current()->native_context()),
NULL,
NULL,
Handle<String>::null(),
NOT_NATIVES_CODE);
return FACTORY->NewFunctionFromSharedFunctionInfo(shared_function,
- Isolate::Current()->global_context());
+ Isolate::Current()->native_context());
}
@@ -138,7 +133,7 @@ static double Inc(int x) {
if (fun.is_null()) return -1;
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -159,7 +154,7 @@ static double Add(int x, int y) {
SetGlobalProperty("x", Smi::FromInt(x));
SetGlobalProperty("y", Smi::FromInt(y));
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -179,7 +174,7 @@ static double Abs(int x) {
SetGlobalProperty("x", Smi::FromInt(x));
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -200,7 +195,7 @@ static double Sum(int n) {
SetGlobalProperty("n", Smi::FromInt(n));
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
return GetGlobalProperty("result")->ToObjectChecked()->Number();
@@ -221,7 +216,7 @@ TEST(Print) {
Handle<JSFunction> fun = Compile(source);
if (fun.is_null()) return;
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
}
@@ -254,7 +249,7 @@ TEST(Stuff) {
Handle<JSFunction> fun = Compile(source);
CHECK(!fun.is_null());
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
CHECK_EQ(511.0, GetGlobalProperty("r")->ToObjectChecked()->Number());
@@ -269,7 +264,7 @@ TEST(UncaughtThrow) {
Handle<JSFunction> fun = Compile(source);
CHECK(!fun.is_null());
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(has_pending_exception);
CHECK_EQ(42.0, Isolate::Current()->pending_exception()->
@@ -294,12 +289,12 @@ TEST(C2JSFrames) {
// Run the generated code to populate the global object with 'foo'.
bool has_pending_exception;
- Handle<JSObject> global(Isolate::Current()->context()->global());
+ Handle<JSObject> global(Isolate::Current()->context()->global_object());
Execution::Call(fun0, global, 0, NULL, &has_pending_exception);
CHECK(!has_pending_exception);
Object* foo_symbol = FACTORY->LookupAsciiSymbol("foo")->ToObjectChecked();
- MaybeObject* fun1_object = Isolate::Current()->context()->global()->
+ MaybeObject* fun1_object = Isolate::Current()->context()->global_object()->
GetProperty(String::cast(foo_symbol));
Handle<Object> fun1(fun1_object->ToObjectChecked());
CHECK(fun1->IsJSFunction());
@@ -352,6 +347,38 @@ TEST(GetScriptLineNumber) {
}
+// Test that optimized code for different closures is actually shared
+// immediately by the FastNewClosureStub when run in the same context.
+TEST(OptimizedCodeSharing) {
+ // Skip test if --cache-optimized-code is not activated by default because
+ // FastNewClosureStub that is baked into the snapshot is incorrect.
+ if (!FLAG_cache_optimized_code) return;
+ FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ for (int i = 0; i < 10; i++) {
+ LocalContext env;
+ env->Global()->Set(v8::String::New("x"), v8::Integer::New(i));
+ CompileRun("function MakeClosure() {"
+ " return function() { return x; };"
+ "}"
+ "var closure0 = MakeClosure();"
+ "%DebugPrint(closure0());"
+ "%OptimizeFunctionOnNextCall(closure0);"
+ "%DebugPrint(closure0());"
+ "var closure1 = MakeClosure();"
+ "var closure2 = MakeClosure();");
+ Handle<JSFunction> fun1 = v8::Utils::OpenHandle(
+ *v8::Local<v8::Function>::Cast(env->Global()->Get(v8_str("closure1"))));
+ Handle<JSFunction> fun2 = v8::Utils::OpenHandle(
+ *v8::Local<v8::Function>::Cast(env->Global()->Get(v8_str("closure2"))));
+ CHECK(fun1->IsOptimized() || !fun1->IsOptimizable());
+ CHECK(fun2->IsOptimized() || !fun2->IsOptimizable());
+ CHECK_EQ(fun1->code(), fun2->code());
+ }
+}
+
+
#ifdef ENABLE_DISASSEMBLER
static Handle<JSFunction> GetJSFunction(v8::Handle<v8::Object> obj,
const char* property_name) {
@@ -374,15 +401,16 @@ static void CheckCodeForUnsafeLiteral(Handle<JSFunction> f) {
Address end = pc + decode_size;
v8::internal::EmbeddedVector<char, 128> decode_buffer;
+ v8::internal::EmbeddedVector<char, 128> smi_hex_buffer;
+ Smi* smi = Smi::FromInt(12345678);
+ OS::SNPrintF(smi_hex_buffer, "0x%lx", reinterpret_cast<intptr_t>(smi));
while (pc < end) {
int num_const = d.ConstantPoolSizeAt(pc);
if (num_const >= 0) {
pc += (num_const + 1) * kPointerSize;
} else {
pc += d.InstructionDecode(decode_buffer, pc);
- CHECK(strstr(decode_buffer.start(), "mov eax,0x178c29c") == NULL);
- CHECK(strstr(decode_buffer.start(), "push 0x178c29c") == NULL);
- CHECK(strstr(decode_buffer.start(), "0x178c29c") == NULL);
+ CHECK(strstr(decode_buffer.start(), smi_hex_buffer.start()) == NULL);
}
}
}
diff --git a/src/3rdparty/v8/test/cctest/test-dataflow.cc b/src/3rdparty/v8/test/cctest/test-dataflow.cc
index a63008d..ae33279 100644
--- a/src/3rdparty/v8/test/cctest/test-dataflow.cc
+++ b/src/3rdparty/v8/test/cctest/test-dataflow.cc
@@ -36,8 +36,8 @@ using namespace v8::internal;
TEST(BitVector) {
v8::internal::V8::Initialize(NULL);
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- Zone* zone = ZONE;
+ Zone* zone = Isolate::Current()->runtime_zone();
+ ZoneScope zone_scope(zone, DELETE_ON_EXIT);
{
BitVector v(15, zone);
v.Add(1);
diff --git a/src/3rdparty/v8/test/cctest/test-debug.cc b/src/3rdparty/v8/test/cctest/test-debug.cc
index 9c831fb..3caeb1b 100644
--- a/src/3rdparty/v8/test/cctest/test-debug.cc
+++ b/src/3rdparty/v8/test/cctest/test-debug.cc
@@ -197,10 +197,9 @@ static bool HasDebugInfo(v8::Handle<v8::Function> fun) {
// number.
static int SetBreakPoint(Handle<v8::internal::JSFunction> fun, int position) {
static int break_point = 0;
- Handle<v8::internal::SharedFunctionInfo> shared(fun->shared());
v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
debug->SetBreakPoint(
- shared,
+ fun,
Handle<Object>(v8::internal::Smi::FromInt(++break_point)),
&position);
return break_point;
@@ -515,7 +514,7 @@ void CheckDebugBreakFunction(DebugLocalContext* env,
// there
ClearBreakPoint(bp);
CHECK(!debug->HasDebugInfo(shared));
- CHECK(debug->EnsureDebugInfo(shared));
+ CHECK(debug->EnsureDebugInfo(shared, fun));
TestBreakLocationIterator it2(Debug::GetDebugInfo(shared));
it2.FindBreakLocationFromPosition(position);
actual_mode = it2.it()->rinfo()->rmode();
@@ -2441,7 +2440,7 @@ TEST(DebuggerStatementBreakpoint) {
}
-// Thest that the evaluation of expressions when a break point is hit generates
+// Test that the evaluation of expressions when a break point is hit generates
// the correct results.
TEST(DebugEvaluate) {
v8::HandleScope scope;
@@ -2557,6 +2556,98 @@ TEST(DebugEvaluate) {
CheckDebuggerUnloaded();
}
+
+int debugEventCount = 0;
+static void CheckDebugEvent(const v8::Debug::EventDetails& eventDetails) {
+ if (eventDetails.GetEvent() == v8::Break) ++debugEventCount;
+}
+
+// Test that the conditional breakpoints work event if code generation from
+// strings is prohibited in the debugee context.
+TEST(ConditionalBreakpointWithCodeGenerationDisallowed) {
+ v8::HandleScope scope;
+ DebugLocalContext env;
+ env.ExposeDebug();
+
+ v8::Debug::SetDebugEventListener2(CheckDebugEvent);
+
+ v8::Local<v8::Function> foo = CompileFunction(&env,
+ "function foo(x) {\n"
+ " var s = 'String value2';\n"
+ " return s + x;\n"
+ "}",
+ "foo");
+
+ // Set conditional breakpoint with condition 'true'.
+ CompileRun("debug.Debug.setBreakPoint(foo, 2, 0, 'true')");
+
+ debugEventCount = 0;
+ env->AllowCodeGenerationFromStrings(false);
+ foo->Call(env->Global(), 0, NULL);
+ CHECK_EQ(1, debugEventCount);
+
+ v8::Debug::SetDebugEventListener2(NULL);
+ CheckDebuggerUnloaded();
+}
+
+
+bool checkedDebugEvals = true;
+v8::Handle<v8::Function> checkGlobalEvalFunction;
+v8::Handle<v8::Function> checkFrameEvalFunction;
+static void CheckDebugEval(const v8::Debug::EventDetails& eventDetails) {
+ if (eventDetails.GetEvent() == v8::Break) {
+ ++debugEventCount;
+ v8::HandleScope handleScope;
+
+ v8::Handle<v8::Value> args[] = { eventDetails.GetExecutionState() };
+ CHECK(checkGlobalEvalFunction->Call(
+ eventDetails.GetEventContext()->Global(), 1, args)->IsTrue());
+ CHECK(checkFrameEvalFunction->Call(
+ eventDetails.GetEventContext()->Global(), 1, args)->IsTrue());
+ }
+}
+
+// Test that the evaluation of expressions when a break point is hit generates
+// the correct results in case code generation from strings is disallowed in the
+// debugee context.
+TEST(DebugEvaluateWithCodeGenerationDisallowed) {
+ v8::HandleScope scope;
+ DebugLocalContext env;
+ env.ExposeDebug();
+
+ v8::Debug::SetDebugEventListener2(CheckDebugEval);
+
+ v8::Local<v8::Function> foo = CompileFunction(&env,
+ "var global = 'Global';\n"
+ "function foo(x) {\n"
+ " var local = 'Local';\n"
+ " debugger;\n"
+ " return local + x;\n"
+ "}",
+ "foo");
+ checkGlobalEvalFunction = CompileFunction(&env,
+ "function checkGlobalEval(exec_state) {\n"
+ " return exec_state.evaluateGlobal('global').value() === 'Global';\n"
+ "}",
+ "checkGlobalEval");
+
+ checkFrameEvalFunction = CompileFunction(&env,
+ "function checkFrameEval(exec_state) {\n"
+ " return exec_state.frame(0).evaluate('local').value() === 'Local';\n"
+ "}",
+ "checkFrameEval");
+ debugEventCount = 0;
+ env->AllowCodeGenerationFromStrings(false);
+ foo->Call(env->Global(), 0, NULL);
+ CHECK_EQ(1, debugEventCount);
+
+ checkGlobalEvalFunction.Clear();
+ checkFrameEvalFunction.Clear();
+ v8::Debug::SetDebugEventListener2(NULL);
+ CheckDebuggerUnloaded();
+}
+
+
// Copies a C string to a 16-bit string. Does not check for buffer overflow.
// Does not use the V8 engine to convert strings, so it can be used
// in any thread. Returns the length of the string.
@@ -4086,15 +4177,12 @@ TEST(StepWithException) {
TEST(DebugBreak) {
+#ifdef VERIFY_HEAP
+ i::FLAG_verify_heap = true;
+#endif
v8::HandleScope scope;
DebugLocalContext env;
- // This test should be run with option --verify-heap. As --verify-heap is
- // only available in debug mode only check for it in that case.
-#ifdef DEBUG
- CHECK(v8::internal::FLAG_verify_heap);
-#endif
-
// Register a debug event listener which sets the break flag and counts.
v8::Debug::SetDebugEventListener(DebugEventBreak);
@@ -4333,9 +4421,9 @@ TEST(InterceptorPropertyMirror) {
"named_values[%d] instanceof debug.PropertyMirror", i);
CHECK(CompileRun(buffer.start())->BooleanValue());
- // 5 is PropertyType.Interceptor
OS::SNPrintF(buffer, "named_values[%d].propertyType()", i);
- CHECK_EQ(5, CompileRun(buffer.start())->Int32Value());
+ CHECK_EQ(v8::internal::INTERCEPTOR,
+ CompileRun(buffer.start())->Int32Value());
OS::SNPrintF(buffer, "named_values[%d].isNative()", i);
CHECK(CompileRun(buffer.start())->BooleanValue());
@@ -5893,9 +5981,9 @@ TEST(DebuggerAgent) {
i::Debugger* debugger = i::Isolate::Current()->debugger();
// Make sure these ports is not used by other tests to allow tests to run in
// parallel.
- const int kPort1 = 5858;
- const int kPort2 = 5857;
- const int kPort3 = 5856;
+ const int kPort1 = 5858 + FlagDependentPortOffset();
+ const int kPort2 = 5857 + FlagDependentPortOffset();
+ const int kPort3 = 5856 + FlagDependentPortOffset();
// Make a string with the port2 number.
const int kPortBufferLen = 6;
@@ -5994,7 +6082,7 @@ void DebuggerAgentProtocolServerThread::Run() {
TEST(DebuggerAgentProtocolOverflowHeader) {
// Make sure this port is not used by other tests to allow tests to run in
// parallel.
- const int kPort = 5860;
+ const int kPort = 5860 + FlagDependentPortOffset();
static const char* kLocalhost = "localhost";
// Make a string with the port number.
@@ -7409,4 +7497,94 @@ TEST(DebugBreakInline) {
}
+static void DebugEventStepNext(v8::DebugEvent event,
+ v8::Handle<v8::Object> exec_state,
+ v8::Handle<v8::Object> event_data,
+ v8::Handle<v8::Value> data) {
+ if (event == v8::Break) {
+ PrepareStep(StepNext);
+ }
+}
+
+
+static void RunScriptInANewCFrame(const char* source) {
+ v8::TryCatch try_catch;
+ CompileRun(source);
+ CHECK(try_catch.HasCaught());
+}
+
+
+TEST(Regress131642) {
+ // Bug description:
+ // When doing StepNext through the first script, the debugger is not reset
+ // after exiting through exception. A flawed implementation enabling the
+ // debugger to step into Array.prototype.forEach breaks inside the callback
+ // for forEach in the second script under the assumption that we are in a
+ // recursive call. In an attempt to step out, we crawl the stack using the
+ // recorded frame pointer from the first script and fail when not finding it
+ // on the stack.
+ v8::HandleScope scope;
+ DebugLocalContext env;
+ v8::Debug::SetDebugEventListener(DebugEventStepNext);
+
+ // We step through the first script. It exits through an exception. We run
+ // this inside a new frame to record a different FP than the second script
+ // would expect.
+ const char* script_1 = "debugger; throw new Error();";
+ RunScriptInANewCFrame(script_1);
+
+ // The second script uses forEach.
+ const char* script_2 = "[0].forEach(function() { });";
+ CompileRun(script_2);
+
+ v8::Debug::SetDebugEventListener(NULL);
+}
+
+
+// Import from test-heap.cc
+int CountNativeContexts();
+
+
+static void NopListener(v8::DebugEvent event,
+ v8::Handle<v8::Object> exec_state,
+ v8::Handle<v8::Object> event_data,
+ v8::Handle<v8::Value> data) {
+}
+
+
+TEST(DebuggerCreatesContextIffActive) {
+ v8::HandleScope scope;
+ DebugLocalContext env;
+ CHECK_EQ(1, CountNativeContexts());
+
+ v8::Debug::SetDebugEventListener(NULL);
+ CompileRun("debugger;");
+ CHECK_EQ(1, CountNativeContexts());
+
+ v8::Debug::SetDebugEventListener(NopListener);
+ CompileRun("debugger;");
+ CHECK_EQ(2, CountNativeContexts());
+
+ v8::Debug::SetDebugEventListener(NULL);
+}
+
+
+TEST(LiveEditEnabled) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Debug::SetLiveEditEnabled(true);
+ CompileRun("%LiveEditCompareStrings('', '')");
+}
+
+
+TEST(LiveEditDisabled) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Debug::SetLiveEditEnabled(false);
+ CompileRun("%LiveEditCompareStrings('', '')");
+}
+
+
#endif // ENABLE_DEBUGGER_SUPPORT
diff --git a/src/3rdparty/v8/test/cctest/test-decls.cc b/src/3rdparty/v8/test/cctest/test-decls.cc
index e6bdc9f..6fc6012 100644
--- a/src/3rdparty/v8/test/cctest/test-decls.cc
+++ b/src/3rdparty/v8/test/cctest/test-decls.cc
@@ -37,7 +37,8 @@ using namespace v8;
enum Expectations {
EXPECT_RESULT,
- EXPECT_EXCEPTION
+ EXPECT_EXCEPTION,
+ EXPECT_ERROR
};
@@ -72,6 +73,10 @@ class DeclarationContext {
void InitializeIfNeeded();
+ // Perform optional initialization steps on the context after it has
+ // been created. Defaults to none but may be overwritten.
+ virtual void PostInitializeContext(Handle<Context> context) {}
+
// Get the holder for the interceptor. Default to the instance template
// but may be overwritten.
virtual Local<ObjectTemplate> GetHolder(Local<FunctionTemplate> function) {
@@ -91,7 +96,6 @@ class DeclarationContext {
private:
bool is_initialized_;
Persistent<Context> context_;
- Local<String> property_;
int get_count_;
int set_count_;
@@ -120,6 +124,7 @@ void DeclarationContext::InitializeIfNeeded() {
context_ = Context::New(0, function->InstanceTemplate(), Local<Value>());
context_->Enter();
is_initialized_ = true;
+ PostInitializeContext(context_);
}
@@ -134,7 +139,13 @@ void DeclarationContext::Check(const char* source,
HandleScope scope;
TryCatch catcher;
catcher.SetVerbose(true);
- Local<Value> result = Script::Compile(String::New(source))->Run();
+ Local<Script> script = Script::Compile(String::New(source));
+ if (expectations == EXPECT_ERROR) {
+ CHECK(script.IsEmpty());
+ return;
+ }
+ CHECK(!script.IsEmpty());
+ Local<Value> result = script->Run();
CHECK_EQ(get, get_count());
CHECK_EQ(set, set_count());
CHECK_EQ(query, query_count());
@@ -536,9 +547,9 @@ TEST(ExistsInPrototype) {
{ ExistsInPrototypeContext context;
context.Check("var x; x",
- 0, // get
0,
- 0, // declaration
+ 0,
+ 0,
EXPECT_RESULT, Undefined());
}
@@ -546,7 +557,7 @@ TEST(ExistsInPrototype) {
context.Check("var x = 0; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Number::New(0));
}
@@ -554,7 +565,7 @@ TEST(ExistsInPrototype) {
context.Check("const x; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Undefined());
}
@@ -562,7 +573,7 @@ TEST(ExistsInPrototype) {
context.Check("const x = 0; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Number::New(0));
}
}
@@ -591,7 +602,305 @@ TEST(AbsentInPrototype) {
context.Check("if (false) { var x = 0; }; x",
0,
0,
- 0, // declaration
+ 0,
EXPECT_RESULT, Undefined());
}
}
+
+
+
+class ExistsInHiddenPrototypeContext: public DeclarationContext {
+ public:
+ ExistsInHiddenPrototypeContext() {
+ hidden_proto_ = FunctionTemplate::New();
+ hidden_proto_->SetHiddenPrototype(true);
+ }
+
+ protected:
+ virtual v8::Handle<Integer> Query(Local<String> key) {
+ // Let it seem that the property exists in the hidden prototype object.
+ return Integer::New(v8::None);
+ }
+
+ // Install the hidden prototype after the global object has been created.
+ virtual void PostInitializeContext(Handle<Context> context) {
+ Local<Object> global_object = context->Global();
+ Local<Object> hidden_proto = hidden_proto_->GetFunction()->NewInstance();
+ context->DetachGlobal();
+ context->Global()->SetPrototype(hidden_proto);
+ context->ReattachGlobal(global_object);
+ }
+
+ // Use the hidden prototype as the holder for the interceptors.
+ virtual Local<ObjectTemplate> GetHolder(Local<FunctionTemplate> function) {
+ return hidden_proto_->InstanceTemplate();
+ }
+
+ private:
+ Local<FunctionTemplate> hidden_proto_;
+};
+
+
+TEST(ExistsInHiddenPrototype) {
+ i::FLAG_es52_globals = true;
+ HandleScope scope;
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("var x; x",
+ 1, // access
+ 0,
+ 2, // declaration + initialization
+ EXPECT_EXCEPTION); // x is not defined!
+ }
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("var x = 0; x",
+ 1, // access
+ 1, // initialization
+ 2, // declaration + initialization
+ EXPECT_RESULT, Number::New(0));
+ }
+
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("function x() { }; x",
+ 0,
+ 0,
+ 0,
+ EXPECT_RESULT);
+ }
+
+ // TODO(mstarzinger): The semantics of global const is vague.
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("const x; x",
+ 0,
+ 0,
+ 1, // (re-)declaration
+ EXPECT_RESULT, Undefined());
+ }
+
+ // TODO(mstarzinger): The semantics of global const is vague.
+ { ExistsInHiddenPrototypeContext context;
+ context.Check("const x = 0; x",
+ 0,
+ 0,
+ 1, // (re-)declaration
+ EXPECT_RESULT, Number::New(0));
+ }
+}
+
+
+
+class SimpleContext {
+ public:
+ SimpleContext() {
+ context_ = Context::New(0);
+ context_->Enter();
+ }
+
+ virtual ~SimpleContext() {
+ context_->Exit();
+ context_.Dispose();
+ }
+
+ void Check(const char* source,
+ Expectations expectations,
+ v8::Handle<Value> value = Local<Value>()) {
+ HandleScope scope;
+ TryCatch catcher;
+ catcher.SetVerbose(true);
+ Local<Script> script = Script::Compile(String::New(source));
+ if (expectations == EXPECT_ERROR) {
+ CHECK(script.IsEmpty());
+ return;
+ }
+ CHECK(!script.IsEmpty());
+ Local<Value> result = script->Run();
+ if (expectations == EXPECT_RESULT) {
+ CHECK(!catcher.HasCaught());
+ if (!value.IsEmpty()) {
+ CHECK_EQ(value, result);
+ }
+ } else {
+ CHECK(expectations == EXPECT_EXCEPTION);
+ CHECK(catcher.HasCaught());
+ if (!value.IsEmpty()) {
+ CHECK_EQ(value, catcher.Exception());
+ }
+ }
+ }
+
+ private:
+ Persistent<Context> context_;
+};
+
+
+TEST(MultiScriptConflicts) {
+ HandleScope scope;
+
+ { SimpleContext context;
+ context.Check("var x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("var x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ context.Check("const x = 3; x",
+ EXPECT_RESULT, Number::New(3));
+ context.Check("const x = 4; x",
+ EXPECT_RESULT, Number::New(4));
+ context.Check("x = 5; x",
+ EXPECT_RESULT, Number::New(5));
+ context.Check("var x = 6; x",
+ EXPECT_RESULT, Number::New(6));
+ context.Check("this.x",
+ EXPECT_RESULT, Number::New(6));
+ context.Check("function x() { return 7 }; x()",
+ EXPECT_RESULT, Number::New(7));
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("var x = 2; x", // assignment ignored
+ EXPECT_RESULT, Number::New(1));
+ context.Check("const x = 3; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("x = 4; x", // assignment ignored
+ EXPECT_RESULT, Number::New(1));
+ context.Check("var x = 5; x", // assignment ignored
+ EXPECT_RESULT, Number::New(1));
+ context.Check("this.x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("function x() { return 7 }; x",
+ EXPECT_EXCEPTION);
+ }
+
+ i::FLAG_use_strict = true;
+ i::FLAG_harmony_scoping = true;
+
+ { SimpleContext context;
+ context.Check("var x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("this.x",
+ EXPECT_RESULT, Number::New(1));
+ }
+
+ { SimpleContext context;
+ context.Check("function x() { return 4 }; x()",
+ EXPECT_RESULT, Number::New(4));
+ context.Check("x()",
+ EXPECT_RESULT, Number::New(4));
+ context.Check("this.x()",
+ EXPECT_RESULT, Number::New(4));
+ }
+
+ { SimpleContext context;
+ context.Check("let x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ context.Check("x",
+ EXPECT_RESULT, Number::New(2));
+ // TODO(rossberg): The current ES6 draft spec does not reflect lexical
+ // bindings on the global object. However, this will probably change, in
+ // which case we reactivate the following test.
+ // context.Check("this.x",
+ // EXPECT_RESULT, Number::New(2));
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 3; x",
+ EXPECT_RESULT, Number::New(3));
+ context.Check("x",
+ EXPECT_RESULT, Number::New(3));
+ // TODO(rossberg): The current ES6 draft spec does not reflect lexical
+ // bindings on the global object. However, this will probably change, in
+ // which case we reactivate the following test.
+ // context.Check("this.x",
+ // EXPECT_RESULT, Number::New(3));
+ }
+
+ // TODO(rossberg): All of the below should actually be errors in Harmony.
+
+ { SimpleContext context;
+ context.Check("var x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("let x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ }
+
+ { SimpleContext context;
+ context.Check("var x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("const x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ }
+
+ { SimpleContext context;
+ context.Check("function x() { return 1 }; x()",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("let x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ }
+
+ { SimpleContext context;
+ context.Check("function x() { return 1 }; x()",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("const x = 2; x",
+ EXPECT_RESULT, Number::New(2));
+ }
+
+ { SimpleContext context;
+ context.Check("let x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("var x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("let x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("let x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("let x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("const x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("let x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("function x() { return 2 }; x()",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("var x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("let x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("const x = 2; x",
+ EXPECT_ERROR);
+ }
+
+ { SimpleContext context;
+ context.Check("const x = 1; x",
+ EXPECT_RESULT, Number::New(1));
+ context.Check("function x() { return 2 }; x()",
+ EXPECT_ERROR);
+ }
+}
diff --git a/src/3rdparty/v8/test/cctest/test-dictionary.cc b/src/3rdparty/v8/test/cctest/test-dictionary.cc
index 793e228..00e3833 100644
--- a/src/3rdparty/v8/test/cctest/test-dictionary.cc
+++ b/src/3rdparty/v8/test/cctest/test-dictionary.cc
@@ -48,24 +48,24 @@ TEST(ObjectHashTable) {
table = PutIntoObjectHashTable(table, a, b);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_EQ(table->Lookup(*a), *b);
- CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
+ CHECK_EQ(table->Lookup(*b), HEAP->the_hole_value());
// Keys still have to be valid after objects were moved.
HEAP->CollectGarbage(NEW_SPACE);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_EQ(table->Lookup(*a), *b);
- CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
+ CHECK_EQ(table->Lookup(*b), HEAP->the_hole_value());
// Keys that are overwritten should not change number of elements.
table = PutIntoObjectHashTable(table, a, FACTORY->NewJSArray(13));
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_NE(table->Lookup(*a), *b);
- // Keys mapped to undefined should be removed permanently.
- table = PutIntoObjectHashTable(table, a, FACTORY->undefined_value());
+ // Keys mapped to the hole should be removed permanently.
+ table = PutIntoObjectHashTable(table, a, FACTORY->the_hole_value());
CHECK_EQ(table->NumberOfElements(), 0);
CHECK_EQ(table->NumberOfDeletedElements(), 1);
- CHECK_EQ(table->Lookup(*a), HEAP->undefined_value());
+ CHECK_EQ(table->Lookup(*a), HEAP->the_hole_value());
// Keys should map back to their respective values and also should get
// an identity hash code generated.
@@ -85,7 +85,7 @@ TEST(ObjectHashTable) {
Handle<JSObject> key = FACTORY->NewJSArray(7);
CHECK(key->GetIdentityHash(ALLOW_CREATION)->ToObjectChecked()->IsSmi());
CHECK_EQ(table->FindEntry(*key), ObjectHashTable::kNotFound);
- CHECK_EQ(table->Lookup(*key), HEAP->undefined_value());
+ CHECK_EQ(table->Lookup(*key), HEAP->the_hole_value());
CHECK(key->GetIdentityHash(OMIT_CREATION)->ToObjectChecked()->IsSmi());
}
@@ -93,7 +93,7 @@ TEST(ObjectHashTable) {
// should not get an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSObject> key = FACTORY->NewJSArray(7);
- CHECK_EQ(table->Lookup(*key), HEAP->undefined_value());
+ CHECK_EQ(table->Lookup(*key), HEAP->the_hole_value());
CHECK_EQ(key->GetIdentityHash(OMIT_CREATION), HEAP->undefined_value());
}
}
@@ -105,6 +105,12 @@ TEST(ObjectHashSetCausesGC) {
LocalContext context;
Handle<ObjectHashSet> table = FACTORY->NewObjectHashSet(1);
Handle<JSObject> key = FACTORY->NewJSArray(0);
+ v8::Handle<v8::Object> key_obj = v8::Utils::ToLocal(key);
+
+ // Force allocation of hash table backing store for hidden properties.
+ key_obj->SetHiddenValue(v8_str("key 1"), v8_str("val 1"));
+ key_obj->SetHiddenValue(v8_str("key 2"), v8_str("val 2"));
+ key_obj->SetHiddenValue(v8_str("key 3"), v8_str("val 3"));
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
@@ -128,13 +134,19 @@ TEST(ObjectHashTableCausesGC) {
LocalContext context;
Handle<ObjectHashTable> table = FACTORY->NewObjectHashTable(1);
Handle<JSObject> key = FACTORY->NewJSArray(0);
+ v8::Handle<v8::Object> key_obj = v8::Utils::ToLocal(key);
+
+ // Force allocation of hash table backing store for hidden properties.
+ key_obj->SetHiddenValue(v8_str("key 1"), v8_str("val 1"));
+ key_obj->SetHiddenValue(v8_str("key 2"), v8_str("val 2"));
+ key_obj->SetHiddenValue(v8_str("key 3"), v8_str("val 3"));
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
FLAG_gc_interval = 0;
// Calling Lookup() should not cause GC ever.
- CHECK(table->Lookup(*key)->IsUndefined());
+ CHECK(table->Lookup(*key)->IsTheHole());
// Calling Put() should request GC by returning a failure.
CHECK(table->Put(*key, *key)->IsRetryAfterGC());
diff --git a/src/3rdparty/v8/test/cctest/test-disasm-arm.cc b/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
index 0e9432d..3a2d9e8 100644
--- a/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
+++ b/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
@@ -92,6 +92,10 @@ bool DisassembleAndCompare(byte* pc, const char* compare_string) {
if (!DisassembleAndCompare(progcounter, compare_string)) failure = true; \
}
+// Force emission of any pending literals into a pool.
+#define EMIT_PENDING_LITERALS() \
+ assm.CheckConstPool(true, false)
+
// Verify that all invocations of the COMPARE macro passed successfully.
// Exit with a failure if at least one of the tests failed.
@@ -280,6 +284,10 @@ TEST(Type0) {
// is pretty strange anyway.
COMPARE(mov(r5, Operand(0x01234), SetCC, ne),
"159fc000 ldrne ip, [pc, #+0]");
+ // Emit a literal pool now, otherwise this could be dumped later, in the
+ // middle of a different test.
+ EMIT_PENDING_LITERALS();
+
// We only disassemble one instruction so the eor instruction is not here.
// The eor does the setcc so we get a movw here.
COMPARE(eor(r5, r4, Operand(0x1234), SetCC, ne),
diff --git a/src/3rdparty/v8/test/cctest/test-flags.cc b/src/3rdparty/v8/test/cctest/test-flags.cc
index 32f1264..9cb12c4 100644
--- a/src/3rdparty/v8/test/cctest/test-flags.cc
+++ b/src/3rdparty/v8/test/cctest/test-flags.cc
@@ -159,7 +159,7 @@ TEST(Flags6) {
CHECK_EQ(3, FlagList::SetFlagsFromCommandLine(&argc,
const_cast<char **>(argv),
true));
- CHECK_EQ(4, argc);
+ CHECK_EQ(2, argc);
}
@@ -232,3 +232,16 @@ TEST(FlagsJSArguments4) {
CHECK_EQ(0, FLAG_js_arguments.argc());
}
+
+TEST(FlagsRemoveIncomplete) {
+ // Test that processed command line arguments are removed, even
+ // if the list of arguments ends unexpectedly.
+ SetFlagsToDefault();
+ int argc = 3;
+ const char* argv[] = { "", "--crankshaft", "--expose-debug-as" };
+ CHECK_EQ(2, FlagList::SetFlagsFromCommandLine(&argc,
+ const_cast<char **>(argv),
+ true));
+ CHECK_NE(NULL, argv[1]);
+ CHECK_EQ(argc, 2);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-func-name-inference.cc b/src/3rdparty/v8/test/cctest/test-func-name-inference.cc
index 8f405b7..cda6aa0 100644
--- a/src/3rdparty/v8/test/cctest/test-func-name-inference.cc
+++ b/src/3rdparty/v8/test/cctest/test-func-name-inference.cc
@@ -28,6 +28,7 @@
#include "v8.h"
#include "api.h"
+#include "debug.h"
#include "runtime.h"
#include "cctest.h"
@@ -87,10 +88,10 @@ static void CheckFunctionName(v8::Handle<v8::Script> script,
#ifdef ENABLE_DEBUGGER_SUPPORT
// Obtain SharedFunctionInfo for the function.
+ Isolate::Current()->debug()->PrepareForBreakPoints();
Object* shared_func_info_ptr =
- Runtime::FindSharedFunctionInfoInScript(Isolate::Current(),
- i_script,
- func_pos);
+ Isolate::Current()->debug()->FindSharedFunctionInfoInScript(i_script,
+ func_pos);
CHECK(shared_func_info_ptr != HEAP->undefined_value());
Handle<SharedFunctionInfo> shared_func_info(
SharedFunctionInfo::cast(shared_func_info_ptr));
@@ -398,5 +399,45 @@ TEST(AssignmentAndCall) {
// The inferred name is empty, because this is an assignment of a result.
CheckFunctionName(script, "return 1", "");
// See MultipleAssignments test.
- CheckFunctionName(script, "return 2", "Enclosing.Bar");
+ // TODO(2276): Lazy compiling the enclosing outer closure would yield
+ // in "Enclosing.Bar" being the inferred name here.
+ CheckFunctionName(script, "return 2", "Bar");
+}
+
+
+TEST(MethodAssignmentInAnonymousFunctionCall) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "(function () {\n"
+ " var EventSource = function () { };\n"
+ " EventSource.prototype.addListener = function () {\n"
+ " return 2012;\n"
+ " };\n"
+ " this.PublicEventSource = EventSource;\n"
+ "})();");
+ CheckFunctionName(script, "return 2012", "EventSource.addListener");
+}
+
+
+TEST(ReturnAnonymousFunction) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "(function() {\n"
+ " function wrapCode() {\n"
+ " return function () {\n"
+ " return 2012;\n"
+ " };\n"
+ " };\n"
+ " var foo = 10;\n"
+ " function f() {\n"
+ " return wrapCode();\n"
+ " }\n"
+ " this.ref = f;\n"
+ "})()");
+ script->Run();
+ CheckFunctionName(script, "return 2012", "");
}
diff --git a/src/3rdparty/v8/test/cctest/test-heap-profiler.cc b/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
index b5815dc..2a60785 100644
--- a/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
+++ b/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
@@ -7,6 +7,7 @@
#include "v8.h"
#include "cctest.h"
+#include "hashmap.h"
#include "heap-profiler.h"
#include "snapshot.h"
#include "debug.h"
@@ -27,10 +28,14 @@ class NamedEntriesDetector {
if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
}
+ static bool AddressesMatch(void* key1, void* key2) {
+ return key1 == key2;
+ }
+
void CheckAllReachables(i::HeapEntry* root) {
+ i::HashMap visited(AddressesMatch);
i::List<i::HeapEntry*> list(10);
list.Add(root);
- root->paint();
CheckEntry(root);
while (!list.is_empty()) {
i::HeapEntry* entry = list.RemoveLast();
@@ -38,11 +43,15 @@ class NamedEntriesDetector {
for (int i = 0; i < children.length(); ++i) {
if (children[i]->type() == i::HeapGraphEdge::kShortcut) continue;
i::HeapEntry* child = children[i]->to();
- if (!child->painted()) {
- list.Add(child);
- child->paint();
- CheckEntry(child);
- }
+ i::HashMap::Entry* entry = visited.Lookup(
+ reinterpret_cast<void*>(child),
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(child)),
+ true);
+ if (entry->value)
+ continue;
+ entry->value = reinterpret_cast<void*>(1);
+ list.Add(child);
+ CheckEntry(child);
}
}
}
@@ -107,9 +116,6 @@ TEST(HeapSnapshot) {
"var c2 = new C2(a2);");
const v8::HeapSnapshot* snapshot_env2 =
v8::HeapProfiler::TakeSnapshot(v8_str("env2"));
- i::HeapSnapshot* i_snapshot_env2 =
- const_cast<i::HeapSnapshot*>(
- reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2);
// Verify, that JS global object of env2 has '..2' properties.
@@ -122,9 +128,7 @@ TEST(HeapSnapshot) {
NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b2_2"));
CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "c2"));
- // Paint all nodes reachable from global object.
NamedEntriesDetector det;
- i_snapshot_env2->ClearPaint();
det.CheckAllReachables(const_cast<i::HeapEntry*>(
reinterpret_cast<const i::HeapEntry*>(global_env2)));
CHECK(det.has_A2);
@@ -158,9 +162,9 @@ TEST(HeapSnapshotObjectSizes) {
CHECK_NE(NULL, x2);
// Test sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize());
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize());
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize());
+ CHECK_NE(0, x->GetSelfSize());
+ CHECK_NE(0, x1->GetSelfSize());
+ CHECK_NE(0, x2->GetSelfSize());
}
@@ -479,66 +483,6 @@ TEST(HeapSnapshotRootPreservedAfterSorting) {
}
-TEST(HeapEntryDominator) {
- // The graph looks like this:
- //
- // -> node1
- // a |^
- // -> node5 ba
- // a v|
- // node6 -> node2
- // b a |^
- // -> node4 ba
- // b v|
- // -> node3
- //
- // The dominator for all nodes is node6.
-
- v8::HandleScope scope;
- LocalContext env;
-
- CompileRun(
- "function X(a, b) { this.a = a; this.b = b; }\n"
- "node6 = new X(new X(new X()), new X(new X(),new X()));\n"
- "(function(){\n"
- "node6.a.a.b = node6.b.a; // node1 -> node2\n"
- "node6.b.a.a = node6.a.a; // node2 -> node1\n"
- "node6.b.a.b = node6.b.b; // node2 -> node3\n"
- "node6.b.b.a = node6.b.a; // node3 -> node2\n"
- "})();");
-
- const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8_str("dominators"));
-
- const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
- CHECK_NE(NULL, global);
- const v8::HeapGraphNode* node6 =
- GetProperty(global, v8::HeapGraphEdge::kProperty, "node6");
- CHECK_NE(NULL, node6);
- const v8::HeapGraphNode* node5 =
- GetProperty(node6, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node5);
- const v8::HeapGraphNode* node4 =
- GetProperty(node6, v8::HeapGraphEdge::kProperty, "b");
- CHECK_NE(NULL, node4);
- const v8::HeapGraphNode* node3 =
- GetProperty(node4, v8::HeapGraphEdge::kProperty, "b");
- CHECK_NE(NULL, node3);
- const v8::HeapGraphNode* node2 =
- GetProperty(node4, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node2);
- const v8::HeapGraphNode* node1 =
- GetProperty(node5, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node1);
-
- CHECK_EQ(node6, node1->GetDominatorNode());
- CHECK_EQ(node6, node2->GetDominatorNode());
- CHECK_EQ(node6, node3->GetDominatorNode());
- CHECK_EQ(node6, node4->GetDominatorNode());
- CHECK_EQ(node6, node5->GetDominatorNode());
-}
-
-
namespace {
class TestJSONStream : public v8::OutputStream {
@@ -623,7 +567,7 @@ TEST(HeapSnapshotJSONSerialization) {
// Get node and edge "member" offsets.
v8::Local<v8::Value> meta_analysis_result = CompileRun(
"var meta = parsed.snapshot.meta;\n"
- "var edges_index_offset = meta.node_fields.indexOf('edges_index');\n"
+ "var edge_count_offset = meta.node_fields.indexOf('edge_count');\n"
"var node_fields_count = meta.node_fields.length;\n"
"var edge_fields_count = meta.edge_fields.length;\n"
"var edge_type_offset = meta.edge_fields.indexOf('type');\n"
@@ -633,7 +577,13 @@ TEST(HeapSnapshotJSONSerialization) {
" meta.edge_types[edge_type_offset].indexOf('property');\n"
"var shortcut_type ="
" meta.edge_types[edge_type_offset].indexOf('shortcut');\n"
- "parsed.nodes.concat(0, 0, 0, 0, 0, 0, parsed.edges.length);");
+ "var node_count = parsed.nodes.length / node_fields_count;\n"
+ "var first_edge_indexes = parsed.first_edge_indexes = [];\n"
+ "for (var i = 0, first_edge_index = 0; i < node_count; ++i) {\n"
+ " first_edge_indexes[i] = first_edge_index;\n"
+ " first_edge_index += edge_fields_count *\n"
+ " parsed.nodes[i * node_fields_count + edge_count_offset];\n"
+ "}\n");
CHECK(!meta_analysis_result.IsEmpty());
// A helper function for processing encoded nodes.
@@ -642,8 +592,9 @@ TEST(HeapSnapshotJSONSerialization) {
" var nodes = parsed.nodes;\n"
" var edges = parsed.edges;\n"
" var strings = parsed.strings;\n"
- " for (var i = nodes[pos + edges_index_offset],\n"
- " count = nodes[pos + node_fields_count + edges_index_offset];\n"
+ " var node_ordinal = pos / node_fields_count;\n"
+ " for (var i = parsed.first_edge_indexes[node_ordinal],\n"
+ " count = parsed.first_edge_indexes[node_ordinal + 1];\n"
" i < count; i += edge_fields_count) {\n"
" if (edges[i + edge_type_offset] === prop_type\n"
" && strings[edges[i + edge_name_offset]] === prop_name)\n"
@@ -659,8 +610,8 @@ TEST(HeapSnapshotJSONSerialization) {
"GetChildPosByProperty(\n"
" GetChildPosByProperty(\n"
" GetChildPosByProperty("
- " parsed.edges[parsed.nodes[edges_index_offset]"
- " + edge_to_node_offset + edge_fields_count],"
+ " parsed.edges[edge_to_node_offset"
+ " + edge_fields_count],"
" \"b\", property_type),\n"
" \"x\", property_type),"
" \"s\", property_type)");
@@ -669,8 +620,7 @@ TEST(HeapSnapshotJSONSerialization) {
"GetChildPosByProperty(\n"
" GetChildPosByProperty(\n"
" GetChildPosByProperty("
- " parsed.edges[parsed.nodes[edges_index_offset]"
- " + edge_to_node_offset],"
+ " parsed.edges[edge_to_node_offset],"
" \"b\", property_type),\n"
" \"x\", property_type),"
" \"s\", property_type)");
@@ -763,9 +713,13 @@ class TestStatsStream : public v8::OutputStream {
} // namespace
-static TestStatsStream GetHeapStatsUpdate() {
+static TestStatsStream GetHeapStatsUpdate(
+ v8::SnapshotObjectId* object_id = NULL) {
TestStatsStream stream;
- v8::HeapProfiler::PushHeapObjectsStats(&stream);
+ v8::SnapshotObjectId last_seen_id =
+ v8::HeapProfiler::PushHeapObjectsStats(&stream);
+ if (object_id)
+ *object_id = last_seen_id;
CHECK_EQ(1, stream.eos_signaled());
return stream;
}
@@ -776,15 +730,16 @@ TEST(HeapSnapshotObjectsStats) {
LocalContext env;
v8::HeapProfiler::StartHeapObjectsTracking();
- // We have to call GC 5 times. In other case the garbage will be
+ // We have to call GC 6 times. In other case the garbage will be
// the reason of flakiness.
- for (int i = 0; i < 5; ++i) {
+ for (int i = 0; i < 6; ++i) {
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
}
+ v8::SnapshotObjectId initial_id;
{
// Single chunk of data expected in update. Initial data.
- TestStatsStream stats_update = GetHeapStatsUpdate();
+ TestStatsStream stats_update = GetHeapStatsUpdate(&initial_id);
CHECK_EQ(1, stats_update.intervals_count());
CHECK_EQ(1, stats_update.updates_written());
CHECK_LT(0, stats_update.entries_size());
@@ -792,13 +747,18 @@ TEST(HeapSnapshotObjectsStats) {
}
// No data expected in update because nothing has happened.
- CHECK_EQ(0, GetHeapStatsUpdate().updates_written());
+ v8::SnapshotObjectId same_id;
+ CHECK_EQ(0, GetHeapStatsUpdate(&same_id).updates_written());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(initial_id, same_id);
+
{
+ v8::SnapshotObjectId additional_string_id;
v8::HandleScope inner_scope_1;
v8_str("string1");
{
// Single chunk of data with one new entry expected in update.
- TestStatsStream stats_update = GetHeapStatsUpdate();
+ TestStatsStream stats_update = GetHeapStatsUpdate(&additional_string_id);
+ CHECK_LT(same_id, additional_string_id);
CHECK_EQ(1, stats_update.intervals_count());
CHECK_EQ(1, stats_update.updates_written());
CHECK_LT(0, stats_update.entries_size());
@@ -807,7 +767,9 @@ TEST(HeapSnapshotObjectsStats) {
}
// No data expected in update because nothing happened.
- CHECK_EQ(0, GetHeapStatsUpdate().updates_written());
+ v8::SnapshotObjectId last_id;
+ CHECK_EQ(0, GetHeapStatsUpdate(&last_id).updates_written());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(additional_string_id, last_id);
{
v8::HandleScope inner_scope_2;
@@ -1503,6 +1465,36 @@ TEST(FastCaseGetter) {
CHECK_NE(NULL, setterFunction);
}
+TEST(HiddenPropertiesFastCase) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun(
+ "function C(x) { this.a = this; this.b = x; }\n"
+ "c = new C(2012);\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("HiddenPropertiesFastCase1"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* c =
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "c");
+ CHECK_NE(NULL, c);
+ const v8::HeapGraphNode* hidden_props =
+ GetProperty(c, v8::HeapGraphEdge::kInternal, "hidden_properties");
+ CHECK_EQ(NULL, hidden_props);
+
+ v8::Handle<v8::Value> cHandle = env->Global()->Get(v8::String::New("c"));
+ CHECK(!cHandle.IsEmpty() && cHandle->IsObject());
+ cHandle->ToObject()->SetHiddenValue(v8_str("key"), v8_str("val"));
+
+ snapshot = v8::HeapProfiler::TakeSnapshot(
+ v8_str("HiddenPropertiesFastCase2"));
+ global = GetGlobalObject(snapshot);
+ c = GetProperty(global, v8::HeapGraphEdge::kProperty, "c");
+ CHECK_NE(NULL, c);
+ hidden_props = GetProperty(c, v8::HeapGraphEdge::kInternal,
+ "hidden_properties");
+ CHECK_NE(NULL, hidden_props);
+}
bool HasWeakEdge(const v8::HeapGraphNode* node) {
for (int i = 0; i < node->GetChildrenCount(); ++i) {
@@ -1545,7 +1537,7 @@ TEST(WeakGlobalHandle) {
}
-TEST(WeakGlobalContextRefs) {
+TEST(WeakNativeContextRefs) {
v8::HandleScope scope;
LocalContext env;
@@ -1557,10 +1549,10 @@ TEST(WeakGlobalContextRefs) {
const v8::HeapGraphNode* global_handles = GetNode(
gc_roots, v8::HeapGraphNode::kObject, "(Global handles)");
CHECK_NE(NULL, global_handles);
- const v8::HeapGraphNode* global_context = GetNode(
- global_handles, v8::HeapGraphNode::kHidden, "system / GlobalContext");
- CHECK_NE(NULL, global_context);
- CHECK(HasWeakEdge(global_context));
+ const v8::HeapGraphNode* native_context = GetNode(
+ global_handles, v8::HeapGraphNode::kHidden, "system / NativeContext");
+ CHECK_NE(NULL, native_context);
+ CHECK(HasWeakEdge(native_context));
}
@@ -1583,6 +1575,7 @@ TEST(SfiAndJsFunctionWeakRefs) {
}
+#ifdef ENABLE_DEBUGGER_SUPPORT
TEST(NoDebugObjectInSnapshot) {
v8::HandleScope scope;
LocalContext env;
@@ -1607,6 +1600,7 @@ TEST(NoDebugObjectInSnapshot) {
}
CHECK_EQ(1, globals_count);
}
+#endif // ENABLE_DEBUGGER_SUPPORT
TEST(PersistentHandleCount) {
@@ -1682,3 +1676,26 @@ TEST(NoRefsToNonEssentialEntries) {
GetProperty(global_object, v8::HeapGraphEdge::kInternal, "elements");
CHECK_EQ(NULL, elements);
}
+
+
+TEST(MapHasDescriptorsAndTransitions) {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun("obj = { a: 10 };\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* global_object =
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "obj");
+ CHECK_NE(NULL, global_object);
+
+ const v8::HeapGraphNode* map =
+ GetProperty(global_object, v8::HeapGraphEdge::kInternal, "map");
+ CHECK_NE(NULL, map);
+ const v8::HeapGraphNode* own_descriptors = GetProperty(
+ map, v8::HeapGraphEdge::kInternal, "descriptors");
+ CHECK_NE(NULL, own_descriptors);
+ const v8::HeapGraphNode* own_transitions = GetProperty(
+ map, v8::HeapGraphEdge::kInternal, "transitions");
+ CHECK_EQ(NULL, own_transitions);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-heap.cc b/src/3rdparty/v8/test/cctest/test-heap.cc
index baee7e4..0d72ff7 100644
--- a/src/3rdparty/v8/test/cctest/test-heap.cc
+++ b/src/3rdparty/v8/test/cctest/test-heap.cc
@@ -4,10 +4,12 @@
#include "v8.h"
+#include "compilation-cache.h"
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
#include "global-handles.h"
+#include "stub-cache.h"
#include "cctest.h"
#include "snapshot.h"
@@ -22,6 +24,19 @@ static void InitializeVM() {
}
+// Go through all incremental marking steps in one swoop.
+static void SimulateIncrementalMarking() {
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+}
+
+
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
@@ -158,7 +173,8 @@ TEST(HeapObjects) {
String* object_symbol = String::cast(HEAP->Object_symbol());
CHECK(
- Isolate::Current()->context()->global()->HasLocalProperty(object_symbol));
+ Isolate::Current()->context()->global_object()->HasLocalProperty(
+ object_symbol));
// Check ToString for oddballs
CheckOddball(HEAP->true_value(), "true");
@@ -214,7 +230,7 @@ TEST(GarbageCollection) {
Handle<Map> initial_map =
FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
function->set_initial_map(*initial_map);
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
*name, *function, NONE, kNonStrictMode)->ToObjectChecked();
// Allocate an object. Unrooted after leaving the scope.
Handle<JSObject> obj = FACTORY->NewJSObject(function);
@@ -230,9 +246,10 @@ TEST(GarbageCollection) {
HEAP->CollectGarbage(NEW_SPACE);
// Function should be alive.
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*name));
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(*name));
// Check function is retained.
- Object* func_value = Isolate::Current()->context()->global()->
+ Object* func_value = Isolate::Current()->context()->global_object()->
GetProperty(*name)->ToObjectChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(func_value));
@@ -241,7 +258,7 @@ TEST(GarbageCollection) {
HandleScope inner_scope;
// Allocate another object, make it reachable from global.
Handle<JSObject> obj = FACTORY->NewJSObject(function);
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
*obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
obj->SetProperty(
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
@@ -250,10 +267,11 @@ TEST(GarbageCollection) {
// After gc, it should survive.
HEAP->CollectGarbage(NEW_SPACE);
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*obj_name));
- CHECK(Isolate::Current()->context()->global()->
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(*obj_name));
+ CHECK(Isolate::Current()->context()->global_object()->
GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
- Object* obj = Isolate::Current()->context()->global()->
+ Object* obj = Isolate::Current()->context()->global_object()->
GetProperty(*obj_name)->ToObjectChecked();
JSObject* js_obj = JSObject::cast(obj);
CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
@@ -416,6 +434,7 @@ TEST(WeakGlobalHandlesMark) {
global_handles->Destroy(h1.location());
}
+
TEST(DeleteWeakGlobalHandle) {
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
@@ -446,6 +465,7 @@ TEST(DeleteWeakGlobalHandle) {
CHECK(WeakPointerCleared);
}
+
static const char* not_so_random_string_table[] = {
"abstract",
"boolean",
@@ -562,7 +582,7 @@ TEST(ObjectProperties) {
v8::HandleScope sc;
String* object_symbol = String::cast(HEAP->Object_symbol());
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(object_symbol)->ToObjectChecked();
JSFunction* object_function = JSFunction::cast(raw_object);
Handle<JSFunction> constructor(object_function);
@@ -659,7 +679,7 @@ TEST(JSArray) {
v8::HandleScope sc;
Handle<String> name = FACTORY->LookupAsciiSymbol("Array");
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(*name)->ToObjectChecked();
Handle<JSFunction> function = Handle<JSFunction>(
JSFunction::cast(raw_object));
@@ -674,7 +694,7 @@ TEST(JSArray) {
array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
CHECK_EQ(Smi::FromInt(0), array->length());
// Must be in fast mode.
- CHECK(array->HasFastTypeElements());
+ CHECK(array->HasFastSmiOrObjectElements());
// array[length] = name.
array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
@@ -706,7 +726,7 @@ TEST(JSObjectCopy) {
v8::HandleScope sc;
String* object_symbol = String::cast(HEAP->Object_symbol());
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(object_symbol)->ToObjectChecked();
JSFunction* object_function = JSFunction::cast(raw_object);
Handle<JSFunction> constructor(object_function);
@@ -812,7 +832,9 @@ TEST(Iteration) {
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
objs[next_objs_index++] = FACTORY->NewJSArray(10);
- objs[next_objs_index++] = FACTORY->NewJSArray(10, FAST_ELEMENTS, TENURED);
+ objs[next_objs_index++] = FACTORY->NewJSArray(10,
+ FAST_HOLEY_ELEMENTS,
+ TENURED);
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
objs[next_objs_index++] =
@@ -873,7 +895,7 @@ TEST(Regression39128) {
// Step 1: prepare a map for the object. We add 1 inobject property to it.
Handle<JSFunction> object_ctor(
- Isolate::Current()->global_context()->object_function());
+ Isolate::Current()->native_context()->object_function());
CHECK(object_ctor->has_initial_map());
Handle<Map> object_map(object_ctor->initial_map());
// Create a map with single inobject property.
@@ -934,9 +956,9 @@ TEST(Regression39128) {
TEST(TestCodeFlushing) {
- i::FLAG_allow_natives_syntax = true;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
+ i::FLAG_allow_natives_syntax = true;
InitializeVM();
v8::HandleScope scope;
const char* source = "function foo() {"
@@ -953,24 +975,22 @@ TEST(TestCodeFlushing) {
}
// Check function is compiled.
- Object* func_value = Isolate::Current()->context()->global()->
+ Object* func_value = Isolate::Current()->context()->global_object()->
GetProperty(*foo_name)->ToObjectChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(func_value));
CHECK(function->shared()->is_compiled());
- // TODO(1609) Currently incremental marker does not support code flushing.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ // Simulate several GCs that use full marking.
+ const int kAgingThreshold = 6;
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ }
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -982,10 +1002,201 @@ TEST(TestCodeFlushing) {
}
-// Count the number of global contexts in the weak list of global contexts.
-static int CountGlobalContexts() {
+TEST(TestCodeFlushingIncremental) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "function foo() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo()";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check function is compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled());
+
+ // Simulate several GCs that use incremental marking.
+ const int kAgingThreshold = 6;
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->incremental_marking()->Abort();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ }
+ CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+ CHECK(!function->is_compiled() || function->IsOptimized());
+
+ // This compile will compile the function again.
+ { v8::HandleScope scope;
+ CompileRun("foo();");
+ }
+
+ // Simulate several GCs that use incremental marking but make sure
+ // the loop breaks once the function is enqueued as a candidate.
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->incremental_marking()->Abort();
+ SimulateIncrementalMarking();
+ if (!function->next_function_link()->IsUndefined()) break;
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ }
+
+ // Force optimization while incremental marking is active and while
+ // the function is enqueued as a candidate.
+ { v8::HandleScope scope;
+ CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+ }
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+ CHECK(function->is_compiled() || !function->IsOptimized());
+}
+
+
+TEST(TestCodeFlushingIncrementalScavenge) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "var foo = function() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo();"
+ "var bar = function() {"
+ " var x = 23;"
+ "};"
+ "bar();";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+ Handle<String> bar_name = FACTORY->LookupAsciiSymbol("bar");
+
+ // Perfrom one initial GC to enable code flushing.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check functions are compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+ Object* func_value2 = Isolate::Current()->context()->global_object()->
+ GetProperty(*bar_name)->ToObjectChecked();
+ CHECK(func_value2->IsJSFunction());
+ Handle<JSFunction> function2(JSFunction::cast(func_value2));
+ CHECK(function2->shared()->is_compiled());
+
+ // Clear references to functions so that one of them can die.
+ { v8::HandleScope scope;
+ CompileRun("foo = 0; bar = 0;");
+ }
+
+ // Bump the code age so that flushing is triggered while the function
+ // object is still located in new-space.
+ const int kAgingThreshold = 6;
+ function->shared()->set_code_age(kAgingThreshold);
+ function2->shared()->set_code_age(kAgingThreshold);
+
+ // Simulate incremental marking so that the functions are enqueued as
+ // code flushing candidates. Then kill one of the functions. Finally
+ // perform a scavenge while incremental marking is still running.
+ SimulateIncrementalMarking();
+ *function2.location() = NULL;
+ HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+ CHECK(!function->is_compiled() || function->IsOptimized());
+}
+
+
+TEST(TestCodeFlushingIncrementalAbort) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "function foo() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo()";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check function is compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled());
+
+ // Bump the code age so that flushing is triggered.
+ const int kAgingThreshold = 6;
+ function->shared()->set_code_age(kAgingThreshold);
+
+ // Simulate incremental marking so that the function is enqueued as
+ // code flushing candidate.
+ SimulateIncrementalMarking();
+
+ // Enable the debugger and add a breakpoint while incremental marking
+ // is running so that incremental marking aborts and code flushing is
+ // disabled.
+ int position = 0;
+ Handle<Object> breakpoint_object(Smi::FromInt(0));
+ ISOLATE->debug()->SetBreakPoint(function, breakpoint_object, &position);
+ ISOLATE->debug()->ClearAllBreakPoints();
+
+ // Force optimization now that code flushing is disabled.
+ { v8::HandleScope scope;
+ CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+ }
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+ CHECK(function->is_compiled() || !function->IsOptimized());
+}
+
+
+// Count the number of native contexts in the weak list of native contexts.
+int CountNativeContexts() {
int count = 0;
- Object* object = HEAP->global_contexts_list();
+ Object* object = HEAP->native_contexts_list();
while (!object->IsUndefined()) {
count++;
object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
@@ -995,7 +1206,7 @@ static int CountGlobalContexts() {
// Count the number of user functions in the weak list of optimized
-// functions attached to a global context.
+// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
int count = 0;
Handle<Context> icontext = v8::Utils::OpenHandle(*context);
@@ -1016,7 +1227,7 @@ TEST(TestInternalWeakLists) {
v8::HandleScope scope;
v8::Persistent<v8::Context> ctx[kNumTestContexts];
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
// Create a number of global contests which gets linked together.
for (int i = 0; i < kNumTestContexts; i++) {
@@ -1024,7 +1235,7 @@ TEST(TestInternalWeakLists) {
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
- CHECK_EQ(i + 1, CountGlobalContexts());
+ CHECK_EQ(i + 1, CountNativeContexts());
ctx[i]->Enter();
@@ -1059,6 +1270,7 @@ TEST(TestInternalWeakLists) {
}
// Mark compact handles the weak references.
+ ISOLATE->compilation_cache()->Clear();
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
@@ -1084,7 +1296,7 @@ TEST(TestInternalWeakLists) {
// Force compilation cache cleanup.
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- // Dispose the global contexts one by one.
+ // Dispose the native contexts one by one.
for (int i = 0; i < kNumTestContexts; i++) {
ctx[i].Dispose();
ctx[i].Clear();
@@ -1092,23 +1304,23 @@ TEST(TestInternalWeakLists) {
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
HEAP->PerformScavenge();
- CHECK_EQ(kNumTestContexts - i, CountGlobalContexts());
+ CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
}
// Mark compact handles the weak references.
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts());
+ CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
}
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
}
-// Count the number of global contexts in the weak list of global contexts
+// Count the number of native contexts in the weak list of native contexts
// causing a GC after the specified number of elements.
-static int CountGlobalContextsWithGC(int n) {
+static int CountNativeContextsWithGC(int n) {
int count = 0;
- Handle<Object> object(HEAP->global_contexts_list());
+ Handle<Object> object(HEAP->native_contexts_list());
while (!object->IsUndefined()) {
count++;
if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
@@ -1120,7 +1332,7 @@ static int CountGlobalContextsWithGC(int n) {
// Count the number of user functions in the weak list of optimized
-// functions attached to a global context causing a GC after the
+// functions attached to a native context causing a GC after the
// specified number of elements.
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
int n) {
@@ -1146,14 +1358,14 @@ TEST(TestInternalWeakListsTraverseWithGC) {
v8::HandleScope scope;
v8::Persistent<v8::Context> ctx[kNumTestContexts];
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
// Create an number of contexts and check the length of the weak list both
// with and without GCs while iterating the list.
for (int i = 0; i < kNumTestContexts; i++) {
ctx[i] = v8::Context::New();
- CHECK_EQ(i + 1, CountGlobalContexts());
- CHECK_EQ(i + 1, CountGlobalContextsWithGC(i / 2 + 1));
+ CHECK_EQ(i + 1, CountNativeContexts());
+ CHECK_EQ(i + 1, CountNativeContextsWithGC(i / 2 + 1));
}
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
@@ -1197,6 +1409,7 @@ TEST(TestSizeOfObjects) {
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK(HEAP->old_pointer_space()->IsSweepingComplete());
int initial_size = static_cast<int>(HEAP->SizeOfObjects());
@@ -1237,7 +1450,9 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
- size_of_objects_2 += obj->Size();
+ if (!obj->IsFreeSpace()) {
+ size_of_objects_2 += obj->Size();
+ }
}
// Delta must be within 5% of the larger result.
// TODO(gc): Tighten this up by distinguishing between byte
@@ -1278,7 +1493,8 @@ TEST(GrowAndShrinkNewSpace) {
InitializeVM();
NewSpace* new_space = HEAP->new_space();
- if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
+ HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
// The max size cannot exceed the reserved size, since semispaces must be
// always within the reserved space. We can't test new space growing and
// shrinking if the reserved size is the same as the minimum (initial) size.
@@ -1326,7 +1542,8 @@ TEST(GrowAndShrinkNewSpace) {
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
InitializeVM();
- if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
+ HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
// The max size cannot exceed the reserved size, since semispaces must be
// always within the reserved space. We can't test new space growing and
// shrinking if the reserved size is the same as the minimum (initial) size.
@@ -1359,7 +1576,7 @@ static int NumberOfGlobalObjects() {
// Test that we don't embed maps from foreign contexts into
// optimized code.
-TEST(LeakGlobalContextViaMap) {
+TEST(LeakNativeContextViaMap) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1386,6 +1603,7 @@ TEST(LeakGlobalContextViaMap) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1397,7 +1615,7 @@ TEST(LeakGlobalContextViaMap) {
// Test that we don't embed functions from foreign contexts into
// optimized code.
-TEST(LeakGlobalContextViaFunction) {
+TEST(LeakNativeContextViaFunction) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1424,6 +1642,7 @@ TEST(LeakGlobalContextViaFunction) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1433,7 +1652,7 @@ TEST(LeakGlobalContextViaFunction) {
}
-TEST(LeakGlobalContextViaMapKeyed) {
+TEST(LeakNativeContextViaMapKeyed) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1460,6 +1679,7 @@ TEST(LeakGlobalContextViaMapKeyed) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1469,7 +1689,7 @@ TEST(LeakGlobalContextViaMapKeyed) {
}
-TEST(LeakGlobalContextViaMapProto) {
+TEST(LeakNativeContextViaMapProto) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1500,6 +1720,7 @@ TEST(LeakGlobalContextViaMapProto) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1511,9 +1732,10 @@ TEST(LeakGlobalContextViaMapProto) {
TEST(InstanceOfStubWriteBarrier) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1585,22 +1807,24 @@ TEST(PrototypeTransitionClearing) {
// Verify that only dead prototype transitions are cleared.
CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK_EQ(10 - 3, baseObject->map()->NumberOfProtoTransitions());
+ const int transitions = 10 - 3;
+ CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
// Verify that prototype transitions array was compacted.
- FixedArray* trans = baseObject->map()->prototype_transitions();
- for (int i = 0; i < 10 - 3; i++) {
+ FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
+ for (int i = 0; i < transitions; i++) {
int j = Map::kProtoTransitionHeaderSize +
i * Map::kProtoTransitionElementsPerEntry;
CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
- CHECK(trans->get(j + Map::kProtoTransitionPrototypeOffset)->IsJSObject());
+ Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
+ CHECK(proto->IsTheHole() || proto->IsJSObject());
}
// Make sure next prototype is placed on an old-space evacuation candidate.
Handle<JSObject> prototype;
PagedSpace* space = HEAP->old_pointer_space();
do {
- prototype = FACTORY->NewJSArray(32 * KB, FAST_ELEMENTS, TENURED);
+ prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
} while (space->FirstPage() == space->LastPage() ||
!space->LastPage()->Contains(prototype->address()));
@@ -1608,7 +1832,8 @@ TEST(PrototypeTransitionClearing) {
// clearing correctly records slots in prototype transition array.
i::FLAG_always_compact = true;
Handle<Map> map(baseObject->map());
- CHECK(!space->LastPage()->Contains(map->prototype_transitions()->address()));
+ CHECK(!space->LastPage()->Contains(
+ map->GetPrototypeTransitions()->address()));
CHECK(space->LastPage()->Contains(prototype->address()));
baseObject->SetPrototype(*prototype, false)->ToObjectChecked();
CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
@@ -1619,9 +1844,10 @@ TEST(PrototypeTransitionClearing) {
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1674,9 +1900,10 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1743,14 +1970,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
static int CountMapTransitions(Map* map) {
- int result = 0;
- DescriptorArray* descs = map->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsTransitionOnly(i)) {
- result++;
- }
- }
- return result;
+ return map->transitions()->number_of_transitions();
}
@@ -1761,14 +1981,18 @@ TEST(Regress1465) {
i::FLAG_trace_incremental_marking = true;
InitializeVM();
v8::HandleScope scope;
+ static const int transitions_count = 256;
- #define TRANSITION_COUNT 256
- for (int i = 0; i < TRANSITION_COUNT; i++) {
- EmbeddedVector<char, 64> buffer;
- OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
- CompileRun(buffer.start());
+ {
+ AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < transitions_count; i++) {
+ EmbeddedVector<char, 64> buffer;
+ OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
+ CompileRun(buffer.start());
+ }
+ CompileRun("var root = new Object;");
}
- CompileRun("var root = new Object;");
+
Handle<JSObject> root =
v8::Utils::OpenHandle(
*v8::Handle<v8::Object>::Cast(
@@ -1777,19 +2001,10 @@ TEST(Regress1465) {
// Count number of live transitions before marking.
int transitions_before = CountMapTransitions(root->map());
CompileRun("%DebugPrint(root);");
- CHECK_EQ(TRANSITION_COUNT, transitions_before);
+ CHECK_EQ(transitions_count, transitions_before);
- // Go through all incremental marking steps in one swoop.
- IncrementalMarking* marking = HEAP->incremental_marking();
- CHECK(marking->IsStopped());
- marking->Start();
- CHECK(marking->IsMarking());
- while (!marking->IsComplete()) {
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
- }
- CHECK(marking->IsComplete());
+ SimulateIncrementalMarking();
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(marking->IsStopped());
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -1797,3 +2012,496 @@ TEST(Regress1465) {
CompileRun("%DebugPrint(root);");
CHECK_EQ(1, transitions_after);
}
+
+
+TEST(Regress2143a) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ SimulateIncrementalMarking();
+
+ // Compile a StoreIC that performs the prepared map transition. This
+ // will restart incremental marking and should make sure the root is
+ // marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(root);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
+
+
+TEST(Regress2143b) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ SimulateIncrementalMarking();
+
+ // Compile an optimized LStoreNamedField that performs the prepared
+ // map transition. This will restart incremental marking and should
+ // make sure the root is marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(new Object);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(root);"
+ "%DeoptimizeFunction(f);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
+
+
+// Implemented in the test-alloc.cc test suite.
+void SimulateFullSpace(PagedSpace* space);
+
+
+TEST(ReleaseOverReservedPages) {
+ i::FLAG_trace_gc = true;
+ // The optimizer can allocate stuff, messing up the test.
+ i::FLAG_crankshaft = false;
+ i::FLAG_always_opt = false;
+ InitializeVM();
+ v8::HandleScope scope;
+ static const int number_of_test_pages = 20;
+
+ // Prepare many pages with low live-bytes count.
+ PagedSpace* old_pointer_space = HEAP->old_pointer_space();
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+ for (int i = 0; i < number_of_test_pages; i++) {
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(old_pointer_space);
+ FACTORY->NewFixedArray(1, TENURED);
+ }
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering one GC will cause a lot of garbage to be discovered but
+ // even spread across all allocated pages.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering subsequent GCs should cause at least half of the pages
+ // to be released to the OS after at most two cycles.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
+
+ // Triggering a last-resort GC should cause all pages to be released to the
+ // OS so that other processes can seize the memory. If we get a failure here
+ // where there are 2 pages left instead of 1, then we should increase the
+ // size of the first page a little in SizeOfFirstPage in spaces.cc. The
+ // first page should be small in order to reduce memory used when the VM
+ // boots, but if the 20 small arrays don't fit on the first page then that's
+ // an indication that it is too small.
+ HEAP->CollectAllAvailableGarbage("triggered really hard");
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+}
+
+
+TEST(Regress2237) {
+ InitializeVM();
+ v8::HandleScope scope;
+ Handle<String> slice(HEAP->empty_string());
+
+ {
+ // Generate a parent that lives in new-space.
+ v8::HandleScope inner_scope;
+ const char* c = "This text is long enough to trigger sliced strings.";
+ Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
+ CHECK(s->IsSeqAsciiString());
+ CHECK(HEAP->InNewSpace(*s));
+
+ // Generate a sliced string that is based on the above parent and
+ // lives in old-space.
+ FillUpNewSpace(HEAP->new_space());
+ AlwaysAllocateScope always_allocate;
+ Handle<String> t;
+ // TODO(mstarzinger): Unfortunately FillUpNewSpace() still leaves
+ // some slack, so we need to allocate a few sliced strings.
+ for (int i = 0; i < 16; i++) {
+ t = FACTORY->NewProperSubString(s, 5, 35);
+ }
+ CHECK(t->IsSlicedString());
+ CHECK(!HEAP->InNewSpace(*t));
+ *slice.location() = *t.location();
+ }
+
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
+}
+
+
+#ifdef OBJECT_PRINT
+TEST(PrintSharedFunctionInfo) {
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "f = function() { return 987654321; }\n"
+ "g = function() { return 123456789; }\n";
+ CompileRun(source);
+ Handle<JSFunction> g =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
+
+ AssertNoAllocation no_alloc;
+ g->shared()->PrintLn();
+}
+#endif // OBJECT_PRINT
+
+
+TEST(Regress2211) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::String> value = v8_str("val string");
+ Smi* hash = Smi::FromInt(321);
+ Heap* heap = Isolate::Current()->heap();
+
+ for (int i = 0; i < 2; i++) {
+ // Store identity hash first and common hidden property second.
+ v8::Handle<v8::Object> obj = v8::Object::New();
+ Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
+ CHECK(internal_obj->HasFastProperties());
+
+ // In the first iteration, set hidden value first and identity hash second.
+ // In the second iteration, reverse the order.
+ if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
+ MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
+ ALLOW_CREATION);
+ CHECK(!maybe_obj->IsFailure());
+ if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
+
+ // Check values.
+ CHECK_EQ(hash,
+ internal_obj->GetHiddenProperty(heap->identity_hash_symbol()));
+ CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
+
+ // Check size.
+ DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
+ ObjectHashTable* hashtable = ObjectHashTable::cast(
+ internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
+ // HashTable header (5) and 4 initial entries (8).
+ CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
+ }
+}
+
+
+TEST(IncrementalMarkingClearsTypeFeedbackCells) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> fun1, fun2;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() {};");
+ fun1 = env->Global()->Get(v8_str("fun"));
+ }
+
+ {
+ LocalContext env;
+ CompileRun("function fun() {};");
+ fun2 = env->Global()->Get(v8_str("fun"));
+ }
+
+ // Prepare function f that contains type feedback for closures
+ // originating from two different native contexts.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
+ v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
+ CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+ Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
+ f->shared()->code()->type_feedback_info())->type_feedback_cells());
+
+ CHECK_EQ(2, cells->CellCount());
+ CHECK(cells->Cell(0)->value()->IsJSFunction());
+ CHECK(cells->Cell(1)->value()->IsJSFunction());
+
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ CHECK_EQ(2, cells->CellCount());
+ CHECK(cells->Cell(0)->value()->IsTheHole());
+ CHECK(cells->Cell(1)->value()->IsTheHole());
+}
+
+
+static Code* FindFirstIC(Code* code, Code::Kind kind) {
+ int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+ RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
+ for (RelocIterator it(code, mask); !it.done(); it.next()) {
+ RelocInfo* info = it.rinfo();
+ Code* target = Code::GetCodeFromTargetAddress(info->target_address());
+ if (target->is_inline_cache_stub() && target->kind() == kind) {
+ return target;
+ }
+ }
+ return NULL;
+}
+
+
+TEST(IncrementalMarkingPreservesMonomorhpicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare function f that contains a monomorphic IC for object
+ // originating from the same native context.
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
+ "function f(o) { return o.x; } f(obj); f(obj);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MONOMORPHIC);
+
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == MONOMORPHIC);
+}
+
+
+TEST(IncrementalMarkingClearsMonomorhpicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> obj1;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
+ obj1 = env->Global()->Get(v8_str("obj"));
+ }
+
+ // Prepare function f that contains a monomorphic IC for object
+ // originating from a different native context.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
+ CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MONOMORPHIC);
+
+ // Fire context dispose notification.
+ v8::V8::ContextDisposedNotification();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == UNINITIALIZED);
+}
+
+
+TEST(IncrementalMarkingClearsPolymorhpicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> obj1, obj2;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
+ obj1 = env->Global()->Get(v8_str("obj"));
+ }
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
+ obj2 = env->Global()->Get(v8_str("obj"));
+ }
+
+ // Prepare function f that contains a polymorphic IC for objects
+ // originating from two different native contexts.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
+ CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MEGAMORPHIC);
+
+ // Fire context dispose notification.
+ v8::V8::ContextDisposedNotification();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == UNINITIALIZED);
+}
+
+
+class SourceResource: public v8::String::ExternalAsciiStringResource {
+ public:
+ explicit SourceResource(const char* data)
+ : data_(data), length_(strlen(data)) { }
+
+ virtual void Dispose() {
+ i::DeleteArray(data_);
+ data_ = NULL;
+ }
+
+ const char* data() const { return data_; }
+
+ size_t length() const { return length_; }
+
+ bool IsDisposed() { return data_ == NULL; }
+
+ private:
+ const char* data_;
+ size_t length_;
+};
+
+
+TEST(ReleaseStackTraceData) {
+ // Test that the data retained by the Error.stack accessor is released
+ // after the first time the accessor is fired. We use external string
+ // to check whether the data is being released since the external string
+ // resource's callback is fired when the external string is GC'ed.
+ InitializeVM();
+ v8::HandleScope scope;
+ static const char* source = "var error = 1; "
+ "try { "
+ " throw new Error(); "
+ "} catch (e) { "
+ " error = e; "
+ "} ";
+ SourceResource* resource = new SourceResource(i::StrDup(source));
+ {
+ v8::HandleScope scope;
+ v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
+ v8::Script::Compile(source_string)->Run();
+ CHECK(!resource->IsDisposed());
+ }
+ HEAP->CollectAllAvailableGarbage();
+ // External source is being retained by the stack trace.
+ CHECK(!resource->IsDisposed());
+
+ CompileRun("error.stack; error.stack;");
+ HEAP->CollectAllAvailableGarbage();
+ // External source has been released.
+ CHECK(resource->IsDisposed());
+
+ delete resource;
+}
+
+
+TEST(Regression144230) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // First make sure that the uninitialized CallIC stub is on a single page
+ // that will later be selected as an evacuation candidate.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(HEAP->code_space());
+ ISOLATE->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
+ }
+
+ // Second compile a CallIC and execute it once so that it gets patched to
+ // the pre-monomorphic stub. These code objects are on yet another page.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(HEAP->code_space());
+ CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
+ "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
+ "call();");
+ }
+
+ // Third we fill up the last page of the code space so that it does not get
+ // chosen as an evacuation candidate.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ CompileRun("for (var i = 0; i < 2000; i++) {"
+ " eval('function f' + i + '() { return ' + i +'; };' +"
+ " 'f' + i + '();');"
+ "}");
+ }
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ // Fourth is the tricky part. Make sure the code containing the CallIC is
+ // visited first without clearing the IC. The shared function info is then
+ // visited later, causing the CallIC to be cleared.
+ Handle<String> name = FACTORY->LookupAsciiSymbol("call");
+ Handle<GlobalObject> global(ISOLATE->context()->global_object());
+ MaybeObject* maybe_call = global->GetProperty(*name);
+ JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
+ USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
+ ISOLATE->compilation_cache()->Clear();
+ call->shared()->set_ic_age(HEAP->global_ic_age() + 1);
+ Handle<Object> call_code(call->code());
+ Handle<Object> call_function(call);
+
+ // Now we are ready to mess up the heap.
+ HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
+
+ // Either heap verification caught the problem already or we go kaboom once
+ // the CallIC is executed the next time.
+ USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
+ CompileRun("call();");
+}
diff --git a/src/3rdparty/v8/test/cctest/test-list.cc b/src/3rdparty/v8/test/cctest/test-list.cc
index 4c78f02..740b432 100644
--- a/src/3rdparty/v8/test/cctest/test-list.cc
+++ b/src/3rdparty/v8/test/cctest/test-list.cc
@@ -35,7 +35,7 @@ using namespace v8::internal;
// Use a testing allocator that clears memory before deletion.
class ZeroingAllocationPolicy {
public:
- static void* New(size_t size) {
+ void* New(size_t size) {
// Stash the size in the first word to use for Delete.
size_t true_size = size + sizeof(size_t);
size_t* result = reinterpret_cast<size_t*>(malloc(true_size));
diff --git a/src/3rdparty/v8/test/cctest/test-liveedit.cc b/src/3rdparty/v8/test/cctest/test-liveedit.cc
index 2498fca..2c89a38 100644
--- a/src/3rdparty/v8/test/cctest/test-liveedit.cc
+++ b/src/3rdparty/v8/test/cctest/test-liveedit.cc
@@ -81,7 +81,8 @@ class ListDiffOutputWriter : public Comparator::Output {
(*next_chunk_pointer_) = NULL;
}
void AddChunk(int pos1, int pos2, int len1, int len2) {
- current_chunk_ = new DiffChunkStruct(pos1, pos2, len1, len2);
+ current_chunk_ = new(Isolate::Current()->runtime_zone()) DiffChunkStruct(
+ pos1, pos2, len1, len2);
(*next_chunk_pointer_) = current_chunk_;
next_chunk_pointer_ = &current_chunk_->next;
}
@@ -95,7 +96,7 @@ void CompareStringsOneWay(const char* s1, const char* s2,
int expected_diff_parameter = -1) {
StringCompareInput input(s1, s2);
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
DiffChunkStruct* first_chunk;
ListDiffOutputWriter writer(&first_chunk);
diff --git a/src/3rdparty/v8/test/cctest/test-mark-compact.cc b/src/3rdparty/v8/test/cctest/test-mark-compact.cc
index 700f322..c0ab763 100644
--- a/src/3rdparty/v8/test/cctest/test-mark-compact.cc
+++ b/src/3rdparty/v8/test/cctest/test-mark-compact.cc
@@ -194,7 +194,7 @@ TEST(MarkCompactCollector) {
Map::cast(HEAP->AllocateMap(JS_OBJECT_TYPE,
JSObject::kHeaderSize)->ToObjectChecked());
function->set_initial_map(initial_map);
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
func_name, function, NONE, kNonStrictMode)->ToObjectChecked();
JSObject* obj = JSObject::cast(
@@ -203,8 +203,9 @@ TEST(MarkCompactCollector) {
func_name =
String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(func_name));
- Object* func_value = Isolate::Current()->context()->global()->
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(func_name));
+ Object* func_value = Isolate::Current()->context()->global_object()->
GetProperty(func_name)->ToObjectChecked();
CHECK(func_value->IsJSFunction());
function = JSFunction::cast(func_value);
@@ -212,7 +213,7 @@ TEST(MarkCompactCollector) {
obj = JSObject::cast(HEAP->AllocateJSObject(function)->ToObjectChecked());
String* obj_name =
String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
obj_name, obj, NONE, kNonStrictMode)->ToObjectChecked();
String* prop_name =
String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
@@ -225,10 +226,11 @@ TEST(MarkCompactCollector) {
obj_name =
String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(obj_name));
- CHECK(Isolate::Current()->context()->global()->
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(obj_name));
+ CHECK(Isolate::Current()->context()->global_object()->
GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
- obj = JSObject::cast(Isolate::Current()->context()->global()->
+ obj = JSObject::cast(Isolate::Current()->context()->global_object()->
GetProperty(obj_name)->ToObjectChecked());
prop_name =
String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
@@ -526,23 +528,26 @@ static intptr_t MemoryInUse() {
TEST(BootUpMemoryUse) {
intptr_t initial_memory = MemoryInUse();
- FLAG_crankshaft = false; // Avoid flakiness.
+ // Avoid flakiness.
+ FLAG_crankshaft = false;
+ FLAG_parallel_recompilation = false;
+
// Only Linux has the proc filesystem and only if it is mapped. If it's not
// there we just skip the test.
if (initial_memory >= 0) {
InitializeVM();
- intptr_t booted_memory = MemoryInUse();
+ intptr_t delta = MemoryInUse() - initial_memory;
if (sizeof(initial_memory) == 8) {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 3600 * 1024); // 3396.
+ CHECK_LE(delta, 3600 * 1024); // 3396.
} else {
- CHECK_LE(booted_memory - initial_memory, 3600 * 1024); // 3432.
+ CHECK_LE(delta, 4000 * 1024); // 3948.
}
} else {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 2800 * 1024); // 2484.
+ CHECK_LE(delta, 2500 * 1024); // 2400.
} else {
- CHECK_LE(booted_memory - initial_memory, 2950 * 1024); // 2844
+ CHECK_LE(delta, 2860 * 1024); // 2760.
}
}
}
diff --git a/src/3rdparty/v8/test/cctest/test-object-observe.cc b/src/3rdparty/v8/test/cctest/test-object-observe.cc
new file mode 100644
index 0000000..374dca4
--- /dev/null
+++ b/src/3rdparty/v8/test/cctest/test-object-observe.cc
@@ -0,0 +1,196 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "cctest.h"
+
+using namespace v8;
+
+namespace {
+// Need to create a new isolate when FLAG_harmony_observation is on.
+class HarmonyIsolate {
+ public:
+ HarmonyIsolate() {
+ i::FLAG_harmony_observation = true;
+ isolate_ = Isolate::New();
+ isolate_->Enter();
+ }
+
+ ~HarmonyIsolate() {
+ isolate_->Exit();
+ isolate_->Dispose();
+ }
+
+ private:
+ Isolate* isolate_;
+};
+}
+
+TEST(PerIsolateState) {
+ HarmonyIsolate isolate;
+ HandleScope scope;
+ LocalContext context1;
+ CompileRun(
+ "var count = 0;"
+ "var calls = 0;"
+ "var observer = function(records) { count = records.length; calls++ };"
+ "var obj = {};"
+ "Object.observe(obj, observer);");
+ Handle<Value> observer = CompileRun("observer");
+ Handle<Value> obj = CompileRun("obj");
+ Handle<Value> notify_fun1 = CompileRun(
+ "(function() { obj.foo = 'bar'; })");
+ Handle<Value> notify_fun2;
+ {
+ LocalContext context2;
+ context2->Global()->Set(String::New("obj"), obj);
+ notify_fun2 = CompileRun(
+ "(function() { obj.foo = 'baz'; })");
+ }
+ Handle<Value> notify_fun3;
+ {
+ LocalContext context3;
+ context3->Global()->Set(String::New("obj"), obj);
+ notify_fun3 = CompileRun(
+ "(function() { obj.foo = 'bat'; })");
+ }
+ {
+ LocalContext context4;
+ context4->Global()->Set(String::New("observer"), observer);
+ context4->Global()->Set(String::New("fun1"), notify_fun1);
+ context4->Global()->Set(String::New("fun2"), notify_fun2);
+ context4->Global()->Set(String::New("fun3"), notify_fun3);
+ CompileRun("fun1(); fun2(); fun3(); Object.deliverChangeRecords(observer)");
+ }
+ CHECK_EQ(1, CompileRun("calls")->Int32Value());
+ CHECK_EQ(3, CompileRun("count")->Int32Value());
+}
+
+TEST(EndOfMicrotaskDelivery) {
+ HarmonyIsolate isolate;
+ HandleScope scope;
+ LocalContext context;
+ CompileRun(
+ "var obj = {};"
+ "var count = 0;"
+ "var observer = function(records) { count = records.length };"
+ "Object.observe(obj, observer);"
+ "obj.foo = 'bar';");
+ CHECK_EQ(1, CompileRun("count")->Int32Value());
+}
+
+TEST(DeliveryOrdering) {
+ HarmonyIsolate isolate;
+ HandleScope scope;
+ LocalContext context;
+ CompileRun(
+ "var obj1 = {};"
+ "var obj2 = {};"
+ "var ordering = [];"
+ "function observer2() { ordering.push(2); };"
+ "function observer1() { ordering.push(1); };"
+ "function observer3() { ordering.push(3); };"
+ "Object.observe(obj1, observer1);"
+ "Object.observe(obj1, observer2);"
+ "Object.observe(obj1, observer3);"
+ "obj1.foo = 'bar';");
+ CHECK_EQ(3, CompileRun("ordering.length")->Int32Value());
+ CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+ CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+ CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+ CompileRun(
+ "ordering = [];"
+ "Object.observe(obj2, observer3);"
+ "Object.observe(obj2, observer2);"
+ "Object.observe(obj2, observer1);"
+ "obj2.foo = 'baz'");
+ CHECK_EQ(3, CompileRun("ordering.length")->Int32Value());
+ CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+ CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+ CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+}
+
+TEST(DeliveryOrderingReentrant) {
+ HarmonyIsolate isolate;
+ HandleScope scope;
+ LocalContext context;
+ CompileRun(
+ "var obj = {};"
+ "var reentered = false;"
+ "var ordering = [];"
+ "function observer1() { ordering.push(1); };"
+ "function observer2() {"
+ " if (!reentered) {"
+ " obj.foo = 'baz';"
+ " reentered = true;"
+ " }"
+ " ordering.push(2);"
+ "};"
+ "function observer3() { ordering.push(3); };"
+ "Object.observe(obj, observer1);"
+ "Object.observe(obj, observer2);"
+ "Object.observe(obj, observer3);"
+ "obj.foo = 'bar';");
+ CHECK_EQ(5, CompileRun("ordering.length")->Int32Value());
+ CHECK_EQ(1, CompileRun("ordering[0]")->Int32Value());
+ CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+ CHECK_EQ(3, CompileRun("ordering[2]")->Int32Value());
+ // Note that we re-deliver to observers 1 and 2, while observer3
+ // already received the second record during the first round.
+ CHECK_EQ(1, CompileRun("ordering[3]")->Int32Value());
+ CHECK_EQ(2, CompileRun("ordering[1]")->Int32Value());
+}
+
+TEST(ObjectHashTableGrowth) {
+ HarmonyIsolate isolate;
+ HandleScope scope;
+ // Initializing this context sets up initial hash tables.
+ LocalContext context;
+ Handle<Value> obj = CompileRun("obj = {};");
+ Handle<Value> observer = CompileRun(
+ "var ran = false;"
+ "(function() { ran = true })");
+ {
+ // As does initializing this context.
+ LocalContext context2;
+ context2->Global()->Set(String::New("obj"), obj);
+ context2->Global()->Set(String::New("observer"), observer);
+ CompileRun(
+ "var objArr = [];"
+ // 100 objects should be enough to make the hash table grow
+ // (and thus relocate).
+ "for (var i = 0; i < 100; ++i) {"
+ " objArr.push({});"
+ " Object.observe(objArr[objArr.length-1], function(){});"
+ "}"
+ "Object.observe(obj, observer);");
+ }
+ // obj is now marked "is_observed", but our map has moved.
+ CompileRun("obj.foo = 'bar'");
+ CHECK(CompileRun("ran")->BooleanValue());
+}
diff --git a/src/3rdparty/v8/test/cctest/test-parsing.cc b/src/3rdparty/v8/test/cctest/test-parsing.cc
index 6bcae7c..717c665 100755
--- a/src/3rdparty/v8/test/cctest/test-parsing.cc
+++ b/src/3rdparty/v8/test/cctest/test-parsing.cc
@@ -354,7 +354,8 @@ TEST(Regress928) {
v8::HandleScope handles;
i::Handle<i::String> source(
FACTORY->NewStringFromAscii(i::CStrVector(program)));
- i::ScriptDataImpl* data = i::ParserApi::PartialPreParse(source, NULL, false);
+ i::GenericStringUtf16CharacterStream stream(source, 0, source->length());
+ i::ScriptDataImpl* data = i::ParserApi::PreParse(&stream, NULL, false);
CHECK(!data->HasError());
data->Initialize();
@@ -1016,11 +1017,11 @@ TEST(ScopePositions) {
FACTORY->NewStringFromUtf8(i::CStrVector(program.start())));
CHECK_EQ(source->length(), kProgramSize);
i::Handle<i::Script> script = FACTORY->NewScript(source);
- i::Parser parser(script, i::kAllowLazy | i::EXTENDED_MODE, NULL, NULL);
- i::CompilationInfo info(script);
+ i::CompilationInfoWithZone info(script);
+ i::Parser parser(&info, i::kAllowLazy | i::EXTENDED_MODE, NULL, NULL);
info.MarkAsGlobal();
info.SetLanguageMode(source_data[i].language_mode);
- i::FunctionLiteral* function = parser.ParseProgram(&info);
+ i::FunctionLiteral* function = parser.ParseProgram();
CHECK(function != NULL);
// Check scope types and positions.
@@ -1060,10 +1061,10 @@ void TestParserSync(i::Handle<i::String> source, int flags) {
i::Handle<i::Script> script = FACTORY->NewScript(source);
bool save_harmony_scoping = i::FLAG_harmony_scoping;
i::FLAG_harmony_scoping = harmony_scoping;
- i::Parser parser(script, flags, NULL, NULL);
- i::CompilationInfo info(script);
+ i::CompilationInfoWithZone info(script);
+ i::Parser parser(&info, flags, NULL, NULL);
info.MarkAsGlobal();
- i::FunctionLiteral* function = parser.ParseProgram(&info);
+ i::FunctionLiteral* function = parser.ParseProgram();
i::FLAG_harmony_scoping = save_harmony_scoping;
i::String* type_string = NULL;
@@ -1147,6 +1148,7 @@ TEST(ParserSync) {
{ "with ({})", "" },
{ "switch (12) { case 12: ", "}" },
{ "switch (12) { default: ", "}" },
+ { "switch (12) { ", "case 12: }" },
{ "label2: ", "" },
{ NULL, NULL }
};
@@ -1236,3 +1238,26 @@ TEST(ParserSync) {
}
}
}
+
+
+TEST(PreparserStrictOctal) {
+ // Test that syntax error caused by octal literal is reported correctly as
+ // such (issue 2220).
+ v8::internal::FLAG_min_preparse_length = 1; // Force preparsing.
+ v8::V8::Initialize();
+ v8::HandleScope scope;
+ v8::Context::Scope context_scope(v8::Context::New());
+ v8::TryCatch try_catch;
+ const char* script =
+ "\"use strict\"; \n"
+ "a = function() { \n"
+ " b = function() { \n"
+ " 01; \n"
+ " }; \n"
+ "}; \n";
+ v8::Script::Compile(v8::String::New(script));
+ CHECK(try_catch.HasCaught());
+ v8::String::Utf8Value exception(try_catch.Exception());
+ CHECK_EQ("SyntaxError: Octal literals are not allowed in strict mode.",
+ *exception);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-platform-linux.cc b/src/3rdparty/v8/test/cctest/test-platform-linux.cc
index 2a8d497..47b99f0 100644
--- a/src/3rdparty/v8/test/cctest/test-platform-linux.cc
+++ b/src/3rdparty/v8/test/cctest/test-platform-linux.cc
@@ -79,3 +79,9 @@ TEST(VirtualMemory) {
CHECK(vm->Uncommit(block_addr, block_size));
delete vm;
}
+
+
+TEST(GetCurrentProcessId) {
+ OS::SetUp();
+ CHECK_EQ(static_cast<int>(getpid()), OS::GetCurrentProcessId());
+}
diff --git a/src/3rdparty/v8/test/cctest/test-platform-win32.cc b/src/3rdparty/v8/test/cctest/test-platform-win32.cc
index 36b30aa..668ccdb 100644
--- a/src/3rdparty/v8/test/cctest/test-platform-win32.cc
+++ b/src/3rdparty/v8/test/cctest/test-platform-win32.cc
@@ -25,3 +25,10 @@ TEST(VirtualMemory) {
CHECK(vm->Uncommit(block_addr, block_size));
delete vm;
}
+
+
+TEST(GetCurrentProcessId) {
+ OS::SetUp();
+ CHECK_EQ(static_cast<int>(::GetCurrentProcessId()),
+ OS::GetCurrentProcessId());
+}
diff --git a/src/3rdparty/v8/test/cctest/test-random.cc b/src/3rdparty/v8/test/cctest/test-random.cc
index a1f4931..86d6d8c 100644
--- a/src/3rdparty/v8/test/cctest/test-random.cc
+++ b/src/3rdparty/v8/test/cctest/test-random.cc
@@ -52,7 +52,7 @@ void TestSeeds(Handle<JSFunction> fun,
uint32_t state0,
uint32_t state1) {
bool has_pending_exception;
- Handle<JSObject> global(context->global());
+ Handle<JSObject> global(context->global_object());
Handle<ByteArray> seeds(context->random_seed());
SetSeeds(seeds, state0, state1);
@@ -77,7 +77,7 @@ TEST(CrankshaftRandom) {
env->Enter();
Handle<Context> context(Isolate::Current()->context());
- Handle<JSObject> global(context->global());
+ Handle<JSObject> global(context->global_object());
Handle<ByteArray> seeds(context->random_seed());
bool has_pending_exception;
@@ -85,7 +85,7 @@ TEST(CrankshaftRandom) {
Object* symbol = FACTORY->LookupAsciiSymbol("f")->ToObjectChecked();
MaybeObject* fun_object =
- context->global()->GetProperty(String::cast(symbol));
+ context->global_object()->GetProperty(String::cast(symbol));
Handle<JSFunction> fun(JSFunction::cast(fun_object->ToObjectChecked()));
// Optimize function.
diff --git a/src/3rdparty/v8/test/cctest/test-regexp.cc b/src/3rdparty/v8/test/cctest/test-regexp.cc
index e89e6cd..e433b92 100644
--- a/src/3rdparty/v8/test/cctest/test-regexp.cc
+++ b/src/3rdparty/v8/test/cctest/test-regexp.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -72,23 +72,26 @@ using namespace v8::internal;
static bool CheckParse(const char* input) {
V8::Initialize(NULL);
v8::HandleScope scope;
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
FlatStringReader reader(Isolate::Current(), CStrVector(input));
RegExpCompileData result;
- return v8::internal::RegExpParser::ParseRegExp(&reader, false, &result);
+ return v8::internal::RegExpParser::ParseRegExp(
+ &reader, false, &result, Isolate::Current()->runtime_zone());
}
static SmartArrayPointer<const char> Parse(const char* input) {
V8::Initialize(NULL);
v8::HandleScope scope;
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
FlatStringReader reader(Isolate::Current(), CStrVector(input));
RegExpCompileData result;
- CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
+ CHECK(v8::internal::RegExpParser::ParseRegExp(
+ &reader, false, &result, Isolate::Current()->runtime_zone()));
CHECK(result.tree != NULL);
CHECK(result.error.is_null());
- SmartArrayPointer<const char> output = result.tree->ToString();
+ SmartArrayPointer<const char> output =
+ result.tree->ToString(Isolate::Current()->runtime_zone());
return output;
}
@@ -96,10 +99,11 @@ static bool CheckSimple(const char* input) {
V8::Initialize(NULL);
v8::HandleScope scope;
unibrow::Utf8InputBuffer<> buffer(input, StrLength(input));
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
FlatStringReader reader(Isolate::Current(), CStrVector(input));
RegExpCompileData result;
- CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
+ CHECK(v8::internal::RegExpParser::ParseRegExp(
+ &reader, false, &result, Isolate::Current()->runtime_zone()));
CHECK(result.tree != NULL);
CHECK(result.error.is_null());
return result.simple;
@@ -114,10 +118,11 @@ static MinMaxPair CheckMinMaxMatch(const char* input) {
V8::Initialize(NULL);
v8::HandleScope scope;
unibrow::Utf8InputBuffer<> buffer(input, StrLength(input));
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
FlatStringReader reader(Isolate::Current(), CStrVector(input));
RegExpCompileData result;
- CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
+ CHECK(v8::internal::RegExpParser::ParseRegExp(
+ &reader, false, &result, Isolate::Current()->runtime_zone()));
CHECK(result.tree != NULL);
CHECK(result.error.is_null());
int min_match = result.tree->min_match();
@@ -262,6 +267,7 @@ TEST(Parser) {
CHECK_PARSE_EQ("\\u003z", "'u003z'");
CHECK_PARSE_EQ("foo[z]*", "(: 'foo' (# 0 - g [z]))");
+ CHECK_SIMPLE("", false);
CHECK_SIMPLE("a", true);
CHECK_SIMPLE("a|b", false);
CHECK_SIMPLE("a\\n", false);
@@ -385,10 +391,11 @@ static void ExpectError(const char* input,
const char* expected) {
V8::Initialize(NULL);
v8::HandleScope scope;
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
FlatStringReader reader(Isolate::Current(), CStrVector(input));
RegExpCompileData result;
- CHECK(!v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
+ CHECK(!v8::internal::RegExpParser::ParseRegExp(
+ &reader, false, &result, Isolate::Current()->runtime_zone()));
CHECK(result.tree == NULL);
CHECK(!result.error.is_null());
SmartArrayPointer<char> str = result.error->ToCString(ALLOW_NULLS);
@@ -468,9 +475,11 @@ static bool NotWord(uc16 c) {
static void TestCharacterClassEscapes(uc16 c, bool (pred)(uc16 c)) {
- ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* ranges = new ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(c, ranges);
+ ZoneScope scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
+ ZoneList<CharacterRange>* ranges =
+ new(zone) ZoneList<CharacterRange>(2, zone);
+ CharacterRange::AddClassEscape(c, ranges, zone);
for (unsigned i = 0; i < (1 << 16); i++) {
bool in_class = false;
for (int j = 0; !in_class && j < ranges->length(); j++) {
@@ -500,14 +509,21 @@ static RegExpNode* Compile(const char* input, bool multiline, bool is_ascii) {
FlatStringReader reader(isolate, CStrVector(input));
RegExpCompileData compile_data;
if (!v8::internal::RegExpParser::ParseRegExp(&reader, multiline,
- &compile_data))
+ &compile_data,
+ isolate->runtime_zone()))
return NULL;
Handle<String> pattern = isolate->factory()->
NewStringFromUtf8(CStrVector(input));
Handle<String> sample_subject =
isolate->factory()->NewStringFromUtf8(CStrVector(""));
- RegExpEngine::Compile(
- &compile_data, false, multiline, pattern, sample_subject, is_ascii);
+ RegExpEngine::Compile(&compile_data,
+ false,
+ false,
+ multiline,
+ pattern,
+ sample_subject,
+ is_ascii,
+ isolate->runtime_zone());
return compile_data.node;
}
@@ -517,7 +533,7 @@ static void Execute(const char* input,
bool is_ascii,
bool dot_output = false) {
v8::HandleScope scope;
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
RegExpNode* node = Compile(input, multiline, is_ascii);
USE(node);
#ifdef DEBUG
@@ -557,8 +573,8 @@ static unsigned PseudoRandom(int i, int j) {
TEST(SplayTreeSimple) {
v8::internal::V8::Initialize(NULL);
static const unsigned kLimit = 1000;
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneSplayTree<TestConfig> tree;
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ ZoneSplayTree<TestConfig> tree(Isolate::Current()->runtime_zone());
bool seen[kLimit];
for (unsigned i = 0; i < kLimit; i++) seen[i] = false;
#define CHECK_MAPS_EQUAL() do { \
@@ -625,12 +641,13 @@ TEST(DispatchTableConstruction) {
}
}
// Enter test data into dispatch table.
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- DispatchTable table;
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ DispatchTable table(Isolate::Current()->runtime_zone());
for (int i = 0; i < kRangeCount; i++) {
uc16* range = ranges[i];
for (int j = 0; j < 2 * kRangeSize; j += 2)
- table.AddRange(CharacterRange(range[j], range[j + 1]), i);
+ table.AddRange(CharacterRange(range[j], range[j + 1]), i,
+ Isolate::Current()->runtime_zone());
}
// Check that the table looks as we would expect
for (int p = 0; p < kLimit; p++) {
@@ -692,7 +709,8 @@ typedef RegExpMacroAssemblerMIPS ArchRegExpMacroAssembler;
class ContextInitializer {
public:
ContextInitializer()
- : env_(), scope_(), zone_(Isolate::Current(), DELETE_ON_EXIT) {
+ : env_(), scope_(), zone_(Isolate::Current()->runtime_zone(),
+ DELETE_ON_EXIT) {
env_ = v8::Context::New();
env_->Enter();
}
@@ -720,6 +738,7 @@ static ArchRegExpMacroAssembler::Result Execute(Code* code,
input_start,
input_end,
captures,
+ 0,
Isolate::Current());
}
@@ -729,7 +748,8 @@ TEST(MacroAssemblerNativeSuccess) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->runtime_zone());
m.Succeed();
@@ -764,7 +784,8 @@ TEST(MacroAssemblerNativeSimple) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->runtime_zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -821,7 +842,8 @@ TEST(MacroAssemblerNativeSimpleUC16) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4,
+ Isolate::Current()->runtime_zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -883,7 +905,8 @@ TEST(MacroAssemblerNativeBacktrack) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->runtime_zone());
Label fail;
Label backtrack;
@@ -921,7 +944,8 @@ TEST(MacroAssemblerNativeBackReferenceASCII) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->runtime_zone());
m.WriteCurrentPositionToRegister(0, 0);
m.AdvanceCurrentPosition(2);
@@ -968,7 +992,8 @@ TEST(MacroAssemblerNativeBackReferenceUC16) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4,
+ Isolate::Current()->runtime_zone());
m.WriteCurrentPositionToRegister(0, 0);
m.AdvanceCurrentPosition(2);
@@ -998,11 +1023,11 @@ TEST(MacroAssemblerNativeBackReferenceUC16) {
int output[4];
NativeRegExpMacroAssembler::Result result =
Execute(*code,
- *input,
- 0,
- start_adr,
- start_adr + input->length() * 2,
- output);
+ *input,
+ 0,
+ start_adr,
+ start_adr + input->length() * 2,
+ output);
CHECK_EQ(NativeRegExpMacroAssembler::SUCCESS, result);
CHECK_EQ(0, output[0]);
@@ -1018,7 +1043,8 @@ TEST(MacroAssemblernativeAtStart) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->runtime_zone());
Label not_at_start, newline, fail;
m.CheckNotAtStart(&not_at_start);
@@ -1075,7 +1101,8 @@ TEST(MacroAssemblerNativeBackRefNoCase) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->runtime_zone());
Label fail, succ;
@@ -1132,7 +1159,8 @@ TEST(MacroAssemblerNativeRegisters) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 6);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 6,
+ Isolate::Current()->runtime_zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -1234,7 +1262,8 @@ TEST(MacroAssemblerStackOverflow) {
Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->runtime_zone());
Label loop;
m.Bind(&loop);
@@ -1272,7 +1301,8 @@ TEST(MacroAssemblerNativeLotsOfRegisters) {
Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 2);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 2,
+ Isolate::Current()->runtime_zone());
// At least 2048, to ensure the allocated space for registers
// span one full page.
@@ -1319,7 +1349,8 @@ TEST(MacroAssemblerNativeLotsOfRegisters) {
TEST(MacroAssembler) {
V8::Initialize(NULL);
byte codes[1024];
- RegExpMacroAssemblerIrregexp m(Vector<byte>(codes, 1024));
+ RegExpMacroAssemblerIrregexp m(Vector<byte>(codes, 1024),
+ Isolate::Current()->runtime_zone());
// ^f(o)o.
Label fail, fail2, start;
uc16 foo_chars[3];
@@ -1389,17 +1420,20 @@ TEST(AddInverseToTable) {
static const int kLimit = 1000;
static const int kRangeCount = 16;
for (int t = 0; t < 10; t++) {
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
ZoneList<CharacterRange>* ranges =
- new ZoneList<CharacterRange>(kRangeCount);
+ new(zone)
+ ZoneList<CharacterRange>(kRangeCount, zone);
for (int i = 0; i < kRangeCount; i++) {
int from = PseudoRandom(t + 87, i + 25) % kLimit;
int to = from + (PseudoRandom(i + 87, t + 25) % (kLimit / 20));
if (to > kLimit) to = kLimit;
- ranges->Add(CharacterRange(from, to));
+ ranges->Add(CharacterRange(from, to), zone);
}
- DispatchTable table;
- DispatchTableConstructor cons(&table, false);
+ DispatchTable table(zone);
+ DispatchTableConstructor cons(&table, false,
+ Isolate::Current()->runtime_zone());
cons.set_choice_index(0);
cons.AddInverse(ranges);
for (int i = 0; i < kLimit; i++) {
@@ -1410,12 +1444,14 @@ TEST(AddInverseToTable) {
CHECK_EQ(is_on, set->Get(0) == false);
}
}
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
ZoneList<CharacterRange>* ranges =
- new ZoneList<CharacterRange>(1);
- ranges->Add(CharacterRange(0xFFF0, 0xFFFE));
- DispatchTable table;
- DispatchTableConstructor cons(&table, false);
+ new(zone) ZoneList<CharacterRange>(1, zone);
+ ranges->Add(CharacterRange(0xFFF0, 0xFFFE), zone);
+ DispatchTable table(zone);
+ DispatchTableConstructor cons(&table, false,
+ Isolate::Current()->runtime_zone());
cons.set_choice_index(0);
cons.AddInverse(ranges);
CHECK(!table.Get(0xFFFE)->Get(0));
@@ -1523,10 +1559,12 @@ TEST(UncanonicalizeEquivalence) {
static void TestRangeCaseIndependence(CharacterRange input,
Vector<CharacterRange> expected) {
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
int count = expected.length();
- ZoneList<CharacterRange>* list = new ZoneList<CharacterRange>(count);
- input.AddCaseEquivalents(list, false);
+ ZoneList<CharacterRange>* list =
+ new(zone) ZoneList<CharacterRange>(count, zone);
+ input.AddCaseEquivalents(list, false, zone);
CHECK_EQ(count, list->length());
for (int i = 0; i < list->length(); i++) {
CHECK_EQ(expected[i].from(), list->at(i).from());
@@ -1587,13 +1625,16 @@ static bool InClass(uc16 c, ZoneList<CharacterRange>* ranges) {
TEST(CharClassDifference) {
v8::internal::V8::Initialize(NULL);
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* base = new ZoneList<CharacterRange>(1);
- base->Add(CharacterRange::Everything());
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
+ ZoneList<CharacterRange>* base =
+ new(zone) ZoneList<CharacterRange>(1, zone);
+ base->Add(CharacterRange::Everything(), zone);
Vector<const int> overlay = CharacterRange::GetWordBounds();
ZoneList<CharacterRange>* included = NULL;
ZoneList<CharacterRange>* excluded = NULL;
- CharacterRange::Split(base, overlay, &included, &excluded);
+ CharacterRange::Split(base, overlay, &included, &excluded,
+ Isolate::Current()->runtime_zone());
for (int i = 0; i < (1 << 16); i++) {
bool in_base = InClass(i, base);
if (in_base) {
@@ -1614,13 +1655,15 @@ TEST(CharClassDifference) {
TEST(CanonicalizeCharacterSets) {
v8::internal::V8::Initialize(NULL);
- ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* list = new ZoneList<CharacterRange>(4);
+ ZoneScope scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->runtime_zone();
+ ZoneList<CharacterRange>* list =
+ new(zone) ZoneList<CharacterRange>(4, zone);
CharacterSet set(list);
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(30, 40));
- list->Add(CharacterRange(50, 60));
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(30, 40), zone);
+ list->Add(CharacterRange(50, 60), zone);
set.Canonicalize();
ASSERT_EQ(3, list->length());
ASSERT_EQ(10, list->at(0).from());
@@ -1631,9 +1674,9 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(60, list->at(2).to());
list->Rewind(0);
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(50, 60));
- list->Add(CharacterRange(30, 40));
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(50, 60), zone);
+ list->Add(CharacterRange(30, 40), zone);
set.Canonicalize();
ASSERT_EQ(3, list->length());
ASSERT_EQ(10, list->at(0).from());
@@ -1644,11 +1687,11 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(60, list->at(2).to());
list->Rewind(0);
- list->Add(CharacterRange(30, 40));
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(25, 25));
- list->Add(CharacterRange(100, 100));
- list->Add(CharacterRange(1, 1));
+ list->Add(CharacterRange(30, 40), zone);
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(25, 25), zone);
+ list->Add(CharacterRange(100, 100), zone);
+ list->Add(CharacterRange(1, 1), zone);
set.Canonicalize();
ASSERT_EQ(5, list->length());
ASSERT_EQ(1, list->at(0).from());
@@ -1663,9 +1706,9 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(100, list->at(4).to());
list->Rewind(0);
- list->Add(CharacterRange(10, 19));
- list->Add(CharacterRange(21, 30));
- list->Add(CharacterRange(20, 20));
+ list->Add(CharacterRange(10, 19), zone);
+ list->Add(CharacterRange(21, 30), zone);
+ list->Add(CharacterRange(20, 20), zone);
set.Canonicalize();
ASSERT_EQ(1, list->length());
ASSERT_EQ(10, list->at(0).from());
@@ -1675,9 +1718,10 @@ TEST(CanonicalizeCharacterSets) {
TEST(CharacterRangeMerge) {
v8::internal::V8::Initialize(NULL);
- ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange> l1(4);
- ZoneList<CharacterRange> l2(4);
+ ZoneScope zone_scope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
+ ZoneList<CharacterRange> l1(4, Isolate::Current()->runtime_zone());
+ ZoneList<CharacterRange> l2(4, Isolate::Current()->runtime_zone());
+ Zone* zone = Isolate::Current()->runtime_zone();
// Create all combinations of intersections of ranges, both singletons and
// longer.
@@ -1692,8 +1736,8 @@ TEST(CharacterRangeMerge) {
// Y - outside after
for (int i = 0; i < 5; i++) {
- l1.Add(CharacterRange::Singleton(offset + 2));
- l2.Add(CharacterRange::Singleton(offset + i));
+ l1.Add(CharacterRange::Singleton(offset + 2), zone);
+ l2.Add(CharacterRange::Singleton(offset + i), zone);
offset += 6;
}
@@ -1708,8 +1752,8 @@ TEST(CharacterRangeMerge) {
// Y - disjoint after
for (int i = 0; i < 7; i++) {
- l1.Add(CharacterRange::Range(offset + 2, offset + 4));
- l2.Add(CharacterRange::Singleton(offset + i));
+ l1.Add(CharacterRange::Range(offset + 2, offset + 4), zone);
+ l2.Add(CharacterRange::Singleton(offset + i), zone);
offset += 8;
}
@@ -1729,35 +1773,35 @@ TEST(CharacterRangeMerge) {
// YYYYYYYYYYYY - containing entirely.
for (int i = 0; i < 9; i++) {
- l1.Add(CharacterRange::Range(offset + 6, offset + 15)); // Length 8.
- l2.Add(CharacterRange::Range(offset + 2 * i, offset + 2 * i + 3));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone); // Length 8.
+ l2.Add(CharacterRange::Range(offset + 2 * i, offset + 2 * i + 3), zone);
offset += 22;
}
- l1.Add(CharacterRange::Range(offset + 6, offset + 15));
- l2.Add(CharacterRange::Range(offset + 6, offset + 15));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
+ l2.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
offset += 22;
- l1.Add(CharacterRange::Range(offset + 6, offset + 15));
- l2.Add(CharacterRange::Range(offset + 4, offset + 17));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
+ l2.Add(CharacterRange::Range(offset + 4, offset + 17), zone);
offset += 22;
// Different kinds of multi-range overlap:
// XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
// YYYY Y YYYY Y YYYY Y YYYY Y YYYY Y YYYY Y
- l1.Add(CharacterRange::Range(offset, offset + 21));
- l1.Add(CharacterRange::Range(offset + 31, offset + 52));
+ l1.Add(CharacterRange::Range(offset, offset + 21), zone);
+ l1.Add(CharacterRange::Range(offset + 31, offset + 52), zone);
for (int i = 0; i < 6; i++) {
- l2.Add(CharacterRange::Range(offset + 2, offset + 5));
- l2.Add(CharacterRange::Singleton(offset + 8));
+ l2.Add(CharacterRange::Range(offset + 2, offset + 5), zone);
+ l2.Add(CharacterRange::Singleton(offset + 8), zone);
offset += 9;
}
ASSERT(CharacterRange::IsCanonical(&l1));
ASSERT(CharacterRange::IsCanonical(&l2));
- ZoneList<CharacterRange> first_only(4);
- ZoneList<CharacterRange> second_only(4);
- ZoneList<CharacterRange> both(4);
+ ZoneList<CharacterRange> first_only(4, Isolate::Current()->runtime_zone());
+ ZoneList<CharacterRange> second_only(4, Isolate::Current()->runtime_zone());
+ ZoneList<CharacterRange> both(4, Isolate::Current()->runtime_zone());
}
diff --git a/src/3rdparty/v8/test/cctest/test-serialize.cc b/src/3rdparty/v8/test/cctest/test-serialize.cc
index e426e7b..8279182 100644
--- a/src/3rdparty/v8/test/cctest/test-serialize.cc
+++ b/src/3rdparty/v8/test/cctest/test-serialize.cc
@@ -196,8 +196,7 @@ class FileByteSink : public SnapshotByteSink {
int data_space_used,
int code_space_used,
int map_space_used,
- int cell_space_used,
- int large_space_used);
+ int cell_space_used);
private:
FILE* fp_;
@@ -211,8 +210,7 @@ void FileByteSink::WriteSpaceUsed(
int data_space_used,
int code_space_used,
int map_space_used,
- int cell_space_used,
- int large_space_used) {
+ int cell_space_used) {
int file_name_length = StrLength(file_name_) + 10;
Vector<char> name = Vector<char>::New(file_name_length + 1);
OS::SNPrintF(name, "%s.size", file_name_);
@@ -224,7 +222,6 @@ void FileByteSink::WriteSpaceUsed(
fprintf(fp, "code %d\n", code_space_used);
fprintf(fp, "map %d\n", map_space_used);
fprintf(fp, "cell %d\n", cell_space_used);
- fprintf(fp, "large %d\n", large_space_used);
fclose(fp);
}
@@ -233,6 +230,15 @@ static bool WriteToFile(const char* snapshot_file) {
FileByteSink file(snapshot_file);
StartupSerializer ser(&file);
ser.Serialize();
+
+ file.WriteSpaceUsed(
+ ser.CurrentAllocationAddress(NEW_SPACE),
+ ser.CurrentAllocationAddress(OLD_POINTER_SPACE),
+ ser.CurrentAllocationAddress(OLD_DATA_SPACE),
+ ser.CurrentAllocationAddress(CODE_SPACE),
+ ser.CurrentAllocationAddress(MAP_SPACE),
+ ser.CurrentAllocationAddress(CELL_SPACE));
+
return true;
}
@@ -250,18 +256,22 @@ static void Serialize() {
// Test that the whole heap can be serialized.
TEST(Serialize) {
- Serializer::Enable();
- v8::V8::Initialize();
- Serialize();
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
+ Serializer::Enable();
+ v8::V8::Initialize();
+ Serialize();
+ }
}
// Test that heap serialization is non-destructive.
TEST(SerializeTwice) {
- Serializer::Enable();
- v8::V8::Initialize();
- Serialize();
- Serialize();
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
+ Serializer::Enable();
+ v8::V8::Initialize();
+ Serialize();
+ Serialize();
+ }
}
@@ -275,11 +285,11 @@ static void Deserialize() {
static void SanityCheck() {
v8::HandleScope scope;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
HEAP->Verify();
#endif
- CHECK(Isolate::Current()->global()->IsJSObject());
- CHECK(Isolate::Current()->global_context()->IsContext());
+ CHECK(Isolate::Current()->global_object()->IsJSObject());
+ CHECK(Isolate::Current()->native_context()->IsContext());
CHECK(HEAP->symbol_table()->IsSymbolTable());
CHECK(!FACTORY->LookupAsciiSymbol("Empty")->IsFailure());
}
@@ -289,7 +299,7 @@ DEPENDENT_TEST(Deserialize, Serialize) {
// The serialize-deserialize tests only work if the VM is built without
// serialization. That doesn't matter. We don't need to be able to
// serialize a snapshot in a VM that is booted from a snapshot.
- if (!Snapshot::IsEnabled()) {
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
v8::HandleScope scope;
Deserialize();
@@ -302,7 +312,7 @@ DEPENDENT_TEST(Deserialize, Serialize) {
DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
- if (!Snapshot::IsEnabled()) {
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
v8::HandleScope scope;
Deserialize();
@@ -315,7 +325,7 @@ DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
- if (!Snapshot::IsEnabled()) {
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
v8::HandleScope scope;
Deserialize();
@@ -332,7 +342,7 @@ DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2,
SerializeTwice) {
- if (!Snapshot::IsEnabled()) {
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
v8::HandleScope scope;
Deserialize();
@@ -348,63 +358,74 @@ DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2,
TEST(PartialSerialization) {
- Serializer::Enable();
- v8::V8::Initialize();
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
+ Serializer::Enable();
+ v8::V8::Initialize();
- v8::Persistent<v8::Context> env = v8::Context::New();
- ASSERT(!env.IsEmpty());
- env->Enter();
- // Make sure all builtin scripts are cached.
- { HandleScope scope;
- for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
- Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
+ v8::Persistent<v8::Context> env = v8::Context::New();
+ ASSERT(!env.IsEmpty());
+ env->Enter();
+ // Make sure all builtin scripts are cached.
+ { HandleScope scope;
+ for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
+ Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
+ }
}
- }
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- Object* raw_foo;
- {
- v8::HandleScope handle_scope;
- v8::Local<v8::String> foo = v8::String::New("foo");
- ASSERT(!foo.IsEmpty());
- raw_foo = *(v8::Utils::OpenHandle(*foo));
- }
-
- int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
- Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
- OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
+ Object* raw_foo;
+ {
+ v8::HandleScope handle_scope;
+ v8::Local<v8::String> foo = v8::String::New("foo");
+ ASSERT(!foo.IsEmpty());
+ raw_foo = *(v8::Utils::OpenHandle(*foo));
+ }
- env->Exit();
- env.Dispose();
+ int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
+ Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
+ OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
- FileByteSink startup_sink(startup_name.start());
- startup_name.Dispose();
- StartupSerializer startup_serializer(&startup_sink);
- startup_serializer.SerializeStrongReferences();
-
- FileByteSink partial_sink(FLAG_testing_serialization_file);
- PartialSerializer p_ser(&startup_serializer, &partial_sink);
- p_ser.Serialize(&raw_foo);
- startup_serializer.SerializeWeakReferences();
- partial_sink.WriteSpaceUsed(p_ser.CurrentAllocationAddress(NEW_SPACE),
- p_ser.CurrentAllocationAddress(OLD_POINTER_SPACE),
- p_ser.CurrentAllocationAddress(OLD_DATA_SPACE),
- p_ser.CurrentAllocationAddress(CODE_SPACE),
- p_ser.CurrentAllocationAddress(MAP_SPACE),
- p_ser.CurrentAllocationAddress(CELL_SPACE),
- p_ser.CurrentAllocationAddress(LO_SPACE));
+ env->Exit();
+ env.Dispose();
+
+ FileByteSink startup_sink(startup_name.start());
+ StartupSerializer startup_serializer(&startup_sink);
+ startup_serializer.SerializeStrongReferences();
+
+ FileByteSink partial_sink(FLAG_testing_serialization_file);
+ PartialSerializer p_ser(&startup_serializer, &partial_sink);
+ p_ser.Serialize(&raw_foo);
+ startup_serializer.SerializeWeakReferences();
+
+ partial_sink.WriteSpaceUsed(
+ p_ser.CurrentAllocationAddress(NEW_SPACE),
+ p_ser.CurrentAllocationAddress(OLD_POINTER_SPACE),
+ p_ser.CurrentAllocationAddress(OLD_DATA_SPACE),
+ p_ser.CurrentAllocationAddress(CODE_SPACE),
+ p_ser.CurrentAllocationAddress(MAP_SPACE),
+ p_ser.CurrentAllocationAddress(CELL_SPACE));
+
+ startup_sink.WriteSpaceUsed(
+ startup_serializer.CurrentAllocationAddress(NEW_SPACE),
+ startup_serializer.CurrentAllocationAddress(OLD_POINTER_SPACE),
+ startup_serializer.CurrentAllocationAddress(OLD_DATA_SPACE),
+ startup_serializer.CurrentAllocationAddress(CODE_SPACE),
+ startup_serializer.CurrentAllocationAddress(MAP_SPACE),
+ startup_serializer.CurrentAllocationAddress(CELL_SPACE));
+ startup_name.Dispose();
+ }
}
-static void ReserveSpaceForPartialSnapshot(const char* file_name) {
+static void ReserveSpaceForSnapshot(Deserializer* deserializer,
+ const char* file_name) {
int file_name_length = StrLength(file_name) + 10;
Vector<char> name = Vector<char>::New(file_name_length + 1);
OS::SNPrintF(name, "%s.size", file_name);
FILE* fp = OS::FOpen(name.start(), "r");
name.Dispose();
int new_size, pointer_size, data_size, code_size, map_size, cell_size;
- int large_size;
#ifdef _MSC_VER
// Avoid warning about unsafe fscanf from MSVC.
// Please note that this is only fine if %c and %s are not being used.
@@ -416,18 +437,16 @@ static void ReserveSpaceForPartialSnapshot(const char* file_name) {
CHECK_EQ(1, fscanf(fp, "code %d\n", &code_size));
CHECK_EQ(1, fscanf(fp, "map %d\n", &map_size));
CHECK_EQ(1, fscanf(fp, "cell %d\n", &cell_size));
- CHECK_EQ(1, fscanf(fp, "large %d\n", &large_size));
#ifdef _MSC_VER
#undef fscanf
#endif
fclose(fp);
- HEAP->ReserveSpace(new_size,
- pointer_size,
- data_size,
- code_size,
- map_size,
- cell_size,
- large_size);
+ deserializer->set_reservation(NEW_SPACE, new_size);
+ deserializer->set_reservation(OLD_POINTER_SPACE, pointer_size);
+ deserializer->set_reservation(OLD_DATA_SPACE, data_size);
+ deserializer->set_reservation(CODE_SPACE, code_size);
+ deserializer->set_reservation(MAP_SPACE, map_size);
+ deserializer->set_reservation(CELL_SPACE, cell_size);
}
@@ -441,7 +460,6 @@ DEPENDENT_TEST(PartialDeserialization, PartialSerialization) {
startup_name.Dispose();
const char* file_name = FLAG_testing_serialization_file;
- ReserveSpaceForPartialSnapshot(file_name);
int snapshot_size = 0;
byte* snapshot = ReadBytes(file_name, &snapshot_size);
@@ -450,18 +468,19 @@ DEPENDENT_TEST(PartialDeserialization, PartialSerialization) {
{
SnapshotByteSource source(snapshot, snapshot_size);
Deserializer deserializer(&source);
+ ReserveSpaceForSnapshot(&deserializer, file_name);
deserializer.DeserializePartial(&root);
CHECK(root->IsString());
}
v8::HandleScope handle_scope;
Handle<Object> root_handle(root);
- ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{
SnapshotByteSource source(snapshot, snapshot_size);
Deserializer deserializer(&source);
+ ReserveSpaceForSnapshot(&deserializer, file_name);
deserializer.DeserializePartial(&root2);
CHECK(root2->IsString());
CHECK(*root_handle == root2);
@@ -471,53 +490,64 @@ DEPENDENT_TEST(PartialDeserialization, PartialSerialization) {
TEST(ContextSerialization) {
- Serializer::Enable();
- v8::V8::Initialize();
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
+ Serializer::Enable();
+ v8::V8::Initialize();
- v8::Persistent<v8::Context> env = v8::Context::New();
- ASSERT(!env.IsEmpty());
- env->Enter();
- // Make sure all builtin scripts are cached.
- { HandleScope scope;
- for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
- Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
+ v8::Persistent<v8::Context> env = v8::Context::New();
+ ASSERT(!env.IsEmpty());
+ env->Enter();
+ // Make sure all builtin scripts are cached.
+ { HandleScope scope;
+ for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
+ Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
+ }
}
- }
- // If we don't do this then we end up with a stray root pointing at the
- // context even after we have disposed of env.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ // If we don't do this then we end up with a stray root pointing at the
+ // context even after we have disposed of env.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
+ Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
+ OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
- int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
- Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
- OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
+ env->Exit();
- env->Exit();
+ Object* raw_context = *(v8::Utils::OpenHandle(*env));
- Object* raw_context = *(v8::Utils::OpenHandle(*env));
+ env.Dispose();
- env.Dispose();
+ FileByteSink startup_sink(startup_name.start());
+ StartupSerializer startup_serializer(&startup_sink);
+ startup_serializer.SerializeStrongReferences();
+
+ FileByteSink partial_sink(FLAG_testing_serialization_file);
+ PartialSerializer p_ser(&startup_serializer, &partial_sink);
+ p_ser.Serialize(&raw_context);
+ startup_serializer.SerializeWeakReferences();
+
+ partial_sink.WriteSpaceUsed(
+ p_ser.CurrentAllocationAddress(NEW_SPACE),
+ p_ser.CurrentAllocationAddress(OLD_POINTER_SPACE),
+ p_ser.CurrentAllocationAddress(OLD_DATA_SPACE),
+ p_ser.CurrentAllocationAddress(CODE_SPACE),
+ p_ser.CurrentAllocationAddress(MAP_SPACE),
+ p_ser.CurrentAllocationAddress(CELL_SPACE));
- FileByteSink startup_sink(startup_name.start());
- startup_name.Dispose();
- StartupSerializer startup_serializer(&startup_sink);
- startup_serializer.SerializeStrongReferences();
-
- FileByteSink partial_sink(FLAG_testing_serialization_file);
- PartialSerializer p_ser(&startup_serializer, &partial_sink);
- p_ser.Serialize(&raw_context);
- startup_serializer.SerializeWeakReferences();
- partial_sink.WriteSpaceUsed(p_ser.CurrentAllocationAddress(NEW_SPACE),
- p_ser.CurrentAllocationAddress(OLD_POINTER_SPACE),
- p_ser.CurrentAllocationAddress(OLD_DATA_SPACE),
- p_ser.CurrentAllocationAddress(CODE_SPACE),
- p_ser.CurrentAllocationAddress(MAP_SPACE),
- p_ser.CurrentAllocationAddress(CELL_SPACE),
- p_ser.CurrentAllocationAddress(LO_SPACE));
+ startup_sink.WriteSpaceUsed(
+ startup_serializer.CurrentAllocationAddress(NEW_SPACE),
+ startup_serializer.CurrentAllocationAddress(OLD_POINTER_SPACE),
+ startup_serializer.CurrentAllocationAddress(OLD_DATA_SPACE),
+ startup_serializer.CurrentAllocationAddress(CODE_SPACE),
+ startup_serializer.CurrentAllocationAddress(MAP_SPACE),
+ startup_serializer.CurrentAllocationAddress(CELL_SPACE));
+ startup_name.Dispose();
+ }
}
DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
- if (!Snapshot::IsEnabled()) {
+ if (!Snapshot::HaveASnapshotToStartFrom()) {
int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
@@ -526,7 +556,6 @@ DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
startup_name.Dispose();
const char* file_name = FLAG_testing_serialization_file;
- ReserveSpaceForPartialSnapshot(file_name);
int snapshot_size = 0;
byte* snapshot = ReadBytes(file_name, &snapshot_size);
@@ -535,18 +564,19 @@ DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
{
SnapshotByteSource source(snapshot, snapshot_size);
Deserializer deserializer(&source);
+ ReserveSpaceForSnapshot(&deserializer, file_name);
deserializer.DeserializePartial(&root);
CHECK(root->IsContext());
}
v8::HandleScope handle_scope;
Handle<Object> root_handle(root);
- ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{
SnapshotByteSource source(snapshot, snapshot_size);
Deserializer deserializer(&source);
+ ReserveSpaceForSnapshot(&deserializer, file_name);
deserializer.DeserializePartial(&root2);
CHECK(root2->IsContext());
CHECK(*root_handle != root2);
@@ -555,119 +585,6 @@ DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
}
-TEST(LinearAllocation) {
- v8::V8::Initialize();
- int new_space_max = 512 * KB;
- int paged_space_max = Page::kMaxNonCodeHeapObjectSize;
- int code_space_max = HEAP->code_space()->AreaSize();
-
- for (int size = 1000; size < 5 * MB; size += size >> 1) {
- size &= ~8; // Round.
- int new_space_size = (size < new_space_max) ? size : new_space_max;
- int paged_space_size = (size < paged_space_max) ? size : paged_space_max;
- HEAP->ReserveSpace(
- new_space_size,
- paged_space_size, // Old pointer space.
- paged_space_size, // Old data space.
- HEAP->code_space()->RoundSizeDownToObjectAlignment(code_space_max),
- HEAP->map_space()->RoundSizeDownToObjectAlignment(paged_space_size),
- HEAP->cell_space()->RoundSizeDownToObjectAlignment(paged_space_size),
- size); // Large object space.
- LinearAllocationScope linear_allocation_scope;
- const int kSmallFixedArrayLength = 4;
- const int kSmallFixedArraySize =
- FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize;
- const int kSmallStringLength = 16;
- const int kSmallStringSize =
- (SeqAsciiString::kHeaderSize + kSmallStringLength +
- kObjectAlignmentMask) & ~kObjectAlignmentMask;
- const int kMapSize = Map::kSize;
-
- Object* new_last = NULL;
- for (int i = 0;
- i + kSmallFixedArraySize <= new_space_size;
- i += kSmallFixedArraySize) {
- Object* obj =
- HEAP->AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked();
- if (new_last != NULL) {
- CHECK(reinterpret_cast<char*>(obj) ==
- reinterpret_cast<char*>(new_last) + kSmallFixedArraySize);
- }
- new_last = obj;
- }
-
- Object* pointer_last = NULL;
- for (int i = 0;
- i + kSmallFixedArraySize <= paged_space_size;
- i += kSmallFixedArraySize) {
- Object* obj = HEAP->AllocateFixedArray(kSmallFixedArrayLength,
- TENURED)->ToObjectChecked();
- int old_page_fullness = i % Page::kPageSize;
- int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize;
- if (page_fullness < old_page_fullness ||
- page_fullness > HEAP->old_pointer_space()->AreaSize()) {
- i = RoundUp(i, Page::kPageSize);
- pointer_last = NULL;
- }
- if (pointer_last != NULL) {
- CHECK(reinterpret_cast<char*>(obj) ==
- reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize);
- }
- pointer_last = obj;
- }
-
- Object* data_last = NULL;
- for (int i = 0;
- i + kSmallStringSize <= paged_space_size;
- i += kSmallStringSize) {
- Object* obj = HEAP->AllocateRawAsciiString(kSmallStringLength,
- TENURED)->ToObjectChecked();
- int old_page_fullness = i % Page::kPageSize;
- int page_fullness = (i + kSmallStringSize) % Page::kPageSize;
- if (page_fullness < old_page_fullness ||
- page_fullness > HEAP->old_data_space()->AreaSize()) {
- i = RoundUp(i, Page::kPageSize);
- data_last = NULL;
- }
- if (data_last != NULL) {
- CHECK(reinterpret_cast<char*>(obj) ==
- reinterpret_cast<char*>(data_last) + kSmallStringSize);
- }
- data_last = obj;
- }
-
- Object* map_last = NULL;
- for (int i = 0; i + kMapSize <= paged_space_size; i += kMapSize) {
- Object* obj = HEAP->AllocateMap(JS_OBJECT_TYPE,
- 42 * kPointerSize)->ToObjectChecked();
- int old_page_fullness = i % Page::kPageSize;
- int page_fullness = (i + kMapSize) % Page::kPageSize;
- if (page_fullness < old_page_fullness ||
- page_fullness > HEAP->map_space()->AreaSize()) {
- i = RoundUp(i, Page::kPageSize);
- map_last = NULL;
- }
- if (map_last != NULL) {
- CHECK(reinterpret_cast<char*>(obj) ==
- reinterpret_cast<char*>(map_last) + kMapSize);
- }
- map_last = obj;
- }
-
- if (size > Page::kMaxNonCodeHeapObjectSize) {
- // Support for reserving space in large object space is not there yet,
- // but using an always-allocate scope is fine for now.
- AlwaysAllocateScope always;
- int large_object_array_length =
- (size - FixedArray::kHeaderSize) / kPointerSize;
- Object* obj = HEAP->AllocateFixedArray(large_object_array_length,
- TENURED)->ToObjectChecked();
- CHECK(!obj->IsFailure());
- }
- }
-}
-
-
TEST(TestThatAlwaysSucceeds) {
}
diff --git a/src/3rdparty/v8/test/cctest/test-sockets.cc b/src/3rdparty/v8/test/cctest/test-sockets.cc
index ad73540..2f7941c 100644
--- a/src/3rdparty/v8/test/cctest/test-sockets.cc
+++ b/src/3rdparty/v8/test/cctest/test-sockets.cc
@@ -124,7 +124,7 @@ static void SendAndReceive(int port, char *data, int len) {
TEST(Socket) {
// Make sure this port is not used by other tests to allow tests to run in
// parallel.
- static const int kPort = 5859;
+ static const int kPort = 5859 + FlagDependentPortOffset();
bool ok;
diff --git a/src/3rdparty/v8/test/cctest/test-strings.cc b/src/3rdparty/v8/test/cctest/test-strings.cc
index d86886f..5a9ccbb 100644
--- a/src/3rdparty/v8/test/cctest/test-strings.cc
+++ b/src/3rdparty/v8/test/cctest/test-strings.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Check that we can traverse very deep stacks of ConsStrings using
// StringInputBuffer. Check that Get(int) works on very deep stacks
@@ -11,6 +11,7 @@
#include "api.h"
#include "factory.h"
+#include "objects.h"
#include "cctest.h"
#include "zone-inl.h"
@@ -82,6 +83,7 @@ static void InitializeBuildingBlocks(
Handle<String> building_blocks[NUMBER_OF_BUILDING_BLOCKS]) {
// A list of pointers that we don't have any interest in cleaning up.
// If they are reachable from a root then leak detection won't complain.
+ Zone* zone = Isolate::Current()->runtime_zone();
for (int i = 0; i < NUMBER_OF_BUILDING_BLOCKS; i++) {
int len = gen() % 16;
if (len > 14) {
@@ -113,11 +115,11 @@ static void InitializeBuildingBlocks(
break;
}
case 2: {
- uc16* buf = ZONE->NewArray<uc16>(len);
+ uc16* buf = zone->NewArray<uc16>(len);
for (int j = 0; j < len; j++) {
buf[j] = gen() % 65536;
}
- Resource* resource = new Resource(Vector<const uc16>(buf, len));
+ Resource* resource = new(zone) Resource(Vector<const uc16>(buf, len));
building_blocks[i] = FACTORY->NewExternalStringFromTwoByte(resource);
for (int j = 0; j < len; j++) {
CHECK_EQ(buf[j], building_blocks[i]->Get(j));
@@ -233,7 +235,7 @@ TEST(Traverse) {
InitializeVM();
v8::HandleScope scope;
Handle<String> building_blocks[NUMBER_OF_BUILDING_BLOCKS];
- ZoneScope zone(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
InitializeBuildingBlocks(building_blocks);
Handle<String> flat = ConstructBalanced(building_blocks);
FlattenString(flat);
@@ -348,10 +350,11 @@ TEST(Utf8Conversion) {
TEST(ExternalShortStringAdd) {
- ZoneScope zone(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zonescope(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
InitializeVM();
v8::HandleScope handle_scope;
+ Zone* zone = Isolate::Current()->runtime_zone();
// Make sure we cover all always-flat lengths and at least one above.
static const int kMaxLength = 20;
@@ -365,25 +368,25 @@ TEST(ExternalShortStringAdd) {
// Generate short ascii and non-ascii external strings.
for (int i = 0; i <= kMaxLength; i++) {
- char* ascii = ZONE->NewArray<char>(i + 1);
+ char* ascii = zone->NewArray<char>(i + 1);
for (int j = 0; j < i; j++) {
ascii[j] = 'a';
}
// Terminating '\0' is left out on purpose. It is not required for external
// string data.
AsciiResource* ascii_resource =
- new AsciiResource(Vector<const char>(ascii, i));
+ new(zone) AsciiResource(Vector<const char>(ascii, i));
v8::Local<v8::String> ascii_external_string =
v8::String::NewExternal(ascii_resource);
ascii_external_strings->Set(v8::Integer::New(i), ascii_external_string);
- uc16* non_ascii = ZONE->NewArray<uc16>(i + 1);
+ uc16* non_ascii = zone->NewArray<uc16>(i + 1);
for (int j = 0; j < i; j++) {
non_ascii[j] = 0x1234;
}
// Terminating '\0' is left out on purpose. It is not required for external
// string data.
- Resource* resource = new Resource(Vector<const uc16>(non_ascii, i));
+ Resource* resource = new(zone) Resource(Vector<const uc16>(non_ascii, i));
v8::Local<v8::String> non_ascii_external_string =
v8::String::NewExternal(resource);
non_ascii_external_strings->Set(v8::Integer::New(i),
@@ -438,7 +441,7 @@ TEST(CachedHashOverflow) {
// We incorrectly allowed strings to be tagged as array indices even if their
// values didn't fit in the hash field.
// See http://code.google.com/p/v8/issues/detail?id=728
- ZoneScope zone(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone(Isolate::Current()->runtime_zone(), DELETE_ON_EXIT);
InitializeVM();
v8::HandleScope handle_scope;
@@ -672,3 +675,43 @@ TEST(RobustSubStringStub) {
CompileRun("var slice = long.slice(1, 15);");
CheckException("%_SubString(slice, 0, 17);");
}
+
+
+TEST(RegExpOverflow) {
+ // Result string has the length 2^32, causing a 32-bit integer overflow.
+ InitializeVM();
+ HandleScope scope;
+ LocalContext context;
+ v8::V8::IgnoreOutOfMemoryException();
+ v8::Local<v8::Value> result = CompileRun(
+ "var a = 'a'; "
+ "for (var i = 0; i < 16; i++) { "
+ " a += a; "
+ "} "
+ "a.replace(/a/g, a); ");
+ CHECK(result.IsEmpty());
+ CHECK(context->HasOutOfMemoryException());
+}
+
+
+TEST(StringReplaceAtomTwoByteResult) {
+ InitializeVM();
+ HandleScope scope;
+ LocalContext context;
+ v8::Local<v8::Value> result = CompileRun(
+ "var subject = 'ascii~only~string~'; "
+ "var replace = '\x80'; "
+ "subject.replace(/~/g, replace); ");
+ CHECK(result->IsString());
+ Handle<String> string = v8::Utils::OpenHandle(v8::String::Cast(*result));
+ CHECK(string->IsSeqTwoByteString());
+
+ v8::Local<v8::String> expected = v8_str("ascii\x80only\x80string\x80");
+ CHECK(expected->Equals(result));
+}
+
+
+TEST(IsAscii) {
+ CHECK(String::IsAscii(static_cast<char*>(NULL), 0));
+ CHECK(String::IsAscii(static_cast<uc16*>(NULL), 0));
+}
diff --git a/src/3rdparty/v8/test/cctest/test-utils.cc b/src/3rdparty/v8/test/cctest/test-utils.cc
index df8ff72..c83acb9 100644
--- a/src/3rdparty/v8/test/cctest/test-utils.cc
+++ b/src/3rdparty/v8/test/cctest/test-utils.cc
@@ -55,6 +55,22 @@ TEST(Utils1) {
CHECK_EQ(-2, -8 >> 2);
CHECK_EQ(-2, static_cast<int8_t>(-8) >> 2);
CHECK_EQ(-2, static_cast<int>(static_cast<intptr_t>(-8) >> 2));
+
+ CHECK_EQ(-1000000, FastD2IChecked(-1000000.0));
+ CHECK_EQ(-1, FastD2IChecked(-1.0));
+ CHECK_EQ(0, FastD2IChecked(0.0));
+ CHECK_EQ(1, FastD2IChecked(1.0));
+ CHECK_EQ(1000000, FastD2IChecked(1000000.0));
+
+ CHECK_EQ(-1000000, FastD2IChecked(-1000000.123));
+ CHECK_EQ(-1, FastD2IChecked(-1.234));
+ CHECK_EQ(0, FastD2IChecked(0.345));
+ CHECK_EQ(1, FastD2IChecked(1.234));
+ CHECK_EQ(1000000, FastD2IChecked(1000000.123));
+
+ CHECK_EQ(INT_MAX, FastD2IChecked(1.0e100));
+ CHECK_EQ(INT_MIN, FastD2IChecked(-1.0e100));
+ CHECK_EQ(INT_MIN, FastD2IChecked(OS::nan_value()));
}
diff --git a/src/3rdparty/v8/test/cctest/test-weakmaps.cc b/src/3rdparty/v8/test/cctest/test-weakmaps.cc
index 7bba7b6..7c98c57 100644
--- a/src/3rdparty/v8/test/cctest/test-weakmaps.cc
+++ b/src/3rdparty/v8/test/cctest/test-weakmaps.cc
@@ -193,9 +193,10 @@ TEST(Regress2060a) {
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
FLAG_always_compact = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
FLAG_verify_heap = true;
#endif
+
LocalContext context;
v8::HandleScope scope;
Handle<JSFunction> function =
diff --git a/src/3rdparty/v8/test/cctest/testcfg.py b/src/3rdparty/v8/test/cctest/testcfg.py
index f1387e8..69a5db2 100644
--- a/src/3rdparty/v8/test/cctest/testcfg.py
+++ b/src/3rdparty/v8/test/cctest/testcfg.py
@@ -25,11 +25,70 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
import os
-from os.path import join, dirname, exists
-import platform
-import utils
+import shutil
+
+from testrunner.local import commands
+from testrunner.local import testsuite
+from testrunner.local import utils
+from testrunner.objects import testcase
+
+
+class CcTestSuite(testsuite.TestSuite):
+
+ def __init__(self, name, root):
+ super(CcTestSuite, self).__init__(name, root)
+ self.serdes_dir = os.path.normpath(
+ os.path.join(root, "..", "..", "out", ".serdes"))
+ if os.path.exists(self.serdes_dir):
+ shutil.rmtree(self.serdes_dir, True)
+ os.makedirs(self.serdes_dir)
+
+ def ListTests(self, context):
+ if utils.IsWindows():
+ shell += '.exe'
+ shell = os.path.abspath(os.path.join(context.shell_dir, self.shell()))
+ output = commands.Execute([context.command_prefix,
+ shell,
+ '--list',
+ context.extra_flags])
+ if output.exit_code != 0:
+ print output.stdout
+ print output.stderr
+ return []
+ tests = []
+ for test_desc in output.stdout.strip().split():
+ raw_test, dependency = test_desc.split('<')
+ if dependency != '':
+ dependency = raw_test.split('/')[0] + '/' + dependency
+ else:
+ dependency = None
+ test = testcase.TestCase(self, raw_test, dependency=dependency)
+ tests.append(test)
+ tests.sort()
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ testname = testcase.path.split(os.path.sep)[-1]
+ serialization_file = os.path.join(self.serdes_dir, "serdes_" + testname)
+ serialization_file += ''.join(testcase.flags).replace('-', '_')
+ return (testcase.flags + [testcase.path] + context.mode_flags +
+ ["--testing_serialization_file=" + serialization_file])
+
+ def shell(self):
+ return "cctest"
+
+
+def GetSuite(name, root):
+ return CcTestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+from os.path import exists, join, normpath
+import test
class CcTestCase(test.TestCase):
@@ -93,7 +152,8 @@ class CcTestConfiguration(test.TestConfiguration):
if utils.IsWindows():
executable += '.exe'
executable = join(self.context.buildspace, executable)
- output = test.Execute([executable, '--list'], self.context)
+ full_command = self.context.processor([executable, '--list'])
+ output = test.Execute(full_command, self.context)
if output.exit_code != 0:
print output.stdout
print output.stderr
diff --git a/src/3rdparty/v8/test/es5conform/testcfg.py b/src/3rdparty/v8/test/es5conform/testcfg.py
index b6a17d9..7de990d 100644
--- a/src/3rdparty/v8/test/es5conform/testcfg.py
+++ b/src/3rdparty/v8/test/es5conform/testcfg.py
@@ -31,6 +31,11 @@ import os
from os.path import join, exists
+def GetSuite(name, root):
+ # Not implemented.
+ return None
+
+
HARNESS_FILES = ['sth.js']
diff --git a/src/3rdparty/v8/test/message/message.status b/src/3rdparty/v8/test/message/message.status
index fc2896b..441f8ed 100644
--- a/src/3rdparty/v8/test/message/message.status
+++ b/src/3rdparty/v8/test/message/message.status
@@ -28,4 +28,4 @@
prefix message
# All tests in the bug directory are expected to fail.
-bugs: FAIL
+bugs/*: FAIL
diff --git a/src/3rdparty/v8/test/message/testcfg.py b/src/3rdparty/v8/test/message/testcfg.py
index af467e6..1b788d5 100644
--- a/src/3rdparty/v8/test/message/testcfg.py
+++ b/src/3rdparty/v8/test/message/testcfg.py
@@ -25,13 +25,93 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
+import itertools
import os
-from os.path import join, dirname, exists, basename, isdir
import re
+from testrunner.local import testsuite
+from testrunner.local import utils
+from testrunner.objects import testcase
+
+
FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)")
+
+class MessageTestSuite(testsuite.TestSuite):
+ def __init__(self, name, root):
+ super(MessageTestSuite, self).__init__(name, root)
+
+ def ListTests(self, context):
+ tests = []
+ for dirname, dirs, files in os.walk(self.root):
+ for dotted in [x for x in dirs if x.startswith('.')]:
+ dirs.remove(dotted)
+ dirs.sort()
+ files.sort()
+ for filename in files:
+ if filename.endswith(".js"):
+ testname = join(dirname[len(self.root) + 1:], filename[:-3])
+ test = testcase.TestCase(self, testname)
+ tests.append(test)
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ source = self.GetSourceForTest(testcase)
+ result = []
+ flags_match = re.findall(FLAGS_PATTERN, source)
+ for match in flags_match:
+ result += match.strip().split()
+ result += context.mode_flags
+ result.append(os.path.join(self.root, testcase.path + ".js"))
+ return testcase.flags + result
+
+ def GetSourceForTest(self, testcase):
+ filename = os.path.join(self.root, testcase.path + self.suffix())
+ with open(filename) as f:
+ return f.read()
+
+ def _IgnoreLine(self, string):
+ """Ignore empty lines, valgrind output and Android output."""
+ if not string: return True
+ return (string.startswith("==") or string.startswith("**") or
+ string.startswith("ANDROID"))
+
+ def IsFailureOutput(self, output, testpath):
+ expected_path = os.path.join(self.root, testpath + ".out")
+ expected_lines = []
+ # Can't use utils.ReadLinesFrom() here because it strips whitespace.
+ with open(expected_path) as f:
+ for line in f:
+ if line.startswith("#") or not line.strip(): continue
+ expected_lines.append(line)
+ raw_lines = output.stdout.splitlines()
+ actual_lines = [ s for s in raw_lines if not self._IgnoreLine(s) ]
+ env = { "basename": os.path.basename(testpath + ".js") }
+ if len(expected_lines) != len(actual_lines):
+ return True
+ for (expected, actual) in itertools.izip(expected_lines, actual_lines):
+ pattern = re.escape(expected.rstrip() % env)
+ pattern = pattern.replace("\\*", ".*")
+ pattern = "^%s$" % pattern
+ if not re.match(pattern, actual):
+ return True
+ return False
+
+ def StripOutputForTransmit(self, testcase):
+ pass
+
+
+def GetSuite(name, root):
+ return MessageTestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+import test
+from os.path import join, exists, basename, isdir
+
class MessageTestCase(test.TestCase):
def __init__(self, path, file, expected, mode, context, config):
@@ -41,9 +121,10 @@ class MessageTestCase(test.TestCase):
self.config = config
def IgnoreLine(self, str):
- """Ignore empty lines and valgrind output."""
+ """Ignore empty lines, valgrind output and Android output."""
if not str: return True
- else: return str.startswith('==') or str.startswith('**')
+ return (str.startswith('==') or str.startswith('**') or
+ str.startswith('ANDROID'))
def IsFailureOutput(self, output):
f = file(self.expected)
@@ -62,7 +143,7 @@ class MessageTestCase(test.TestCase):
pattern = '^%s$' % pattern
patterns.append(pattern)
# Compare actual output with the expected
- raw_lines = output.stdout.split('\n')
+ raw_lines = output.stdout.splitlines()
outlines = [ s for s in raw_lines if not self.IgnoreLine(s) ]
if len(outlines) != len(patterns):
return True
@@ -80,9 +161,9 @@ class MessageTestCase(test.TestCase):
def GetCommand(self):
result = self.config.context.GetVmCommand(self, self.mode)
source = open(self.file).read()
- flags_match = FLAGS_PATTERN.search(source)
- if flags_match:
- result += flags_match.group(1).strip().split()
+ flags_match = re.findall(FLAGS_PATTERN, source)
+ for match in flags_match:
+ result += match.strip().split()
result.append(self.file)
return result
diff --git a/src/3rdparty/v8/test/message/try-catch-finally-no-message.out b/src/3rdparty/v8/test/message/try-catch-finally-no-message.out
index d85fc7d..f59f5c6 100644
--- a/src/3rdparty/v8/test/message/try-catch-finally-no-message.out
+++ b/src/3rdparty/v8/test/message/try-catch-finally-no-message.out
@@ -1,26 +1,26 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# Copyright 2008 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/test/mjsunit/accessor-map-sharing.js b/src/3rdparty/v8/test/mjsunit/accessor-map-sharing.js
new file mode 100644
index 0000000..3afce37
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/accessor-map-sharing.js
@@ -0,0 +1,192 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Handy abbreviations.
+var dp = Object.defineProperty;
+var gop = Object.getOwnPropertyDescriptor;
+
+function getter() { return 111; }
+function setter(x) { print(222); }
+function anotherGetter() { return 333; }
+function anotherSetter(x) { print(444); }
+var obj1, obj2, obj3, obj4;
+
+// Two objects with the same getter.
+obj1 = {};
+dp(obj1, "alpha", { get: getter });
+obj2 = {};
+dp(obj2, "alpha", { get: getter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter, oldskool.
+obj1 = {};
+obj1.__defineGetter__("bravo", getter);
+assertEquals(getter, obj1.__lookupGetter__("bravo"));
+obj2 = {};
+obj2.__defineGetter__("bravo", getter);
+assertEquals(getter, obj2.__lookupGetter__("bravo"));
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same setter.
+obj1 = {};
+dp(obj1, "charlie", { set: setter });
+obj2 = {};
+dp(obj2, "charlie", { set: setter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same setter, oldskool.
+obj1 = {};
+obj1.__defineSetter__("delta", setter);
+assertEquals(setter, obj1.__lookupSetter__("delta"));
+obj2 = {};
+obj2.__defineSetter__("delta", setter);
+assertEquals(setter, obj2.__lookupSetter__("delta"));
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter.
+obj1 = {};
+dp(obj1, "foxtrot", { get: getter, set: setter });
+obj2 = {};
+dp(obj2, "foxtrot", { get: getter, set: setter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter, set separately.
+obj1 = {};
+dp(obj1, "golf", { get: getter, configurable: true });
+dp(obj1, "golf", { set: setter, configurable: true });
+obj2 = {};
+dp(obj2, "golf", { get: getter, configurable: true });
+dp(obj2, "golf", { set: setter, configurable: true });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter, set separately, oldskool.
+obj1 = {};
+obj1.__defineGetter__("hotel", getter);
+obj1.__defineSetter__("hotel", setter);
+obj2 = {};
+obj2.__defineGetter__("hotel", getter);
+obj2.__defineSetter__("hotel", setter);
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Attribute-only change, shouldn't affect previous descriptor properties.
+obj1 = {};
+dp(obj1, "india", { get: getter, configurable: true, enumerable: true });
+assertEquals(getter, gop(obj1, "india").get);
+assertTrue(gop(obj1, "india").configurable);
+assertTrue(gop(obj1, "india").enumerable);
+dp(obj1, "india", { enumerable: false });
+assertEquals(getter, gop(obj1, "india").get);
+assertTrue(gop(obj1, "india").configurable);
+assertFalse(gop(obj1, "india").enumerable);
+
+// Attribute-only change, shouldn't affect objects with previously shared maps.
+obj1 = {};
+dp(obj1, "juliet", { set: setter, configurable: true, enumerable: false });
+assertEquals(setter, gop(obj1, "juliet").set);
+assertTrue(gop(obj1, "juliet").configurable);
+assertFalse(gop(obj1, "juliet").enumerable);
+obj2 = {};
+dp(obj2, "juliet", { set: setter, configurable: true, enumerable: false });
+assertEquals(setter, gop(obj2, "juliet").set);
+assertTrue(gop(obj2, "juliet").configurable);
+assertFalse(gop(obj2, "juliet").enumerable);
+dp(obj1, "juliet", { set: setter, configurable: false, enumerable: true });
+assertEquals(setter, gop(obj1, "juliet").set);
+assertFalse(gop(obj1, "juliet").configurable);
+assertTrue(gop(obj1, "juliet").enumerable);
+assertEquals(setter, gop(obj2, "juliet").set);
+assertTrue(gop(obj2, "juliet").configurable);
+assertFalse(gop(obj2, "juliet").enumerable);
+
+// Two objects with the different getters.
+obj1 = {};
+dp(obj1, "kilo", { get: getter });
+obj2 = {};
+dp(obj2, "kilo", { get: anotherGetter });
+assertEquals(getter, gop(obj1, "kilo").get);
+assertEquals(anotherGetter, gop(obj2, "kilo").get);
+assertFalse(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getters and different setters.
+obj1 = {};
+dp(obj1, "lima", { get: getter, set: setter });
+obj2 = {};
+dp(obj2, "lima", { get: getter, set: anotherSetter });
+assertEquals(setter, gop(obj1, "lima").set);
+assertEquals(anotherSetter, gop(obj2, "lima").set);
+assertFalse(%HaveSameMap(obj1, obj2));
+
+// Even 'undefined' is a kind of getter.
+obj1 = {};
+dp(obj1, "mike", { get: undefined });
+assertTrue("mike" in obj1);
+assertEquals(undefined, gop(obj1, "mike").get);
+assertEquals(undefined, obj1.__lookupGetter__("mike"));
+assertEquals(undefined, gop(obj1, "mike").set);
+assertEquals(undefined, obj1.__lookupSetter__("mike"));
+
+// Even 'undefined' is a kind of setter.
+obj1 = {};
+dp(obj1, "november", { set: undefined });
+assertTrue("november" in obj1);
+assertEquals(undefined, gop(obj1, "november").get);
+assertEquals(undefined, obj1.__lookupGetter__("november"));
+assertEquals(undefined, gop(obj1, "november").set);
+assertEquals(undefined, obj1.__lookupSetter__("november"));
+
+// Redefining a data property.
+obj1 = {};
+obj1.oscar = 12345;
+dp(obj1, "oscar", { set: setter });
+assertEquals(setter, gop(obj1, "oscar").set);
+
+// Re-adding the same getter/attributes pair.
+obj1 = {};
+dp(obj1, "papa", { get: getter, configurable: true });
+dp(obj1, "papa", { get: getter, set: setter, configurable: true });
+assertEquals(getter, gop(obj1, "papa").get);
+assertEquals(setter, gop(obj1, "papa").set);
+assertTrue(gop(obj1, "papa").configurable);
+assertFalse(gop(obj1, "papa").enumerable);
+
+// Two objects with the same getter on the prototype chain.
+obj1 = {};
+dp(obj1, "quebec", { get: getter });
+obj2 = Object.create(obj1);
+obj3 = Object.create(obj2);
+obj4 = Object.create(obj2);
+assertTrue(%HaveSameMap(obj3, obj4));
+
+// Two objects with the same setter on the prototype chain.
+obj1 = {};
+dp(obj1, "romeo", { set: setter });
+obj2 = Object.create(obj1);
+obj3 = Object.create(obj2);
+obj4 = Object.create(obj2);
+assertTrue(%HaveSameMap(obj3, obj4));
diff --git a/src/3rdparty/v8/test/mjsunit/array-bounds-check-removal.js b/src/3rdparty/v8/test/mjsunit/array-bounds-check-removal.js
index 81064aa..df7988b 100644
--- a/src/3rdparty/v8/test/mjsunit/array-bounds-check-removal.js
+++ b/src/3rdparty/v8/test/mjsunit/array-bounds-check-removal.js
@@ -29,6 +29,29 @@
var a = new Int32Array(1024);
+// Test that we do not assert if the accessed index has not an int32 rep.
+var v = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+function test_do_not_assert_on_non_int32(vector, base) {
+ var r = 0;
+ var a1 = base + 1;
+ var a2 = base + 2;
+ var a3 = base + 3;
+ var a4 = base + 4;
+ if (a1 == 2) {
+ r += vector[a1];
+ r += vector[a4];
+ r += vector[a2];
+ r += vector[a3];
+ }
+ return r;
+}
+test_do_not_assert_on_non_int32(v,1);
+test_do_not_assert_on_non_int32(v,1);
+test_do_not_assert_on_non_int32(v,"a");
+test_do_not_assert_on_non_int32(v,"a");
+%OptimizeFunctionOnNextCall(test_do_not_assert_on_non_int32);
+test_do_not_assert_on_non_int32(v,0);
+
function test_base(base,cond) {
a[base + 1] = 1;
a[base + 4] = 2;
@@ -123,7 +146,7 @@ check_test_minus(7,false);
// ALWAYS: 3
// NEVER: 4
-if (false) {
+// Test that we still deopt on failed bound checks
test_base(5,true);
test_base(6,true);
test_base(5,false);
@@ -139,7 +162,21 @@ test_base(6,false);
%OptimizeFunctionOnNextCall(test_base);
test_base(2048,true);
assertTrue(%GetOptimizationStatus(test_base) != 1);
+
+// Specific test on negative offsets
+var short_a = new Array(100);
+for (var i = 0; i < short_a.length; i++) short_a[i] = 0;
+function short_test(a, i) {
+ a[i + 9] = 0;
+ a[i - 10] = 0;
}
+short_test(short_a, 50);
+short_test(short_a, 50);
+%OptimizeFunctionOnNextCall(short_test);
+short_a.length = 10;
+short_test(a, 0);
+assertTrue(%GetOptimizationStatus(short_test) != 1);
+
gc();
diff --git a/src/3rdparty/v8/test/mjsunit/array-construct-transition.js b/src/3rdparty/v8/test/mjsunit/array-construct-transition.js
index 577e321..f8d7c83 100644
--- a/src/3rdparty/v8/test/mjsunit/array-construct-transition.js
+++ b/src/3rdparty/v8/test/mjsunit/array-construct-transition.js
@@ -27,13 +27,13 @@
// Flags: --allow-natives-syntax --smi-only-arrays
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6));
if (support_smi_only_arrays) {
var a = new Array(0, 1, 2);
- assertTrue(%HasFastSmiOnlyElements(a));
+ assertTrue(%HasFastSmiElements(a));
var b = new Array(0.5, 1.2, 2.3);
assertTrue(%HasFastDoubleElements(b));
var c = new Array(0.5, 1.2, new Object());
- assertTrue(%HasFastElements(c));
+ assertTrue(%HasFastObjectElements(c));
}
diff --git a/src/3rdparty/v8/test/mjsunit/array-iteration.js b/src/3rdparty/v8/test/mjsunit/array-iteration.js
index 0ee2e6e..033bb54 100644
--- a/src/3rdparty/v8/test/mjsunit/array-iteration.js
+++ b/src/3rdparty/v8/test/mjsunit/array-iteration.js
@@ -40,7 +40,7 @@
// Simple use.
var a = [0,1];
assertArrayEquals([0], a.filter(function(n) { return n == 0; }));
- assertArrayEquals(a, a);
+ assertArrayEquals([0,1], a);
// Use specified object as this object when calling the function.
var o = { value: 42 }
diff --git a/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js b/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
index f657525..d4c0c30 100644
--- a/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
+++ b/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -26,6 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
+// Flags: --noparallel-recompilation
+
// Test element kind of objects.
// Since --smi-only-arrays affects builtins, its default setting at compile
// time sticks if built with snapshot. If --smi-only-arrays is deactivated
@@ -33,7 +35,7 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements([1,2,3,4,5,6,7,8,9,10]);
+support_smi_only_arrays = %HasFastSmiElements([1,2,3,4,5,6,7,8,9,10]);
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -46,14 +48,14 @@ function get(foo) { return foo; } // Used to generate dynamic values.
function array_literal_test() {
var a0 = [1, 2, 3];
- assertTrue(%HasFastSmiOnlyElements(a0));
+ assertTrue(%HasFastSmiElements(a0));
var a1 = [get(1), get(2), get(3)];
- assertTrue(%HasFastSmiOnlyElements(a1));
+ assertTrue(%HasFastSmiElements(a1));
var b0 = [1, 2, get("three")];
- assertTrue(%HasFastElements(b0));
+ assertTrue(%HasFastObjectElements(b0));
var b1 = [get(1), get(2), get("three")];
- assertTrue(%HasFastElements(b1));
+ assertTrue(%HasFastObjectElements(b1));
var c0 = [1, 2, get(3.5)];
assertTrue(%HasFastDoubleElements(c0));
@@ -75,7 +77,7 @@ function array_literal_test() {
var object = new Object();
var d0 = [1, 2, object];
- assertTrue(%HasFastElements(d0));
+ assertTrue(%HasFastObjectElements(d0));
assertEquals(object, d0[2]);
assertEquals(2, d0[1]);
assertEquals(1, d0[0]);
@@ -87,7 +89,7 @@ function array_literal_test() {
assertEquals(1, e0[0]);
var f0 = [1, 2, [1, 2]];
- assertTrue(%HasFastElements(f0));
+ assertTrue(%HasFastObjectElements(f0));
assertEquals([1,2], f0[2]);
assertEquals(2, f0[1]);
assertEquals(1, f0[0]);
@@ -115,9 +117,9 @@ if (support_smi_only_arrays) {
large =
[ 0, 1, 2, 3, 4, 5, d(), d(), d(), d(), d(), d(), o(), o(), o(), o() ];
assertFalse(%HasDictionaryElements(large));
- assertFalse(%HasFastSmiOnlyElements(large));
+ assertFalse(%HasFastSmiElements(large));
assertFalse(%HasFastDoubleElements(large));
- assertTrue(%HasFastElements(large));
+ assertTrue(%HasFastObjectElements(large));
assertEquals(large,
[0, 1, 2, 3, 4, 5, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5,
new Object(), new Object(), new Object(), new Object()]);
diff --git a/src/3rdparty/v8/test/mjsunit/assert-opt-and-deopt.js b/src/3rdparty/v8/test/mjsunit/assert-opt-and-deopt.js
index 51cb99a..c79d923 100644
--- a/src/3rdparty/v8/test/mjsunit/assert-opt-and-deopt.js
+++ b/src/3rdparty/v8/test/mjsunit/assert-opt-and-deopt.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --noparallel-recompilation
/**
* This class shows how to use %GetOptimizationCount() and
diff --git a/src/3rdparty/v8/test/mjsunit/bugs/bug-2337.js b/src/3rdparty/v8/test/mjsunit/bugs/bug-2337.js
new file mode 100644
index 0000000..ebf7621
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/bugs/bug-2337.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-gc
+
+// If one callback causes a GC then the other callbacks don't take place.
+
+var f = eval("(function f() { return 42; })");
+var f2 = eval("(function f2() { return 43; })");
+
+Debug = debug.Debug;
+
+var called = 0;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.ScriptCollected) {
+ if (called != 2) {
+ called++;
+ gc();
+ }
+ }
+};
+
+Debug.scripts();
+Debug.setListener(listener);
+f = void 0;
+f2 = void 0;
+gc();
+assertTrue(called == 2);
diff --git a/src/3rdparty/v8/test/mjsunit/compare-known-objects-slow.js b/src/3rdparty/v8/test/mjsunit/compare-known-objects-slow.js
new file mode 100644
index 0000000..afa198f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compare-known-objects-slow.js
@@ -0,0 +1,69 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test CompareIC stubs for normal and strict equality comparison of known
+// objects in slow mode. These objects share the same map even though they
+// might have completely different properties.
+
+function eq(a, b) {
+ return a == b;
+}
+
+function eq_strict(a, b) {
+ return a === b;
+}
+
+function test(a, b) {
+ // Check CompareIC for equality of known objects.
+ assertTrue(eq(a, a));
+ assertTrue(eq(b, b));
+ assertFalse(eq(a, b));
+ // Check CompareIC for strict equality of known objects.
+ assertTrue(eq_strict(a, a));
+ assertTrue(eq_strict(b, b));
+ assertFalse(eq_strict(a, b));
+}
+
+// Prepare two objects in slow mode that have the same map.
+var obj1 = %OptimizeObjectForAddingMultipleProperties({}, 1);
+var obj2 = %OptimizeObjectForAddingMultipleProperties({}, 1);
+
+// Test original objects.
+assertTrue(%HaveSameMap(obj1, obj2));
+test(obj1, obj2);
+
+// Test after adding property to first object.
+obj1.x = 1;
+assertTrue(%HaveSameMap(obj1, obj2));
+test(obj1, obj2);
+
+// Test after adding property to second object.
+obj2.y = 2;
+assertTrue(%HaveSameMap(obj1, obj2));
+test(obj1, obj2);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js
index 0b202f7..b0a981d 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js
@@ -25,7 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --inline-construct --max-inlined-source-size=999999 --max-inlined-nodes=999999 --max-inlined-nodes-cumulative=999999
+// Flags: --allow-natives-syntax --inline-construct
+// Flags: --max-inlined-source-size=999999 --max-inlined-nodes=999999
+// Flags: --max-inlined-nodes-cumulative=999999
// Test that huge constructors (more than 256 this assignments) are
// handled correctly.
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-accessors.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-accessors.js
new file mode 100644
index 0000000..a4cf7ae
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-accessors.js
@@ -0,0 +1,368 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --inline-accessors --max-opt-count=100
+
+var accessorCallCount, setterArgument, setterValue, obj, forceDeopt;
+
+// -----------------------------------------------------------------------------
+// Helpers for testing inlining of getters.
+
+function TestInlinedGetter(context, obj, expected) {
+ forceDeopt = { deopt: 0 };
+ accessorCallCount = 0;
+
+ assertEquals(expected, context(obj));
+ assertEquals(1, accessorCallCount);
+
+ assertEquals(expected, context(obj));
+ assertEquals(2, accessorCallCount);
+
+ %OptimizeFunctionOnNextCall(context);
+ assertEquals(expected, context(obj));
+ assertEquals(3, accessorCallCount);
+
+ forceDeopt = { /* empty*/ };
+ assertEquals(expected, context(obj));
+ assertEquals(4, accessorCallCount);
+}
+
+
+function value_context_for_getter(obj) {
+ return obj.getterProperty;
+}
+
+function test_context_for_getter(obj) {
+ if (obj.getterProperty) {
+ return 111;
+ } else {
+ return 222;
+ }
+}
+
+function effect_context_for_getter(obj) {
+ obj.getterProperty;
+ return 5678;
+}
+
+function TryGetter(context, getter, obj, expected, expectException) {
+ try {
+ TestInlinedGetter(context, obj, expected);
+ assertFalse(expectException);
+ } catch (exception) {
+ assertTrue(expectException);
+ assertEquals(7, exception.stack.split('\n').length);
+ }
+ %DeoptimizeFunction(context);
+ %ClearFunctionTypeFeedback(context);
+ %ClearFunctionTypeFeedback(getter);
+}
+
+function TestGetterInAllContexts(getter, obj, expected, expectException) {
+ TryGetter(value_context_for_getter, getter, obj, expected, expectException);
+ TryGetter(test_context_for_getter, getter, obj, expected ? 111 : 222,
+ expectException);
+ TryGetter(effect_context_for_getter, getter, obj, 5678, expectException);
+}
+
+// -----------------------------------------------------------------------------
+// Test getter returning something 'true'ish in all contexts.
+
+function getter1() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ return 1234;
+}
+
+function ConstrG1() { }
+obj = Object.defineProperty(new ConstrG1(), "getterProperty", { get: getter1 });
+TestGetterInAllContexts(getter1, obj, 1234, false);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter1, obj, 1234, false);
+
+// -----------------------------------------------------------------------------
+// Test getter returning false in all contexts.
+
+function getter2() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ return false;
+}
+
+function ConstrG2() { }
+obj = Object.defineProperty(new ConstrG2(), "getterProperty", { get: getter2 });
+TestGetterInAllContexts(getter2, obj, false, false);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter2, obj, false, false);
+
+// -----------------------------------------------------------------------------
+// Test getter without a return in all contexts.
+
+function getter3() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+}
+
+function ConstrG3() { }
+obj = Object.defineProperty(new ConstrG3(), "getterProperty", { get: getter3 });
+TestGetterInAllContexts(getter3, obj, undefined, false);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter3, obj, undefined, false);
+
+// -----------------------------------------------------------------------------
+// Test getter with too many arguments without a return in all contexts.
+
+function getter4(a) {
+ assertSame(obj, this);
+ assertEquals(undefined, a);
+ accessorCallCount++;
+ forceDeopt.deopt;
+}
+
+function ConstrG4() { }
+obj = Object.defineProperty(new ConstrG4(), "getterProperty", { get: getter4 });
+TestGetterInAllContexts(getter4, obj, undefined, false);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter4, obj, undefined, false);
+
+// -----------------------------------------------------------------------------
+// Test getter with too many arguments with a return in all contexts.
+
+function getter5(a) {
+ assertSame(obj, this);
+ assertEquals(undefined, a);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ return 9876;
+}
+
+function ConstrG5() { }
+obj = Object.defineProperty(new ConstrG5(), "getterProperty", { get: getter5 });
+TestGetterInAllContexts(getter5, obj, 9876, false);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter5, obj, 9876, false);
+
+// -----------------------------------------------------------------------------
+// Test getter which throws from optimized code.
+
+function getter6() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ if (accessorCallCount == 4) { 123 in null; }
+ return 13579;
+}
+
+function ConstrG6() { }
+obj = Object.defineProperty(new ConstrG6(), "getterProperty", { get: getter6 });
+TestGetterInAllContexts(getter6, obj, 13579, true);
+obj = Object.create(obj);
+TestGetterInAllContexts(getter6, obj, 13579, true);
+
+// -----------------------------------------------------------------------------
+// Helpers for testing inlining of setters.
+
+function TestInlinedSetter(context, obj, value, expected) {
+ forceDeopt = { deopt: 0 };
+ accessorCallCount = 0;
+ setterArgument = value;
+
+ assertEquals(expected, context(obj, value));
+ assertEquals(value, setterValue);
+ assertEquals(1, accessorCallCount);
+
+ assertEquals(expected, context(obj, value));
+ assertEquals(value, setterValue);
+ assertEquals(2, accessorCallCount);
+
+ %OptimizeFunctionOnNextCall(context);
+ assertEquals(expected, context(obj, value));
+ assertEquals(value, setterValue);
+ assertEquals(3, accessorCallCount);
+
+ forceDeopt = { /* empty*/ };
+ assertEquals(expected, context(obj, value));
+ assertEquals(value, setterValue);
+ assertEquals(4, accessorCallCount);
+}
+
+function value_context_for_setter(obj, value) {
+ return obj.setterProperty = value;
+}
+
+function test_context_for_setter(obj, value) {
+ if (obj.setterProperty = value) {
+ return 333;
+ } else {
+ return 444;
+ }
+}
+
+function effect_context_for_setter(obj, value) {
+ obj.setterProperty = value;
+ return 666;
+}
+
+function TrySetter(context, setter, obj, expectException, value, expected) {
+ try {
+ TestInlinedSetter(context, obj, value, expected);
+ assertFalse(expectException);
+ } catch (exception) {
+ assertTrue(expectException);
+ assertEquals(7, exception.stack.split('\n').length);
+ }
+ %DeoptimizeFunction(context);
+ %ClearFunctionTypeFeedback(context);
+ %ClearFunctionTypeFeedback(setter);
+}
+
+function TestSetterInAllContexts(setter, obj, expectException) {
+ TrySetter(value_context_for_setter, setter, obj, expectException, 111, 111);
+ TrySetter(test_context_for_setter, setter, obj, expectException, true, 333);
+ TrySetter(test_context_for_setter, setter, obj, expectException, false, 444);
+ TrySetter(effect_context_for_setter, setter, obj, expectException, 555, 666);
+}
+
+// -----------------------------------------------------------------------------
+// Test setter without a return in all contexts.
+
+function setter1(value) {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = value;
+}
+
+function ConstrS1() { }
+obj = Object.defineProperty(new ConstrS1(), "setterProperty", { set: setter1 });
+TestSetterInAllContexts(setter1, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter1, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter returning something different than the RHS in all contexts.
+
+function setter2(value) {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = value;
+ return 1000000;
+}
+
+function ConstrS2() { }
+obj = Object.defineProperty(new ConstrS2(), "setterProperty", { set: setter2 });
+TestSetterInAllContexts(setter2, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter2, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter with too few arguments without a return in all contexts.
+
+function setter3() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = setterArgument;
+}
+
+function ConstrS3() { }
+obj = Object.defineProperty(new ConstrS3(), "setterProperty", { set: setter3 });
+TestSetterInAllContexts(setter3, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter3, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter with too few arguments with a return in all contexts.
+
+function setter4() {
+ assertSame(obj, this);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = setterArgument;
+ return 2000000;
+}
+
+function ConstrS4() { }
+obj = Object.defineProperty(new ConstrS4(), "setterProperty", { set: setter4 });
+TestSetterInAllContexts(setter4, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter4, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter with too many arguments without a return in all contexts.
+
+function setter5(value, foo) {
+ assertSame(obj, this);
+ assertEquals(undefined, foo);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = value;
+}
+
+function ConstrS5() { }
+obj = Object.defineProperty(new ConstrS5(), "setterProperty", { set: setter5 });
+TestSetterInAllContexts(setter5, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter5, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter with too many arguments with a return in all contexts.
+
+function setter6(value, foo) {
+ assertSame(obj, this);
+ assertEquals(undefined, foo);
+ accessorCallCount++;
+ forceDeopt.deopt;
+ setterValue = value;
+ return 3000000;
+}
+
+function ConstrS6() { }
+obj = Object.defineProperty(new ConstrS6(), "setterProperty", { set: setter6 });
+TestSetterInAllContexts(setter6, obj, false);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter6, obj, false);
+
+// -----------------------------------------------------------------------------
+// Test setter which throws from optimized code.
+
+function setter7(value) {
+ accessorCallCount++;
+ forceDeopt.deopt;
+ if (accessorCallCount == 4) { 123 in null; }
+ setterValue = value;
+}
+
+function ConstrS7() { }
+obj = Object.defineProperty(new ConstrS7(), "setterProperty", { set: setter7 });
+TestSetterInAllContexts(setter7, obj, true);
+obj = Object.create(obj);
+TestSetterInAllContexts(setter7, obj, true);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
index f8a2476..df1bd22 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --max-opt-count=100
function A() {
}
@@ -157,6 +157,7 @@ function test_toarr(toarr) {
test_toarr(toarr1);
test_toarr(toarr2);
+
// Test that arguments access from inlined function uses correct values.
(function () {
function inner(x, y) {
@@ -174,9 +175,94 @@ test_toarr(toarr2);
return inner(x, y);
}
+ %OptimizeFunctionOnNextCall(outer);
+ %OptimizeFunctionOnNextCall(inner);
+ assertEquals(2, outer(1, 2));
+})();
+
+
+(function () {
+ function inner(x, y) {
+ "use strict";
+ x = 10;
+ y = 20;
+ for (var i = 0; i < 1; i++) {
+ for (var j = 1; j <= arguments.length; j++) {
+ return arguments[arguments.length - j];
+ }
+ }
+ }
+
+ function outer(x, y) {
+ return inner(x, y);
+ }
+
assertEquals(2, outer(1, 2));
assertEquals(2, outer(1, 2));
assertEquals(2, outer(1, 2));
%OptimizeFunctionOnNextCall(outer);
assertEquals(2, outer(1, 2));
})();
+
+
+// Test inlining and deoptimization of functions accessing and modifying
+// the arguments object in strict mode with mismatched arguments count.
+(function () {
+ "use strict";
+ function test(outerCount, middleCount, innerCount) {
+ var forceDeopt = { deopt:false };
+ function inner(x,y) {
+ x = 0; y = 0;
+ forceDeopt.deopt;
+ assertSame(innerCount, arguments.length);
+ for (var i = 0; i < arguments.length; i++) {
+ assertSame(30 + i, arguments[i]);
+ }
+ }
+
+ function middle(x,y) {
+ x = 0; y = 0;
+ if (innerCount == 1) inner(30);
+ if (innerCount == 2) inner(30, 31);
+ if (innerCount == 3) inner(30, 31, 32);
+ assertSame(middleCount, arguments.length);
+ for (var i = 0; i < arguments.length; i++) {
+ assertSame(20 + i, arguments[i]);
+ }
+ }
+
+ function outer(x,y) {
+ x = 0; y = 0;
+ if (middleCount == 1) middle(20);
+ if (middleCount == 2) middle(20, 21);
+ if (middleCount == 3) middle(20, 21, 22);
+ assertSame(outerCount, arguments.length);
+ for (var i = 0; i < arguments.length; i++) {
+ assertSame(10 + i, arguments[i]);
+ }
+ }
+
+ for (var step = 0; step < 4; step++) {
+ if (outerCount == 1) outer(10);
+ if (outerCount == 2) outer(10, 11);
+ if (outerCount == 3) outer(10, 11, 12);
+ if (step == 1) %OptimizeFunctionOnNextCall(outer);
+ if (step == 2) delete forceDeopt.deopt;
+ }
+
+ %DeoptimizeFunction(outer);
+ %DeoptimizeFunction(middle);
+ %DeoptimizeFunction(inner);
+ %ClearFunctionTypeFeedback(outer);
+ %ClearFunctionTypeFeedback(middle);
+ %ClearFunctionTypeFeedback(inner);
+ }
+
+ for (var a = 1; a <= 3; a++) {
+ for (var b = 1; b <= 3; b++) {
+ for (var c = 1; c <= 3; c++) {
+ test(a,b,c);
+ }
+ }
+ }
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js
index 7a3f1e4..fa784cf 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js
@@ -29,63 +29,72 @@
// Test inlining of constructor calls.
-function TestInlinedConstructor(closure) {
+function TestInlinedConstructor(constructor, closure) {
var result;
var counter = { value:0 };
- result = closure(11, 12, counter);
- assertEquals(23, result);
+ var noDeopt = { deopt:0 };
+ var forceDeopt = { /*empty*/ };
+
+ result = closure(constructor, 11, noDeopt, counter);
+ assertEquals(11, result);
assertEquals(1, counter.value);
- result = closure(23, 19, counter);
- assertEquals(42, result);
+
+ result = closure(constructor, 23, noDeopt, counter);
+ assertEquals(23, result);
assertEquals(2, counter.value);
+
%OptimizeFunctionOnNextCall(closure);
- result = closure(1, 42, counter)
- assertEquals(43, result);
+ result = closure(constructor, 42, noDeopt, counter);
+ assertEquals(42, result);
assertEquals(3, counter.value);
- result = closure("foo", "bar", counter)
- assertEquals("foobar", result)
+
+ result = closure(constructor, 127, forceDeopt, counter);
+ assertEquals(127, result)
assertEquals(4, counter.value);
+
+ %DeoptimizeFunction(closure);
+ %ClearFunctionTypeFeedback(closure);
+ %ClearFunctionTypeFeedback(constructor);
}
-function TestInAllContexts(constructor) {
- function value_context(a, b, counter) {
- var obj = new constructor(a, b, counter);
- return obj.x;
- }
- function test_context(a, b, counter) {
- if (!new constructor(a, b, counter)) {
- assertUnreachable("should not happen");
- }
- return a + b;
- }
- function effect_context(a, b, counter) {
- new constructor(a, b, counter);
- return a + b;
+function value_context(constructor, val, deopt, counter) {
+ var obj = new constructor(val, deopt, counter);
+ return obj.x;
+}
+
+function test_context(constructor, val, deopt, counter) {
+ if (!new constructor(val, deopt, counter)) {
+ assertUnreachable("should not happen");
}
- TestInlinedConstructor(value_context);
- TestInlinedConstructor(test_context);
- TestInlinedConstructor(effect_context);
- %DeoptimizeFunction(value_context);
- %DeoptimizeFunction(test_context);
- %DeoptimizeFunction(effect_context);
- %ClearFunctionTypeFeedback(value_context);
- %ClearFunctionTypeFeedback(test_context);
- %ClearFunctionTypeFeedback(effect_context);
+ return val;
+}
+
+function effect_context(constructor, val, deopt, counter) {
+ new constructor(val, deopt, counter);
+ return val;
+}
+
+function TestInAllContexts(constructor) {
+ TestInlinedConstructor(constructor, value_context);
+ TestInlinedConstructor(constructor, test_context);
+ TestInlinedConstructor(constructor, effect_context);
}
// Test constructor returning nothing in all contexts.
-function c1(a, b, counter) {
- this.x = a + b;
+function c1(val, deopt, counter) {
+ deopt.deopt;
+ this.x = val;
counter.value++;
}
TestInAllContexts(c1);
// Test constructor returning an object in all contexts.
-function c2(a, b, counter) {
- var obj = new Object();
- obj.x = a + b;
+function c2(val, deopt, counter) {
+ var obj = {};
+ deopt.deopt;
+ obj.x = val;
counter.value++;
return obj;
}
@@ -93,8 +102,9 @@ TestInAllContexts(c2);
// Test constructor returning a primitive value in all contexts.
-function c3(a, b, counter) {
- this.x = a + b;
+function c3(val, deopt, counter) {
+ deopt.deopt;
+ this.x = val;
counter.value++;
return "not an object";
}
@@ -133,9 +143,10 @@ assertEquals("foo1", f_too_few("foo"))
// Test constructor that cannot be inlined.
-function c_unsupported_syntax(a, b, counter) {
+function c_unsupported_syntax(val, deopt, counter) {
try {
- this.x = a + b;
+ deopt.deopt;
+ this.x = val;
counter.value++;
} catch(e) {
throw new Error();
@@ -146,9 +157,10 @@ TestInAllContexts(c_unsupported_syntax);
// Regression test: Inlined constructors called as functions do not get their
// implicit receiver object set to undefined, even in strict mode.
-function c_strict(a, b, counter) {
+function c_strict(val, deopt, counter) {
"use strict";
- this.x = a + b;
+ deopt.deopt;
+ this.x = val;
counter.value++;
}
TestInAllContexts(c_strict);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js
index f78abe8..1422586 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js
@@ -29,6 +29,26 @@
// Test that we can inline functions containing materialized literals.
+function a2(b, c) {
+ return [b, c, b + c];
+}
+
+function a1(a, b, c) {
+ return [a, a2(b, c)];
+}
+
+function TestArrayLiteral(a, b, c) {
+ var expected = [a, [b, c, b + c]];
+ var result = a1(a, b, c);
+ assertEquals(expected, result, "TestArrayLiteral");
+}
+
+TestArrayLiteral(1, 2, 3);
+TestArrayLiteral(1, 2, 3);
+%OptimizeFunctionOnNextCall(TestArrayLiteral);
+TestArrayLiteral(1, 2, 3);
+TestArrayLiteral('a', 'b', 'c');
+
function o2(b, c) {
return { 'b':b, 'c':c, 'y':b + c };
}
@@ -48,3 +68,22 @@ TestObjectLiteral(1, 2, 3);
%OptimizeFunctionOnNextCall(TestObjectLiteral);
TestObjectLiteral(1, 2, 3);
TestObjectLiteral('a', 'b', 'c');
+
+function r2(s, x, y) {
+ return s.replace(/a/, x + y);
+}
+
+function r1(s, x, y) {
+ return r2(s, x, y).replace(/b/, y + x);
+}
+
+function TestRegExpLiteral(s, x, y, expected) {
+ var result = r1(s, x, y);
+ assertEquals(expected, result, "TestRegExpLiteral");
+}
+
+TestRegExpLiteral("a-", "reg", "exp", "regexp-");
+TestRegExpLiteral("-b", "reg", "exp", "-expreg");
+%OptimizeFunctionOnNextCall(TestRegExpLiteral);
+TestRegExpLiteral("ab", "reg", "exp", "regexpexpreg");
+TestRegExpLiteral("ab", 12345, 54321, "6666666666");
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/optimized-closures.js b/src/3rdparty/v8/test/mjsunit/compiler/optimized-closures.js
new file mode 100644
index 0000000..eaf75f8
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/optimized-closures.js
@@ -0,0 +1,57 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test optimized closures.
+
+var a = new Array(100);
+
+function f() {
+ var x=0;
+ for (var i=0; i<100; i++) {
+ var g = function goo(y) {
+ function h() {
+ if (goo.arguments[0] == 23) return -42;
+ return 42;
+ }
+ return x + y + h(y);
+ }
+ g(0);
+ %OptimizeFunctionOnNextCall(g);
+ a[i] = g(i);
+ }
+}
+
+f();
+assertEquals(42, a[0]);
+assertEquals(49, a[7]);
+assertEquals(-19, a[23]);
+
+
+
+
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/proto-chain-load.js b/src/3rdparty/v8/test/mjsunit/compiler/proto-chain-load.js
new file mode 100644
index 0000000..60c6431
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/proto-chain-load.js
@@ -0,0 +1,44 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test HLoadNamedField on the proto chain.
+
+var obj4 = Object.create(null, { f4: {value: 4} });
+var obj3 = Object.create(obj4, { f3: {value: 3} });
+var obj2 = Object.create(obj3, { f2: {value: 2} });
+var obj1 = Object.create(obj2, { f1: {value: 1} });
+var obj0 = Object.create(obj1, { f0: {value: 0} });
+
+function get4(obj) { return obj.f4; }
+
+assertEquals(4, get4(obj0));
+assertEquals(4, get4(obj0));
+%OptimizeFunctionOnNextCall(get4);
+assertEquals(4, get4(obj0));
+assertEquals(4, get4(obj0));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-gvn.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-gvn.js
index 358daf7..01b1aa9 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/regress-gvn.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-gvn.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --noalways-opt
+// Flags: --noalways-opt --allow-natives-syntax
//
// Regression test for global value numbering.
@@ -39,10 +39,11 @@ function test(a) {
var a = new Array();
-var n = 100000000;
+var n = 100;
var result = 0;
for (var i = 0; i < n; ++i) {
+ if (i == 10) %OptimizeFunctionOnNextCall(test);
a[0] = 0;
result += test(a);
}
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-or.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-or.js
index 89f7802..939f2c3 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/regress-or.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-or.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
// Test deoptimization inside short-circuited expressions.
function f1(x) {
var c = "fail";
@@ -36,7 +38,8 @@ function f1(x) {
function g1() { try { return 1; } finally {} }
-for (var i=0; i<10000000; i++) f1(42);
+for (var i = 0; i < 5; i++) f1(42);
+%OptimizeFunctionOnNextCall(f1);
assertEquals(-1, f1(0));
assertEquals(-43, f1(42));
@@ -52,6 +55,7 @@ function f2(x) {
function g2() { try { return 0; } finally {} }
-for (var i=0; i<10000000; i++) f2(42);
+for (var i = 0; i < 5; i++) f2(42);
+%OptimizeFunctionOnNextCall(f2);
assertEquals(-1, f2(""));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/rotate.js b/src/3rdparty/v8/test/mjsunit/compiler/rotate.js
new file mode 100644
index 0000000..14fe9da
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/rotate.js
@@ -0,0 +1,224 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+// Test shift operations that can be replaced by rotate operation.
+
+function SideEffect() {
+ with ({}) { } // not inlinable
+}
+
+function Twenty() {
+ SideEffect();
+ return 20;
+}
+
+function Twelve() {
+ SideEffect();
+ return 12;
+}
+
+
+function ROR(x, sa) {
+ return (x >>> sa) | (x << (32 - sa));
+}
+
+function ROR1(x, sa) {
+ return (x >>> sa) | (x << (32 - sa));
+}
+
+function ROR2(x, sa) {
+ return (x >>> (32 - sa)) | (x << (sa));
+}
+
+function ROR3(x, sa) {
+ return (x << (32 - sa)) | (x >>> sa);
+}
+
+function ROR4(x, sa) {
+ return (x << (sa)) | (x >>> (32 - sa));
+}
+
+assertEquals(1 << ((2 % 32)), ROR(1, 30));
+assertEquals(1 << ((2 % 32)), ROR(1, 30));
+%OptimizeFunctionOnNextCall(ROR);
+assertEquals(1 << ((2 % 32)), ROR(1, 30));
+
+assertEquals(0xF0000FFF | 0, ROR1(0x0000FFFF, 4));
+assertEquals(0xF0000FFF | 0, ROR1(0x0000FFFF, 4));
+%OptimizeFunctionOnNextCall(ROR1);
+assertEquals(0xF0000FFF | 0, ROR1(0x0000FFFF, 4));
+
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, 20));
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, 20));
+%OptimizeFunctionOnNextCall(ROR1);
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, 20));
+
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, Twenty()));
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, Twenty()));
+%OptimizeFunctionOnNextCall(ROR1);
+assertEquals(0x0FFFF000 | 0, ROR1(0x0000FFFF, Twenty()));
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(0xFFFFFFFF | 0, ROR1(0xFFFFFFFF, i));
+ assertEquals(0xFFFFFFFF | 0, ROR1(0xFFFFFFFF, i));
+ %OptimizeFunctionOnNextCall(ROR1);
+ assertEquals(0xFFFFFFFF | 0, ROR1(0xFFFFFFFF, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(-1, ROR1(-1, i));
+ assertEquals(-1, ROR1(-1, i));
+ %OptimizeFunctionOnNextCall(ROR1);
+ assertEquals(-1, ROR1(-1, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(1 << (32 - (i % 32)), ROR1(1, i));
+ assertEquals(1 << (32 - (i % 32)), ROR1(1, i));
+ %OptimizeFunctionOnNextCall(ROR1);
+ assertEquals(1 << (32 - (i % 32)), ROR1(1, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(1 << (32 - (i % 32)), ROR1(1.4, i));
+ assertEquals(1 << (32 - (i % 32)), ROR1(1.4, i));
+ %OptimizeFunctionOnNextCall(ROR1);
+ assertEquals(1 << (32 - (i % 32)), ROR1(1.4, i));
+}
+
+
+
+assertEquals(0xF0000FFF | 0, ROR2(0x0000FFFF, 28));
+assertEquals(0xF0000FFF | 0, ROR2(0x0000FFFF, 28));
+%OptimizeFunctionOnNextCall(ROR2);
+assertEquals(0xF0000FFF | 0, ROR2(0x0000FFFF, 28));
+
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, 12));
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, 12));
+%OptimizeFunctionOnNextCall(ROR2);
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, 12));
+
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, Twelve()));
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, Twelve()));
+%OptimizeFunctionOnNextCall(ROR2);
+assertEquals(0x0FFFF000 | 0, ROR2(0x0000FFFF, Twelve()));
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(0xFFFFFFFF | 0, ROR2(0xFFFFFFFF, i));
+ assertEquals(0xFFFFFFFF | 0, ROR2(0xFFFFFFFF, i));
+ %OptimizeFunctionOnNextCall(ROR2);
+ assertEquals(0xFFFFFFFF | 0, ROR2(0xFFFFFFFF, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(-1, ROR2(-1, i));
+ assertEquals(-1, ROR2(-1, i));
+ %OptimizeFunctionOnNextCall(ROR2);
+ assertEquals(-1, ROR2(-1, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(1 << ((i % 32)), ROR2(1, i));
+ assertEquals(1 << ((i % 32)), ROR2(1, i));
+ %OptimizeFunctionOnNextCall(ROR2);
+ assertEquals(1 << ((i % 32)), ROR2(1, i));
+}
+
+assertEquals(0xF0000FFF | 0, ROR3(0x0000FFFF, 4));
+assertEquals(0xF0000FFF | 0, ROR3(0x0000FFFF, 4));
+%OptimizeFunctionOnNextCall(ROR3);
+assertEquals(0xF0000FFF | 0, ROR3(0x0000FFFF, 4));
+
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, 20));
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, 20));
+%OptimizeFunctionOnNextCall(ROR3);
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, 20));
+
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, Twenty()));
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, Twenty()));
+%OptimizeFunctionOnNextCall(ROR3);
+assertEquals(0x0FFFF000 | 0, ROR3(0x0000FFFF, Twenty()));
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(0xFFFFFFFF | 0, ROR3(0xFFFFFFFF, i));
+ assertEquals(0xFFFFFFFF | 0, ROR3(0xFFFFFFFF, i));
+ %OptimizeFunctionOnNextCall(ROR3);
+ assertEquals(0xFFFFFFFF | 0, ROR3(0xFFFFFFFF, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(-1, ROR3(-1, i));
+ assertEquals(-1, ROR3(-1, i));
+ %OptimizeFunctionOnNextCall(ROR3);
+ assertEquals(-1, ROR3(-1, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(1 << (32 - (i % 32)), ROR3(1, i));
+ assertEquals(1 << (32 - (i % 32)), ROR3(1, i));
+ %OptimizeFunctionOnNextCall(ROR3);
+ assertEquals(1 << (32 - (i % 32)), ROR3(1, i));
+}
+
+assertEquals(0xF0000FFF | 0, ROR4(0x0000FFFF, 28));
+assertEquals(0xF0000FFF | 0, ROR4(0x0000FFFF, 28));
+%OptimizeFunctionOnNextCall(ROR4);
+assertEquals(0xF0000FFF | 0, ROR4(0x0000FFFF, 28));
+
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, 12));
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, 12));
+%OptimizeFunctionOnNextCall(ROR4);
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, 12));
+
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, Twelve()));
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, Twelve()));
+%OptimizeFunctionOnNextCall(ROR4);
+assertEquals(0x0FFFF000 | 0, ROR4(0x0000FFFF, Twelve()));
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(0xFFFFFFFF | 0, ROR4(0xFFFFFFFF, i));
+ assertEquals(0xFFFFFFFF | 0, ROR4(0xFFFFFFFF, i));
+ %OptimizeFunctionOnNextCall(ROR4);
+ assertEquals(0xFFFFFFFF | 0, ROR4(0xFFFFFFFF, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(-1, ROR4(-1, i));
+ assertEquals(-1, ROR4(-1, i));
+ %OptimizeFunctionOnNextCall(ROR4);
+ assertEquals(-1, ROR4(-1, i));
+}
+
+for (var i = 0; i <= 100; i++) {
+ assertEquals(1 << ((i % 32)), ROR4(1, i));
+ assertEquals(1 << ((i % 32)), ROR4(1, i));
+ %OptimizeFunctionOnNextCall(ROR4);
+ assertEquals(1 << ((i % 32)), ROR4(1, i));
+}
+
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/uint32.js b/src/3rdparty/v8/test/mjsunit/compiler/uint32.js
new file mode 100644
index 0000000..abed285
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/uint32.js
@@ -0,0 +1,173 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+// Test uint32 handing in optimized frames.
+
+var K1 = 0x7fffffff;
+var K2 = 0xffffffff;
+
+var uint32_array = new Uint32Array(2);
+uint32_array[0] = K1;
+uint32_array[1] = K2;
+
+function ChangeI2T(arr, i) {
+ return uint32_array[i];
+}
+
+assertEquals(K1, ChangeI2T(uint32_array, 0));
+assertEquals(K2, ChangeI2T(uint32_array, 1));
+%OptimizeFunctionOnNextCall(ChangeI2T);
+assertEquals(K1, ChangeI2T(uint32_array, 0));
+// Loop to force inline allocation failure and a call into runtime.
+for (var i = 0; i < 80000; i++) {
+ assertEquals(K2, ChangeI2T(uint32_array, 1));
+}
+
+function SideEffect() {
+ with ({}) { } // not inlinable
+}
+
+function Deopt(obj, arr, i) {
+ var x = arr[i];
+ SideEffect(); // x will be used by HSimulate.
+ obj.x;
+ return x;
+}
+
+assertEquals(K1, Deopt({x: 0}, uint32_array, 0));
+assertEquals(K2, Deopt({x: 0}, uint32_array, 1));
+%OptimizeFunctionOnNextCall(Deopt);
+assertEquals(K2, Deopt({}, uint32_array, 1));
+
+function ChangeI2D(arr) {
+ // This addition will have a double type feedback so ChangeI2D will
+ // be generated for its operands.
+ return arr[0] + arr[1];
+}
+
+assertEquals(K1 + K2, ChangeI2D(uint32_array));
+assertEquals(K1 + K2, ChangeI2D(uint32_array));
+%OptimizeFunctionOnNextCall(ChangeI2D);
+assertEquals(K1 + K2, ChangeI2D(uint32_array));
+
+function ShrShr(val) {
+ return (val >>> 0) >>> 1;
+}
+
+assertEquals(K1, ShrShr(K2 | 0));
+assertEquals(K1, ShrShr(K2 | 0));
+%OptimizeFunctionOnNextCall(ShrShr);
+assertEquals(K1, ShrShr(K2 | 0));
+
+function SarShr(val) {
+ return val >> (-2 >>> 0);
+}
+
+var K3 = 0x80000000;
+assertEquals(-2, SarShr(K3 | 0));
+assertEquals(-2, SarShr(K3 | 0));
+%OptimizeFunctionOnNextCall(SarShr);
+assertEquals(-2, SarShr(K3 | 0));
+
+function Uint32Phi(a, b, c) {
+ var i = a ? (b >>> 0) : (c >>> 0);
+ return (i | 0);
+}
+
+var K4 = 0x80000001;
+assertEquals(K3 | 0, Uint32Phi(true, K3, K4));
+assertEquals(K4 | 0, Uint32Phi(false, K3, K4));
+assertEquals(K3 | 0, Uint32Phi(true, K3, K4));
+assertEquals(K4 | 0, Uint32Phi(false, K3, K4));
+%OptimizeFunctionOnNextCall(Uint32Phi);
+assertEquals(K3 | 0, Uint32Phi(true, K3, K4));
+assertEquals(K4 | 0, Uint32Phi(false, K3, K4));
+
+function NonUint32Phi(a, b, c) {
+ var i = a ? (b >>> 0) : c;
+ return (i | 0);
+}
+
+assertEquals(K3 | 0, NonUint32Phi(true, K3, K4));
+assertEquals(K4 | 0, NonUint32Phi(false, K3, K4));
+assertEquals(K3 | 0, NonUint32Phi(true, K3, K4));
+assertEquals(K4 | 0, NonUint32Phi(false, K3, K4));
+%OptimizeFunctionOnNextCall(NonUint32Phi);
+assertEquals(K3 | 0, NonUint32Phi(true, K3, K4));
+assertEquals(K4 | 0, NonUint32Phi(false, K3, K4));
+
+function PhiOfPhi(x) {
+ var a = (x >>> 0);
+ for (var i = 0; i < 2; i++) {
+ for (var j = 0; j < 2; j++) {
+ a = (a >>> 0);
+ }
+ }
+ return (a | 0);
+}
+
+assertEquals(1, PhiOfPhi(1));
+assertEquals(1, PhiOfPhi(1));
+%OptimizeFunctionOnNextCall(PhiOfPhi);
+assertEquals(K3 | 0, PhiOfPhi(K3));
+
+function PhiOfPhiUnsafe(x) {
+ var a = x >>> 0;
+ for (var i = 0; i < 2; i++) {
+ for (var j = 0; j < 2; j++) {
+ a = (a >>> 0);
+ }
+ }
+ return a + a;
+}
+
+assertEquals(2, PhiOfPhiUnsafe(1));
+assertEquals(2, PhiOfPhiUnsafe(1));
+%OptimizeFunctionOnNextCall(PhiOfPhiUnsafe);
+assertEquals(2 * K3, PhiOfPhiUnsafe(K3));
+
+var old_array = new Array(1000);
+
+for (var i = 0; i < old_array.length; i++) old_array[i] = null;
+
+// Force promotion.
+gc();
+gc();
+
+function FillOldArrayWithHeapNumbers(N) {
+ for (var i = 0; i < N; i++) {
+ old_array[i] = uint32_array[1];
+ }
+}
+
+FillOldArrayWithHeapNumbers(1);
+FillOldArrayWithHeapNumbers(1);
+%OptimizeFunctionOnNextCall(FillOldArrayWithHeapNumbers);
+FillOldArrayWithHeapNumbers(old_array.length);
+gc();
diff --git a/src/3rdparty/v8/test/mjsunit/count-based-osr.js b/src/3rdparty/v8/test/mjsunit/count-based-osr.js
index 125c4e2..fbff91e 100644
--- a/src/3rdparty/v8/test/mjsunit/count-based-osr.js
+++ b/src/3rdparty/v8/test/mjsunit/count-based-osr.js
@@ -25,7 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --count-based-interrupts --interrupt-budget=10 --weighted-back-edges --allow-natives-syntax
+// Flags: --count-based-interrupts --interrupt-budget=10 --weighted-back-edges
+// Flags: --allow-natives-syntax --noparallel-recompilation
// Test that OSR works properly when using count-based interrupting/profiling.
diff --git a/src/3rdparty/v8/test/mjsunit/d8-os.js b/src/3rdparty/v8/test/mjsunit/d8-os.js
index 239938c..f6b9839 100644
--- a/src/3rdparty/v8/test/mjsunit/d8-os.js
+++ b/src/3rdparty/v8/test/mjsunit/d8-os.js
@@ -129,13 +129,13 @@ if (this.os && os.system) {
have_echo = false;
}
if (have_sleep) {
- assertThrows("os.system('sleep', ['2000'], 200);", "sleep 1");
+ assertThrows("os.system('sleep', ['2000'], 20);", "sleep 1");
// Check we time out with total time.
- assertThrows("os.system('sleep', ['2000'], -1, 200);", "sleep 2");
+ assertThrows("os.system('sleep', ['2000'], -1, 20);", "sleep 2");
// Check that -1 means no timeout.
- os.system('sleep', ['1'], -1, -1);
+ os.system('sleep', ['0.1'], -1, -1);
}
diff --git a/src/3rdparty/v8/test/mjsunit/date-parse.js b/src/3rdparty/v8/test/mjsunit/date-parse.js
index b46e39a..cb4a951 100644
--- a/src/3rdparty/v8/test/mjsunit/date-parse.js
+++ b/src/3rdparty/v8/test/mjsunit/date-parse.js
@@ -287,6 +287,9 @@ for (var i = 0; i < 24 * 365 * 100; i += 150) {
var testCasesNegative = [
'May 25 2008 1:30 (PM)) UTC', // Bad unmatched ')' after number.
'May 25 2008 1:30( )AM (PM)', //
+ 'a1', // Issue 126448, 53209.
+ 'nasfdjklsfjoaifg1',
+ 'x_2',
'May 25 2008 AAA (GMT)']; // Unknown word after number.
testCasesNegative.forEach(function (s) {
diff --git a/src/3rdparty/v8/test/mjsunit/date.js b/src/3rdparty/v8/test/mjsunit/date.js
index 3e153ab..5aaa3bb 100644
--- a/src/3rdparty/v8/test/mjsunit/date.js
+++ b/src/3rdparty/v8/test/mjsunit/date.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
// Test date construction from other dates.
var date0 = new Date(1111);
var date1 = new Date(date0);
@@ -319,3 +321,23 @@ for (var i = 0; i < 24; i++) {
assertEquals(70674603500 - ms, Date.parse(string), string);
}
}
+
+assertThrows('Date.prototype.setTime.call("", 1);', TypeError);
+assertThrows('Date.prototype.setYear.call("", 1);', TypeError);
+assertThrows('Date.prototype.setHours.call("", 1, 2, 3, 4);', TypeError);
+assertThrows('Date.prototype.getDate.call("");', TypeError);
+assertThrows('Date.prototype.getUTCDate.call("");', TypeError);
+
+var date = new Date();
+date.getTime();
+date.getTime();
+%OptimizeFunctionOnNextCall(Date.prototype.getTime);
+assertThrows(function() { Date.prototype.getTime.call(""); }, TypeError);
+assertTrue(%GetOptimizationStatus(Date.prototype.getTime) != 1);
+
+date.getYear();
+date.getYear();
+%OptimizeFunctionOnNextCall(Date.prototype.getYear);
+assertThrows(function() { Date.prototype.getYear.call(""); }, TypeError);
+opt_status = %GetOptimizationStatus(Date.prototype.getYear);
+assertTrue(%GetOptimizationStatus(Date.prototype.getTime) != 1); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/debug-break-inline.js b/src/3rdparty/v8/test/mjsunit/debug-break-inline.js
index 4418fa8..464cb73 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-break-inline.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-break-inline.js
@@ -26,6 +26,7 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --noparallel-recompilation
// This test tests that deoptimization due to debug breaks works for
// inlined functions where the full-code is generated before the
diff --git a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
index efbb2cc..8d91b97 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -25,7 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax
+// Flags: --inline-construct
+
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -43,13 +45,17 @@ var input = [
];
var expected = [
- { locals: {a0: 1.01, b0: 2.02}, args: { names: ["i", "x0", "y0"], values: [0, 3.03, 4.04] } },
- { locals: {a1: 3.03, b1: 4.04}, args: { names: ["i", "x1", "y1"], values: [1, 5.05, 6.06] } },
- { locals: {a2: 5.05, b2: 6.06}, args: { names: ["i"], values: [2] } },
- { locals: {a3: 7.07, b3: 8.08}, args: { names: ["i", "x3", "y3", "z3"],
- values: [3, 9.09, 10.10, undefined] }
- },
- { locals: {a4: 9.09, b4: 10.10}, args: { names: ["i", "x4", "y4"], values: [4, 11.11, 12.12] } }
+ { locals: {a0: 1.01, b0: 2.02},
+ args: { names: ["i", "x0", "y0"], values: [0, 3.03, 4.04] } },
+ { locals: {a1: 3.03, b1: 4.04},
+ args: { names: ["i", "x1", "y1"], values: [1, 5.05, 6.06] } },
+ { locals: {a2: 5.05, b2: 6.06},
+ args: { names: ["i"], values: [2] } },
+ { locals: {a3: 7.07, b3: 8.08},
+ args: { names: ["i", "x3", "y3", "z3"],
+ values: [3, 9.09, 10.10, undefined] } },
+ { locals: {a4: 9.09, b4: 10.10},
+ args: { names: ["i", "x4", "y4"], values: [4, 11.11, 12.12] } }
];
function arraySum(arr) {
@@ -78,7 +84,8 @@ function listener(event, exec_state, event_data, data) {
// All frames except the bottom one have expected arguments.
for (var j = 0; j < expected_args.names.length; j++) {
assertEquals(expected_args.names[j], frame.argumentName(j));
- assertEquals(expected_args.values[j], frame.argumentValue(j).value());
+ assertEquals(expected_args.values[j],
+ frame.argumentValue(j).value());
}
// All frames except the bottom one have two scopes.
@@ -87,13 +94,15 @@ function listener(event, exec_state, event_data, data) {
assertEquals(debug.ScopeType.Global, frame.scope(1).scopeType());
Object.keys(expected_locals).forEach(function (name) {
- assertEquals(expected_locals[name], frame.scope(0).scopeObject().value()[name]);
+ assertEquals(expected_locals[name],
+ frame.scope(0).scopeObject().value()[name]);
});
for (var j = 0; j < expected_args.names.length; j++) {
var arg_name = expected_args.names[j];
var arg_value = expected_args.values[j];
- assertEquals(arg_value, frame.scope(0).scopeObject().value()[arg_name]);
+ assertEquals(arg_value,
+ frame.scope(0).scopeObject().value()[arg_name]);
}
// Evaluate in the inlined frame.
@@ -114,7 +123,8 @@ function listener(event, exec_state, event_data, data) {
map(function (k) { return expected_locals[k]; }));
assertEquals(expected_locals_sum + expected_args_sum,
- frame.evaluate(Object.keys(expected_locals).join('+') + ' + ' +
+ frame.evaluate(Object.keys(expected_locals).join('+') +
+ ' + ' +
expected_args.names.join('+')).value());
var arguments_sum = expected_args.names.map(function(_, idx) {
diff --git a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
index 9c56a12..f662912 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -25,7 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax
+// Flags: --inline-construct
+
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -35,11 +37,16 @@ var exception = false;
var testingConstructCall = false;
var expected = [
- { locals: {a0: 1, b0: 2}, args: { names: ["i", "x0", "y0"], values: [0, 3, 4] } },
- { locals: {a1: 3, b1: 4}, args: { names: ["i", "x1", "y1"], values: [1, 5, 6] } },
- { locals: {a2: 5, b2: 6}, args: { names: ["i"], values: [2] } },
- { locals: {a3: 7, b3: 8}, args: { names: ["i", "x3", "y3", "z3"], values: [3, 9, 10, undefined] } },
- { locals: {a4: 9, b4: 10}, args: { names: ["i", "x4", "y4"], values: [4, 11, 12] } }
+ { locals: {a0: 1, b0: 2},
+ args: { names: ["i", "x0", "y0"], values: [0, 3, 4] } },
+ { locals: {a1: 3, b1: 4},
+ args: { names: ["i", "x1", "y1"], values: [1, 5, 6] } },
+ { locals: {a2: 5, b2: 6},
+ args: { names: ["i"], values: [2] } },
+ { locals: {a3: 7, b3: 8},
+ args: { names: ["i", "x3", "y3", "z3"], values: [3, 9, 10, undefined] } },
+ { locals: {a4: 9, b4: 10},
+ args: { names: ["i", "x4", "y4"], values: [4, 11, 12] } }
];
function arraySum(arr) {
@@ -68,7 +75,8 @@ function listener(event, exec_state, event_data, data) {
// All frames except the bottom one have expected arguments.
for (var j = 0; j < expected_args.names.length; j++) {
assertEquals(expected_args.names[j], frame.argumentName(j));
- assertEquals(expected_args.values[j], frame.argumentValue(j).value());
+ assertEquals(expected_args.values[j],
+ frame.argumentValue(j).value());
}
// All frames except the bottom one have two scopes.
@@ -77,13 +85,15 @@ function listener(event, exec_state, event_data, data) {
assertEquals(debug.ScopeType.Global, frame.scope(1).scopeType());
Object.keys(expected_locals).forEach(function (name) {
- assertEquals(expected_locals[name], frame.scope(0).scopeObject().value()[name]);
+ assertEquals(expected_locals[name],
+ frame.scope(0).scopeObject().value()[name]);
});
for (var j = 0; j < expected_args.names.length; j++) {
var arg_name = expected_args.names[j];
var arg_value = expected_args.values[j];
- assertEquals(arg_value, frame.scope(0).scopeObject().value()[arg_name]);
+ assertEquals(arg_value,
+ frame.scope(0).scopeObject().value()[arg_name]);
}
// Evaluate in the inlined frame.
@@ -104,7 +114,8 @@ function listener(event, exec_state, event_data, data) {
map(function (k) { return expected_locals[k]; }));
assertEquals(expected_locals_sum + expected_args_sum,
- frame.evaluate(Object.keys(expected_locals).join('+') + ' + ' +
+ frame.evaluate(Object.keys(expected_locals).join('+') +
+ ' + ' +
expected_args.names.join('+')).value());
var arguments_sum = expected_args.names.map(function(_, idx) {
diff --git a/src/3rdparty/v8/test/mjsunit/debug-liveedit-double-call.js b/src/3rdparty/v8/test/mjsunit/debug-liveedit-double-call.js
new file mode 100644
index 0000000..1df806a
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-liveedit-double-call.js
@@ -0,0 +1,142 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+
+Debug = debug.Debug
+
+
+function TestCase(test_scenario, expected_output) {
+ // Global variable, accessed from eval'd script.
+ test_output = "";
+
+ var script_text_generator = (function() {
+ var variables = { a: 1, b: 1, c: 1, d: 1, e: 1, f: 1 };
+
+ return {
+ get: function() {
+ return "(function() {\n " +
+ " function A() {\n " +
+ " test_output += 'a' + " + variables.a + ";\n " +
+ " test_output += '=';\n " +
+ " debugger;\n " +
+ " return 'Capybara';\n " +
+ " }\n " +
+ " function B(p1, p2) {\n " +
+ " test_output += 'b' + " + variables.b + ";\n " +
+ " return A();\n " +
+ " }\n " +
+ " function C() {\n " +
+ " test_output += 'c' + " + variables.c + ";\n " +
+ " // Function call with argument adaptor is intentional.\n " +
+ " return B();\n " +
+ " }\n " +
+ " function D() {\n " +
+ " test_output += 'd' + " + variables.d + ";\n " +
+ " // Function call with argument adaptor is intentional.\n " +
+ " return C(1, 2);\n " +
+ " }\n " +
+ " function E() {\n " +
+ " test_output += 'e' + " + variables.e + ";\n " +
+ " return D();\n " +
+ " }\n " +
+ " function F() {\n " +
+ " test_output += 'f' + " + variables.f + ";\n " +
+ " return E();\n " +
+ " }\n " +
+ " return F();\n " +
+ "})\n";
+ },
+ change: function(var_name) {
+ variables[var_name]++;
+ }
+ };
+ })();
+
+ var test_fun = eval(script_text_generator.get());
+
+ var script = Debug.findScript(test_fun);
+
+ var scenario_pos = 0;
+
+ function DebuggerStatementHandler() {
+ while (true) {
+ assertTrue(scenario_pos < test_scenario.length);
+ var change_var = test_scenario[scenario_pos++];
+ if (change_var == '=') {
+ // Continue.
+ return;
+ }
+ script_text_generator.change(change_var);
+ try {
+ Debug.LiveEdit.SetScriptSource(script, script_text_generator.get(),
+ false, []);
+ } catch (e) {
+ print("LiveEdit exception: " + e);
+ throw e;
+ }
+ }
+ }
+
+ var saved_exception = null;
+
+ function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ try {
+ DebuggerStatementHandler();
+ } catch (e) {
+ saved_exception = e;
+ }
+ } else {
+ print("Other: " + event);
+ }
+ }
+
+ Debug.setListener(listener);
+ assertEquals("Capybara", test_fun());
+ Debug.setListener(null);
+
+ if (saved_exception) {
+ print("Exception: " + saved_exception);
+ assertUnreachable();
+ }
+
+ print(test_output);
+
+ assertEquals(expected_output, test_output);
+}
+
+TestCase(['='], "f1e1d1c1b1a1=");
+
+TestCase(['c', '=', '='], "f1e1d1c1b1a1=c2b1a1=");
+
+TestCase(['b', 'c', 'd', 'e', '=', '='], "f1e1d1c1b1a1=e2d2c2b2a1=");
+
+TestCase(['b', 'c', '=', 'b', 'c', 'd', 'e', '=', '='], "f1e1d1c1b1a1=c2b2a1=e2d2c3b3a1=");
+
+TestCase(['e', 'f', '=', '='], "f1e1d1c1b1a1=f2e2d1c1b1a1=");
diff --git a/src/3rdparty/v8/test/mjsunit/debug-liveedit-restart-frame.js b/src/3rdparty/v8/test/mjsunit/debug-liveedit-restart-frame.js
new file mode 100644
index 0000000..d978a97
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-liveedit-restart-frame.js
@@ -0,0 +1,153 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+
+Debug = debug.Debug
+
+function FindCallFrame(exec_state, frame_code) {
+ var number = Number(frame_code);
+ if (number >= 0) {
+ return exec_state.frame(number);
+ } else {
+ for (var i = 0; i < exec_state.frameCount(); i++) {
+ var frame = exec_state.frame(i);
+ var func_mirror = frame.func();
+ if (frame_code == func_mirror.name()) {
+ return frame;
+ }
+ }
+ }
+ throw new Error("Failed to find function name " + function_name);
+}
+
+function TestCase(test_scenario, expected_output) {
+ // Global variable, accessed from eval'd script.
+ test_output = "";
+
+ function TestCode() {
+ function A() {
+ // Extra stack variable. To make function not slim.
+ // Restarter doesn't work on slim function when stopped on 'debugger'
+ // statement. (There is no padding for 'debugger' statement).
+ var o = {};
+ test_output += 'A';
+ test_output += '=';
+ debugger;
+ return 'Capybara';
+ }
+ function B(p1, p2) {
+ test_output += 'B';
+ return A();
+ }
+ function C() {
+ test_output += 'C';
+ // Function call with argument adaptor is intentional.
+ return B();
+ }
+ function D() {
+ test_output += 'D';
+ // Function call with argument adaptor is intentional.
+ return C(1, 2);
+ }
+ function E() {
+ test_output += 'E';
+ return D();
+ }
+ function F() {
+ test_output += 'F';
+ return E();
+ }
+ return F();
+ }
+
+ var scenario_pos = 0;
+
+ function DebuggerStatementHandler(exec_state) {
+ while (true) {
+ assertTrue(scenario_pos < test_scenario.length);
+ var change_code = test_scenario[scenario_pos++];
+ if (change_code == '=') {
+ // Continue.
+ return;
+ }
+ var frame = FindCallFrame(exec_state, change_code);
+ // Throws if fails.
+ Debug.LiveEdit.RestartFrame(frame);
+ }
+ }
+
+ var saved_exception = null;
+
+ function listener(event, exec_state, event_data, data) {
+ if (saved_exception != null) {
+ return;
+ }
+ if (event == Debug.DebugEvent.Break) {
+ try {
+ DebuggerStatementHandler(exec_state);
+ } catch (e) {
+ saved_exception = e;
+ }
+ } else {
+ print("Other: " + event);
+ }
+ }
+
+ Debug.setListener(listener);
+ assertEquals("Capybara", TestCode());
+ Debug.setListener(null);
+
+ if (saved_exception) {
+ print("Exception: " + saved_exception);
+ print("Stack: " + saved_exception.stack);
+ assertUnreachable();
+ }
+
+ print(test_output);
+
+ assertEquals(expected_output, test_output);
+}
+
+TestCase('0==', "FEDCBA=A=");
+TestCase('1==', "FEDCBA=BA=");
+TestCase('2==', "FEDCBA=CBA=");
+TestCase('3==', "FEDCBA=DCBA=");
+TestCase('4==', "FEDCBA=EDCBA=");
+TestCase('5==', "FEDCBA=FEDCBA=");
+
+TestCase('=', "FEDCBA=");
+
+TestCase('C==', "FEDCBA=CBA=");
+
+TestCase('B=C=A=D==', "FEDCBA=BA=CBA=A=DCBA=");
+
+// Successive restarts don't work now and require additional fix.
+//TestCase('BCDE==', "FEDCBA=EDCBA=");
+//TestCase('BC=BCDE==', "FEDCBA=CBA=EDCBA=");
+//TestCase('EF==', "FEDCBA=FEDCBA=");
diff --git a/src/3rdparty/v8/test/mjsunit/debug-multiple-breakpoints.js b/src/3rdparty/v8/test/mjsunit/debug-multiple-breakpoints.js
index 1047410..d8b1d94 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-multiple-breakpoints.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-multiple-breakpoints.js
@@ -89,7 +89,7 @@ g();
assertEquals(3, break_point_hit_count);
// Finally test with many break points.
-test_count = 100;
+test_count = 10;
bps = new Array(test_count);
break_point_hit_count = 0;
for (var i = 0; i < test_count; i++) {
diff --git a/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-closure.js b/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-closure.js
new file mode 100644
index 0000000..7c89718
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-closure.js
@@ -0,0 +1,67 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+// Simple debug event handler which just counts the number of break points hit.
+var break_point_hit_count = 0;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ break_point_hit_count++;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+function makeClosure() {
+ var x;
+ return function() {
+ return x; // Breakpoint line ( #47 )
+ };
+}
+
+// Create closure before break point is set.
+var closure = makeClosure();
+
+// The debugger triggers re-compilation.
+assertEquals(0, Debug.scriptBreakPoints().length);
+var scr = Debug.findScript(makeClosure);
+var sbp = Debug.setScriptBreakPointById(scr.id, 47);
+assertEquals(1, Debug.scriptBreakPoints().length);
+
+// Ensure the closure actually triggers a break point hit.
+closure();
+assertEquals(1, break_point_hit_count);
+
+// Remove script break point.
+assertEquals(1, Debug.scriptBreakPoints().length);
+Debug.clearBreakPoint(sbp);
+assertEquals(0, Debug.scriptBreakPoints().length);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-nested.js b/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-nested.js
new file mode 100644
index 0000000..ce25c17
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-script-breakpoints-nested.js
@@ -0,0 +1,82 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+// Simple debug event handler which just counts the number of break points hit.
+var break_point_hit_count = 0;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ break_point_hit_count++;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+eval(
+ "var inner;\n" +
+ "function outer() {\n" + // Non-trivial outer closure.
+ " var x = 5;\n" +
+ " function a() {\n" +
+ " var foo = 0, y = 7;\n" +
+ " function b() {\n" +
+ " var bar = 0, baz = 0, z = 11;\n" +
+ " function c() {\n" +
+ " return x + y + z;\n" + // Breakpoint line ( #8 )
+ " }\n" +
+ " inner = c;\n" +
+ " return c();\n" +
+ " }\n" +
+ " return b();\n" +
+ " }\n" +
+ " return a();\n" +
+ "}"
+);
+
+var script = Debug.findScript(outer);
+
+// The debugger triggers compilation of inner closures.
+assertEquals(0, Debug.scriptBreakPoints().length);
+var sbp = Debug.setScriptBreakPointById(script.id, 8);
+assertEquals(1, Debug.scriptBreakPoints().length);
+
+// The compiled outer closure should behave correctly.
+assertEquals(23, outer());
+assertEquals(1, break_point_hit_count);
+
+// The compiled inner closure should behave correctly.
+assertEquals(23, inner());
+assertEquals(2, break_point_hit_count);
+
+// Remove script break point.
+assertEquals(1, Debug.scriptBreakPoints().length);
+Debug.clearBreakPoint(sbp);
+assertEquals(0, Debug.scriptBreakPoints().length);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-script.js b/src/3rdparty/v8/test/mjsunit/debug-script.js
index 9767888..b9dbc07 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-script.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-script.js
@@ -25,9 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --expose-gc
+// Flags: --expose-debug-as debug --expose-gc --noparallel-recompilation
+// Flags: --send-idle-notification
+
// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
+Debug = debug.Debug;
Date();
RegExp();
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part1.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part1.js
new file mode 100644
index 0000000..f2f9d91
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part1.js
@@ -0,0 +1,190 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+
+function test1() {
+ debugger;
+ with ({x:1}) {
+ x = 2;
+ }
+}
+test1();
+
+
+function test2() {
+ if (true) {
+ with ({}) {
+ debugger;
+ }
+ } else {
+ with ({}) {
+ return 10;
+ }
+ }
+}
+test2();
+
+
+function test3() {
+ if (true) {
+ debugger;
+ } else {
+ with ({}) {
+ return 10;
+ }
+ }
+}
+test3();
+
+
+function test4() {
+ debugger;
+ with ({x:1}) x = 1
+}
+test4();
+
+
+function test5() {
+ debugger;
+ var dummy = 1;
+ with ({}) {
+ with ({}) {
+ dummy = 2;
+ }
+ }
+ dummy = 3;
+}
+test5();
+
+
+function test6() {
+ debugger;
+ try {
+ throw 'stuff';
+ } catch (e) {
+ e = 1;
+ }
+}
+test6();
+
+
+function test7() {
+ debugger;
+ function foo() {}
+}
+test7();
+
+
+function test8() {
+ debugger;
+ (function foo() {})();
+}
+test8();
+
+
+function test10() {
+ debugger;
+ with ({}) {
+ return 10;
+ }
+}
+test10();
+
+
+function test11() {
+ debugger;
+ try {
+ throw 'stuff';
+ } catch (e) {
+ return 10;
+ }
+}
+test11();
+
+
+var prefixes = [
+ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+
+
+// Return from function constructed with Function constructor.
+var anon = 12;
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ Function(pre + "return 42")();
+ Function(pre + "return 42 ")();
+ Function(pre + "return 42;")();
+ Function(pre + "return 42; ")();
+ Function(pre + "return anon")();
+ Function(pre + "return anon ")();
+ Function(pre + "return anon;")();
+ Function(pre + "return anon; ")();
+}
+
+
+try {
+ with({}) {
+ debugger;
+ eval("{}$%:^");
+ }
+} catch(e) {
+ nop();
+}
+
+
+function nop() {}
+
+
+// With block as the last(!) statement in global code.
+with ({}) { debugger; } \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part2.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part2.js
new file mode 100644
index 0000000..121c7b7
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part2.js
@@ -0,0 +1,83 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var bodies = [ "1",
+ "1 ",
+ "1;",
+ "1; ",
+ "q",
+ "q ",
+ "q;",
+ "q; ",
+ "try { throw 'stuff' } catch (e) { e = 1; }",
+ "try { throw 'stuff' } catch (e) { e = 1; } ",
+ "try { throw 'stuff' } catch (e) { e = 1; };",
+ "try { throw 'stuff' } catch (e) { e = 1; }; " ];
+
+
+function test9() {
+ debugger;
+ for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ eval(pre + body);
+ eval("'use strict'; " + pre + body);
+ }
+ }
+}
+test9();
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part3.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part3.js
new file mode 100644
index 0000000..16b085e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part3.js
@@ -0,0 +1,80 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [
+ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var with_bodies = [ "with ({}) {}",
+ "with ({x:1}) x",
+ "with ({x:1}) x = 1",
+ "with ({x:1}) x ",
+ "with ({x:1}) x = 1 ",
+ "with ({x:1}) x;",
+ "with ({x:1}) x = 1;",
+ "with ({x:1}) x; ",
+ "with ({x:1}) x = 1; " ];
+
+
+function test9() {
+ debugger;
+ for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < with_bodies.length; ++j) {
+ var body = with_bodies[j];
+ eval(pre + body);
+ }
+ }
+}
+test9();
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part4.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part4.js
new file mode 100644
index 0000000..48f4347
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part4.js
@@ -0,0 +1,80 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [
+ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var bodies = [ "1",
+ "1 ",
+ "1;",
+ "1; ",
+ "q",
+ "q ",
+ "q;",
+ "q; ",
+ "try { throw 'stuff' } catch (e) { e = 1; }",
+ "try { throw 'stuff' } catch (e) { e = 1; } ",
+ "try { throw 'stuff' } catch (e) { e = 1; };",
+ "try { throw 'stuff' } catch (e) { e = 1; }; " ];
+
+
+// Test global eval and function constructor.
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ eval(pre + body);
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part5.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part5.js
new file mode 100644
index 0000000..f060ec3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part5.js
@@ -0,0 +1,77 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var with_bodies = [ "with ({}) {}",
+ "with ({x:1}) x",
+ "with ({x:1}) x = 1",
+ "with ({x:1}) x ",
+ "with ({x:1}) x = 1 ",
+ "with ({x:1}) x;",
+ "with ({x:1}) x = 1;",
+ "with ({x:1}) x; ",
+ "with ({x:1}) x = 1; " ];
+
+
+// Test global eval and function constructor.
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < with_bodies.length; ++j) {
+ var body = with_bodies[j];
+ eval(pre + body);
+ Function(pre + body)();
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part6.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part6.js
new file mode 100644
index 0000000..f7c8df0
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part6.js
@@ -0,0 +1,79 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var bodies = [ "1",
+ "1 ",
+ "1;",
+ "1; ",
+ "q",
+ "q ",
+ "q;",
+ "q; ",
+ "try { throw 'stuff' } catch (e) { e = 1; }",
+ "try { throw 'stuff' } catch (e) { e = 1; } ",
+ "try { throw 'stuff' } catch (e) { e = 1; };",
+ "try { throw 'stuff' } catch (e) { e = 1; }; " ];
+
+
+// Test global eval and function constructor.
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ eval("'use strict'; " + pre + body);
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part7.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part7.js
new file mode 100644
index 0000000..4f0c066
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part7.js
@@ -0,0 +1,79 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+var q = 42;
+var prefixes = [ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var bodies = [ "1",
+ "1 ",
+ "1;",
+ "1; ",
+ "q",
+ "q ",
+ "q;",
+ "q; ",
+ "try { throw 'stuff' } catch (e) { e = 1; }",
+ "try { throw 'stuff' } catch (e) { e = 1; } ",
+ "try { throw 'stuff' } catch (e) { e = 1; };",
+ "try { throw 'stuff' } catch (e) { e = 1; }; " ];
+
+
+// Test global eval and function constructor.
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ Function(pre + body)();
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part8.js
index 9c040da..f91fab5 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope-part8.js
@@ -53,191 +53,6 @@ function listener(event, exec_state, event_data, data) {
Debug.setListener(listener);
-function test1() {
- debugger;
- with ({x:1}) {
- x = 2;
- }
-}
-test1();
-
-
-function test2() {
- if (true) {
- with ({}) {
- debugger;
- }
- } else {
- with ({}) {
- return 10;
- }
- }
-}
-test2();
-
-
-function test3() {
- if (true) {
- debugger;
- } else {
- with ({}) {
- return 10;
- }
- }
-}
-test3();
-
-
-function test4() {
- debugger;
- with ({x:1}) x = 1
-}
-test4();
-
-
-function test5() {
- debugger;
- var dummy = 1;
- with ({}) {
- with ({}) {
- dummy = 2;
- }
- }
- dummy = 3;
-}
-test5();
-
-
-function test6() {
- debugger;
- try {
- throw 'stuff';
- } catch (e) {
- e = 1;
- }
-}
-test6();
-
-
-function test7() {
- debugger;
- function foo() {}
-}
-test7();
-
-
-function test8() {
- debugger;
- (function foo() {})();
-}
-test8();
-
-
-var q = 42;
-var prefixes = [ "debugger; ",
- "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
-var bodies = [ "1",
- "1 ",
- "1;",
- "1; ",
- "q",
- "q ",
- "q;",
- "q; ",
- "try { throw 'stuff' } catch (e) { e = 1; }",
- "try { throw 'stuff' } catch (e) { e = 1; } ",
- "try { throw 'stuff' } catch (e) { e = 1; };",
- "try { throw 'stuff' } catch (e) { e = 1; }; " ];
-var with_bodies = [ "with ({}) {}",
- "with ({x:1}) x",
- "with ({x:1}) x = 1",
- "with ({x:1}) x ",
- "with ({x:1}) x = 1 ",
- "with ({x:1}) x;",
- "with ({x:1}) x = 1;",
- "with ({x:1}) x; ",
- "with ({x:1}) x = 1; " ];
-
-
-function test9() {
- debugger;
- for (var i = 0; i < prefixes.length; ++i) {
- var pre = prefixes[i];
- for (var j = 0; j < bodies.length; ++j) {
- var body = bodies[j];
- eval(pre + body);
- eval("'use strict'; " + pre + body);
- }
- for (var j = 0; j < with_bodies.length; ++j) {
- var body = with_bodies[j];
- eval(pre + body);
- }
- }
-}
-test9();
-
-
-function test10() {
- debugger;
- with ({}) {
- return 10;
- }
-}
-test10();
-
-
-function test11() {
- debugger;
- try {
- throw 'stuff';
- } catch (e) {
- return 10;
- }
-}
-test11();
-
-
-// Test global eval and function constructor.
-for (var i = 0; i < prefixes.length; ++i) {
- var pre = prefixes[i];
- for (var j = 0; j < bodies.length; ++j) {
- var body = bodies[j];
- eval(pre + body);
- eval("'use strict'; " + pre + body);
- Function(pre + body)();
- }
- for (var j = 0; j < with_bodies.length; ++j) {
- var body = with_bodies[j];
- eval(pre + body);
- Function(pre + body)();
- }
-}
-
-
-try {
- with({}) {
- debugger;
- eval("{}$%:^");
- }
-} catch(e) {
- nop();
-}
-
-// Return from function constructed with Function constructor.
-var anon = 12;
-for (var i = 0; i < prefixes.length; ++i) {
- var pre = prefixes[i];
- Function(pre + "return 42")();
- Function(pre + "return 42 ")();
- Function(pre + "return 42;")();
- Function(pre + "return 42; ")();
- Function(pre + "return anon")();
- Function(pre + "return anon ")();
- Function(pre + "return anon;")();
- Function(pre + "return anon; ")();
-}
-
-
function nop() {}
@@ -417,7 +232,3 @@ function stress() {
}
stress();
-
-
-// With block as the last(!) statement in global code.
-with ({}) { debugger; } \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/deopt-minus-zero.js b/src/3rdparty/v8/test/mjsunit/deopt-minus-zero.js
new file mode 100644
index 0000000..ee09831
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/deopt-minus-zero.js
@@ -0,0 +1,56 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+/**
+ * The possible optimization states of a function. Must be in sync with the
+ * return values of Runtime_GetOptimizationStatus() in runtime.cc!
+ */
+var OptimizationState = {
+ YES: 1,
+ NO: 2,
+ ALWAYS: 3,
+ NEVER: 4
+};
+
+function mul (a, b) {
+ return a * b;
+}
+
+mul(-1, -1);
+mul(0x80000001|0, -1);
+mul(0x80000001|0, -1);
+%OptimizeFunctionOnNextCall(mul);
+mul(0, -1);
+%OptimizeFunctionOnNextCall(mul);
+mul(0, -1);
+
+var raw_optimized = %GetOptimizationStatus(mul);
+assertFalse(raw_optimized == OptimizationState.NO);
+gc();
+
diff --git a/src/3rdparty/v8/test/mjsunit/elements-kind.js b/src/3rdparty/v8/test/mjsunit/elements-kind.js
index 4aa79de..b74a212 100644
--- a/src/3rdparty/v8/test/mjsunit/elements-kind.js
+++ b/src/3rdparty/v8/test/mjsunit/elements-kind.js
@@ -34,7 +34,7 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -59,8 +59,8 @@ var elements_kind = {
}
function getKind(obj) {
- if (%HasFastSmiOnlyElements(obj)) return elements_kind.fast_smi_only;
- if (%HasFastElements(obj)) return elements_kind.fast;
+ if (%HasFastSmiElements(obj)) return elements_kind.fast_smi_only;
+ if (%HasFastObjectElements(obj)) return elements_kind.fast;
if (%HasFastDoubleElements(obj)) return elements_kind.fast_double;
if (%HasDictionaryElements(obj)) return elements_kind.dictionary;
// Every external kind is also an external array.
@@ -116,7 +116,7 @@ if (support_smi_only_arrays) {
assertKind(elements_kind.fast_smi_only, too);
}
-// Make sure the element kind transitions from smionly when a non-smi is stored.
+// Make sure the element kind transitions from smi when a non-smi is stored.
var you = new Array();
assertKind(elements_kind.fast_smi_only, you);
for (var i = 0; i < 1337; i++) {
@@ -143,7 +143,7 @@ assertKind(elements_kind.external_int, new Int32Array(0xF));
assertKind(elements_kind.external_unsigned_int, new Uint32Array(23));
assertKind(elements_kind.external_float, new Float32Array(7));
assertKind(elements_kind.external_double, new Float64Array(0));
-assertKind(elements_kind.external_pixel, new PixelArray(512));
+assertKind(elements_kind.external_pixel, new Uint8ClampedArray(512));
// Crankshaft support for smi-only array elements.
function monomorphic(array) {
@@ -224,9 +224,11 @@ if (support_smi_only_arrays) {
for (var i = 0; i < 3; i++) {
convert_mixed(doubles, "three", elements_kind.fast);
}
+ convert_mixed(construct_smis(), "three", elements_kind.fast);
+ convert_mixed(construct_doubles(), "three", elements_kind.fast);
+ %OptimizeFunctionOnNextCall(convert_mixed);
smis = construct_smis();
doubles = construct_doubles();
- %OptimizeFunctionOnNextCall(convert_mixed);
convert_mixed(smis, 1, elements_kind.fast);
convert_mixed(doubles, 1, elements_kind.fast);
assertTrue(%HaveSameMap(smis, doubles));
diff --git a/src/3rdparty/v8/test/mjsunit/elements-length-no-holey.js b/src/3rdparty/v8/test/mjsunit/elements-length-no-holey.js
new file mode 100644
index 0000000..5bac296
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/elements-length-no-holey.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+a = [1,2,3];
+a.length = 1;
+assertFalse(%HasFastHoleyElements(a));
+assertTrue(%HasFastSmiElements(a));
diff --git a/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js b/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js
index 5e78f10..017e7ec 100644
--- a/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js
+++ b/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,13 +25,13 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
+// Flags: --allow-natives-syntax --smi-only-arrays --noparallel-recompilation
// Ensure that ElementsKind transitions in various situations are hoisted (or
// not hoisted) correctly, don't change the semantics programs and don't trigger
// deopt through hoisting in important situations.
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -39,11 +39,6 @@ if (support_smi_only_arrays) {
print("Tests do NOT include smi-only arrays.");
}
-// Force existing ICs from previous stress runs to be flushed, otherwise the
-// assumptions in this test about when deoptimizations get triggered are not
-// valid.
-gc();
-
if (support_smi_only_arrays) {
// Make sure that a simple elements array transitions inside a loop before
// stores to an array gets hoisted in a way that doesn't generate a deopt in
@@ -58,10 +53,14 @@ if (support_smi_only_arrays) {
}
testDoubleConversion4(new Array(5));
+ testDoubleConversion4(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testDoubleConversion4);
testDoubleConversion4(new Array(5));
testDoubleConversion4(new Array(5));
assertTrue(2 != %GetOptimizationStatus(testDoubleConversion4));
+ %ClearFunctionTypeFeedback(testDoubleConversion4);
// Make sure that non-element related map checks that are not preceded by
// transitions in a loop still get hoisted in a way that doesn't generate a
@@ -73,17 +72,21 @@ if (support_smi_only_arrays) {
a[1] = 1;
var count = 3;
do {
- a.foo = object; // This map check should be hoistable
+ a.foo = object; // This map check should be hoistable
a[1] = object;
result = a.foo == object && a[1] == object;
} while (--count > 0);
}
testExactMapHoisting(new Array(5));
+ testExactMapHoisting(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting);
testExactMapHoisting(new Array(5));
testExactMapHoisting(new Array(5));
assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting));
+ %ClearFunctionTypeFeedback(testExactMapHoisting);
// Make sure that non-element related map checks do NOT get hoisted if they
// depend on an elements transition before them and it's not possible to hoist
@@ -98,19 +101,24 @@ if (support_smi_only_arrays) {
if (a.bar === undefined) {
a[1] = 2.5;
}
- a.foo = object; // This map check should NOT be hoistable because it
- // includes a check for the FAST_ELEMENTS map as well as
- // the FAST_DOUBLE_ELEMENTS map, which depends on the
- // double transition above in the if, which cannot be
- // hoisted.
+ a.foo = object; // This map check should NOT be hoistable because it
+ // includes a check for the FAST_ELEMENTS map as well as
+ // the FAST_DOUBLE_ELEMENTS map, which depends on the
+ // double transition above in the if, which cannot be
+ // hoisted.
} while (--count > 0);
}
testExactMapHoisting2(new Array(5));
+ testExactMapHoisting2(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting2);
testExactMapHoisting2(new Array(5));
testExactMapHoisting2(new Array(5));
- assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
+ // Temporarily disabled - see bug 2176.
+ // assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
+ %ClearFunctionTypeFeedback(testExactMapHoisting2);
// Make sure that non-element related map checks do get hoisted if they use
// the transitioned map for the check and all transitions that they depend
@@ -123,19 +131,23 @@ if (support_smi_only_arrays) {
var count = 3;
do {
a[1] = 2.5;
- a.foo = object; // This map check should be hoistable because all elements
- // transitions in the loop can also be hoisted.
+ a.foo = object; // This map check should be hoistable because all elements
+ // transitions in the loop can also be hoisted.
} while (--count > 0);
}
var add_transition = new Array(5);
add_transition.foo = 0;
- add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
+ add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
testExactMapHoisting3(new Array(5));
+ testExactMapHoisting3(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting3);
testExactMapHoisting3(new Array(5));
testExactMapHoisting3(new Array(5));
assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting3));
+ %ClearFunctionTypeFeedback(testExactMapHoisting3);
function testDominatingTransitionHoisting1(a) {
var object = new Object();
@@ -149,11 +161,21 @@ if (support_smi_only_arrays) {
} while (--count > 3);
}
+ /*
testDominatingTransitionHoisting1(new Array(5));
+ testDominatingTransitionHoisting1(new Array(5)); // Call twice to make sure
+ // that second store is a
+ // transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testDominatingTransitionHoisting1);
testDominatingTransitionHoisting1(new Array(5));
testDominatingTransitionHoisting1(new Array(5));
+ // TODO(verwaest) With current changes the elements transition gets hoisted
+ // above the access, causing a deopt. We should update the type of access
+ // rather than forbid hoisting the transition.
assertTrue(2 != %GetOptimizationStatus(testDominatingTransitionHoisting1));
+ %ClearFunctionTypeFeedback(testDominatingTransitionHoisting1);
+ */
function testHoistingWithSideEffect(a) {
var object = new Object();
@@ -166,10 +188,14 @@ if (support_smi_only_arrays) {
}
testHoistingWithSideEffect(new Array(5));
+ testHoistingWithSideEffect(new Array(5)); // Call twice to make sure that
+ // second store is a transition and
+ // not optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testHoistingWithSideEffect);
testHoistingWithSideEffect(new Array(5));
testHoistingWithSideEffect(new Array(5));
assertTrue(2 != %GetOptimizationStatus(testHoistingWithSideEffect));
+ %ClearFunctionTypeFeedback(testHoistingWithSideEffect);
function testStraightLineDupeElinination(a,b,c,d,e,f) {
var count = 3;
@@ -179,7 +205,7 @@ if (support_smi_only_arrays) {
a[1] = c;
a[2] = d;
assertTrue(true);
- a[3] = e; // TransitionElementsKind should be eliminated despite call.
+ a[3] = e; // TransitionElementsKind should be eliminated despite call.
a[4] = f;
} while (--count > 3);
}
@@ -205,7 +231,8 @@ if (support_smi_only_arrays) {
testStraightLineDupeElinination(new Array(5),0,0,0,.5,0);
testStraightLineDupeElinination(new Array(5),0,0,0,0,.5);
%OptimizeFunctionOnNextCall(testStraightLineDupeElinination);
- testStraightLineDupeElinination(new Array(5));
- testStraightLineDupeElinination(new Array(5));
+ testStraightLineDupeElinination(new Array(5),0,0,0,0,0);
+ testStraightLineDupeElinination(new Array(5),0,0,0,0,0);
assertTrue(2 != %GetOptimizationStatus(testStraightLineDupeElinination));
+ %ClearFunctionTypeFeedback(testStraightLineDupeElinination);
}
diff --git a/src/3rdparty/v8/test/mjsunit/elements-transition.js b/src/3rdparty/v8/test/mjsunit/elements-transition.js
index 60e051b..0dffd37 100644
--- a/src/3rdparty/v8/test/mjsunit/elements-transition.js
+++ b/src/3rdparty/v8/test/mjsunit/elements-transition.js
@@ -27,7 +27,7 @@
// Flags: --allow-natives-syntax --smi-only-arrays
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -44,8 +44,8 @@ if (support_smi_only_arrays) {
var array_1 = new Array(length);
var array_2 = new Array(length);
- assertTrue(%HasFastSmiOnlyElements(array_1));
- assertTrue(%HasFastSmiOnlyElements(array_2));
+ assertTrue(%HasFastSmiElements(array_1));
+ assertTrue(%HasFastSmiElements(array_2));
for (var i = 0; i < length; i++) {
if (i == length - 5 && test_double) {
// Trigger conversion to fast double elements at length-5.
@@ -57,8 +57,8 @@ if (support_smi_only_arrays) {
// Trigger conversion to fast object elements at length-3.
set(array_1, i, 'object');
set(array_2, i, 'object');
- assertTrue(%HasFastElements(array_1));
- assertTrue(%HasFastElements(array_2));
+ assertTrue(%HasFastObjectElements(array_1));
+ assertTrue(%HasFastObjectElements(array_2));
} else if (i != length - 7) {
// Set the element to an integer but leave a hole at length-7.
set(array_1, i, 2*i+1);
diff --git a/src/3rdparty/v8/test/mjsunit/error-accessors.js b/src/3rdparty/v8/test/mjsunit/error-accessors.js
new file mode 100644
index 0000000..9581050
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/error-accessors.js
@@ -0,0 +1,54 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the message property of error objects is a data property.
+
+var o;
+
+// message is constructed using the constructor.
+var error1 = new Error("custom message");
+o = {};
+o.__proto__ = error1;
+
+assertEquals("custom message",
+ Object.getOwnPropertyDescriptor(error1, "message").value);
+o.message = "another message";
+assertEquals("another message", o.message);
+assertEquals("custom message", error1.message);
+
+// message is constructed by the runtime.
+var error2;
+try { x.x } catch (e) { error2 = e; }
+o = {};
+o.__proto__ = error2;
+
+assertEquals("x is not defined",
+ Object.getOwnPropertyDescriptor(error2, "message").value);
+o.message = "another message";
+assertEquals("another message", o.message);
+assertEquals("x is not defined", error2.message);
+
diff --git a/src/3rdparty/v8/test/mjsunit/eval-stack-trace.js b/src/3rdparty/v8/test/mjsunit/eval-stack-trace.js
new file mode 100644
index 0000000..723d522
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/eval-stack-trace.js
@@ -0,0 +1,203 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Return the stack frames of an Error object.
+Error.prototype.getFrames = function() {
+ Error.prepareStackTrace = function(error, frames) {
+ return frames;
+ }
+ var frames = this.stack;
+ Error.prepareStackTrace = undefined;
+ return frames;
+}
+
+String.prototype.contains = function(pattern) {
+ return this.indexOf(pattern) > -1;
+}
+
+// Check for every frame that a certain method returns the
+// expected value for every frame.
+Array.prototype.verifyEquals = function(frames, func_name) {
+ this.forEach(
+ function(element, index) {
+ var frame = frames[index];
+ if (element === null) return;
+ assertEquals(element, (frame[func_name])());
+ }
+ );
+}
+
+// Check for every frame that a certain method has a return value
+// that contains the expected pattern for every frame.
+Array.prototype.verifyContains = function(frames, func_name) {
+ this.forEach(
+ function(element, index) {
+ var frame = frames[index];
+ if (element === null) return;
+ assertTrue((frame[func_name])().contains(element));
+ }
+ );
+}
+
+// Check for every frame that a certain method returns undefined
+// when expected.
+Array.prototype.verifyUndefined = function(frames, func_name) {
+ this.forEach(
+ function(element, index) {
+ var frame = frames[index];
+ if (element === null) return;
+ assertEquals(element, (frame[func_name])() === undefined);
+ }
+ );
+}
+
+
+// Simple eval.
+var code1 = "function f() { \n" +
+ " throw new Error(3); \n" + // Line 2
+ "} \n" +
+ "f(); \n"; // Line 4
+
+function g() {
+ eval(code1);
+}
+
+try {
+ g();
+} catch (e) {
+ // We expect something like
+ // f (eval at g (eval-stack.js:87:8), <anonymous>:2:9)
+ // eval (eval at g (eval-stack.js:87:8), <anonymous>:4:1)
+ // g (eval-stack.js:87:3)
+ // eval-stack.js:94:3
+ var frames = e.getFrames();
+ assertEquals(4, frames.length);
+ ["f", "eval", "g"]
+ .verifyEquals(frames, "getFunctionName");
+ [2, 4]
+ .verifyEquals(frames, "getLineNumber");
+ ["<anonymous>:2:", "<anonymous>:4:"]
+ .verifyContains(frames, "toString");
+ [true, true, false, false]
+ .verifyUndefined(frames, "getFileName");
+ ["eval at g", "eval at g"]
+ .verifyContains(frames, "getEvalOrigin");
+}
+
+
+// Nested eval.
+var code2 = "function h() { \n" +
+ " // Empty \n" +
+ " eval(code1); \n" + // Line 3
+ "} \n" +
+ "h(); \n"; // Line 5
+
+try {
+ eval(code2);
+} catch (e) {
+ // We expect something like
+ // f (eval at h (eval at <anonymous> (eval-stack.js:116:8)),
+ // <anonymous>:2:9)
+ // eval (eval at h (eval at <anonymous> (eval-stack.js:116:8)),
+ // <anonymous>:4:1)
+ // h (eval at <anonymous> (eval-stack.js:116:8), <anonymous>:3:3)
+ // eval (eval at <anonymous> (eval-stack.js:116:8), <anonymous>:5:1)
+ // eval-stack.js:116:3
+ var frames = e.getFrames();
+ assertEquals(5, frames.length);
+ ["f", "eval", "h", "eval"]
+ .verifyEquals(frames, "getFunctionName");
+ [2, 4, 3, 5]
+ .verifyEquals(frames, "getLineNumber");
+ ["<anonymous>:2:", "<anonymous>:4:", "<anonymous>:3:", "<anonymous>:5:"]
+ .verifyContains(frames, "toString");
+ [true, true, true, true, false]
+ .verifyUndefined(frames, "getFileName");
+ ["eval at h (eval at <anonymous> (",
+ "eval at h (eval at <anonymous> (",
+ "eval at <anonymous> (",
+ "eval at <anonymous> ("]
+ .verifyContains(frames, "getEvalOrigin");
+}
+
+
+// Nested eval calling through non-eval defined function.
+var code3 = "function h() { \n" +
+ " // Empty \n" +
+ " g(); \n" + // Line 3
+ "} \n" +
+ "h(); \n"; // Line 5
+
+try {
+ eval(code3);
+} catch (e) {
+ // We expect something like
+ // f (eval at g (test.js:83:8), <anonymous>:2:9)
+ // eval (eval at g (test.js:83:8), <anonymous>:4:1)
+ // g (test.js:83:3)
+ // h (eval at <anonymous> (test.js:149:8), <anonymous>:3:3)
+ // eval (eval at <anonymous> (test.js:149:8), <anonymous>:5:1)
+ // test.js:149:3
+ var frames = e.getFrames();
+ assertEquals(6, frames.length);
+ ["f", "eval", "g", "h", "eval"]
+ .verifyEquals(frames, "getFunctionName");
+ [2, 4, null, 3, 5]
+ .verifyEquals(frames, "getLineNumber");
+ ["<anonymous>:2:", "<anonymous>:4:", null, "<anonymous>:3:", "<anonymous>:5:"]
+ .verifyContains(frames, "toString");
+ [true, true, false, true, true, false]
+ .verifyUndefined(frames, "getFileName");
+ ["eval at g (",
+ "eval at g (",
+ null,
+ "eval at <anonymous> (",
+ "eval at <anonymous> ("]
+ .verifyContains(frames, "getEvalOrigin");
+}
+
+
+// Calling function defined in eval.
+eval("function f() { \n" +
+ " throw new Error(3); \n" +
+ "} \n");
+
+try {
+ f();
+} catch (e) {
+ // We expect something like
+ // f (eval at <anonymous> (test.js:182:40), <anonymous>:2:9)
+ // test.js:186:3
+ var frames = e.getFrames();
+ assertEquals(2, frames.length);
+ ["f"].verifyEquals(frames, "getFunctionName");
+ [2].verifyEquals(frames, "getLineNumber");
+ ["<anonymous>:2:"].verifyContains(frames, "toString");
+ [true, false].verifyUndefined(frames, "getFileName");
+ ["eval at <anonymous> ("].verifyContains(frames, "getEvalOrigin");
+}
+
diff --git a/src/3rdparty/v8/test/mjsunit/external-array.js b/src/3rdparty/v8/test/mjsunit/external-array.js
index 32f78a7..85a8cc5 100644
--- a/src/3rdparty/v8/test/mjsunit/external-array.js
+++ b/src/3rdparty/v8/test/mjsunit/external-array.js
@@ -27,6 +27,12 @@
// Flags: --allow-natives-syntax --expose-gc
+// Helper
+function assertInstance(o, f) {
+ assertSame(o.constructor, f);
+ assertInstanceof(o, f);
+}
+
// This is a regression test for overlapping key and value registers.
function f(a) {
a[0] = 0;
@@ -51,14 +57,63 @@ assertThrows(abfunc1);
// Test derivation from an ArrayBuffer
var ab = new ArrayBuffer(12);
+assertInstance(ab, ArrayBuffer);
var derived_uint8 = new Uint8Array(ab);
+assertInstance(derived_uint8, Uint8Array);
+assertSame(ab, derived_uint8.buffer);
assertEquals(12, derived_uint8.length);
+assertEquals(12, derived_uint8.byteLength);
+assertEquals(0, derived_uint8.byteOffset);
+assertEquals(1, derived_uint8.BYTES_PER_ELEMENT);
+var derived_uint8_2 = new Uint8Array(ab,7);
+assertInstance(derived_uint8_2, Uint8Array);
+assertSame(ab, derived_uint8_2.buffer);
+assertEquals(5, derived_uint8_2.length);
+assertEquals(5, derived_uint8_2.byteLength);
+assertEquals(7, derived_uint8_2.byteOffset);
+assertEquals(1, derived_uint8_2.BYTES_PER_ELEMENT);
+var derived_int16 = new Int16Array(ab);
+assertInstance(derived_int16, Int16Array);
+assertSame(ab, derived_int16.buffer);
+assertEquals(6, derived_int16.length);
+assertEquals(12, derived_int16.byteLength);
+assertEquals(0, derived_int16.byteOffset);
+assertEquals(2, derived_int16.BYTES_PER_ELEMENT);
+var derived_int16_2 = new Int16Array(ab,6);
+assertInstance(derived_int16_2, Int16Array);
+assertSame(ab, derived_int16_2.buffer);
+assertEquals(3, derived_int16_2.length);
+assertEquals(6, derived_int16_2.byteLength);
+assertEquals(6, derived_int16_2.byteOffset);
+assertEquals(2, derived_int16_2.BYTES_PER_ELEMENT);
var derived_uint32 = new Uint32Array(ab);
+assertInstance(derived_uint32, Uint32Array);
+assertSame(ab, derived_uint32.buffer);
assertEquals(3, derived_uint32.length);
+assertEquals(12, derived_uint32.byteLength);
+assertEquals(0, derived_uint32.byteOffset);
+assertEquals(4, derived_uint32.BYTES_PER_ELEMENT);
var derived_uint32_2 = new Uint32Array(ab,4);
+assertInstance(derived_uint32_2, Uint32Array);
+assertSame(ab, derived_uint32_2.buffer);
assertEquals(2, derived_uint32_2.length);
+assertEquals(8, derived_uint32_2.byteLength);
+assertEquals(4, derived_uint32_2.byteOffset);
+assertEquals(4, derived_uint32_2.BYTES_PER_ELEMENT);
var derived_uint32_3 = new Uint32Array(ab,4,1);
+assertInstance(derived_uint32_3, Uint32Array);
+assertSame(ab, derived_uint32_3.buffer);
assertEquals(1, derived_uint32_3.length);
+assertEquals(4, derived_uint32_3.byteLength);
+assertEquals(4, derived_uint32_3.byteOffset);
+assertEquals(4, derived_uint32_3.BYTES_PER_ELEMENT);
+var derived_float64 = new Float64Array(ab,0,1);
+assertInstance(derived_float64, Float64Array);
+assertSame(ab, derived_float64.buffer);
+assertEquals(1, derived_float64.length);
+assertEquals(8, derived_float64.byteLength);
+assertEquals(0, derived_float64.byteOffset);
+assertEquals(8, derived_float64.BYTES_PER_ELEMENT);
// If a given byteOffset and length references an area beyond the end of the
// ArrayBuffer an exception is raised.
@@ -87,6 +142,25 @@ function abfunc6() {
}
assertThrows(abfunc6);
+// Test that an array constructed without an array buffer creates one properly.
+a = new Uint8Array(31);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+a = new Int16Array(5);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+a = new Float64Array(7);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+
+// Test that an implicitly created buffer is a valid buffer.
+a = new Float64Array(7);
+assertSame(a.buffer, (new Uint16Array(a.buffer)).buffer);
+assertSame(a.buffer, (new Float32Array(a.buffer,4)).buffer);
+assertSame(a.buffer, (new Int8Array(a.buffer,3,51)).buffer);
+assertInstance(a.buffer, ArrayBuffer);
+
// Test the correct behavior of the |BYTES_PER_ELEMENT| property (which is
// "constant", but not read-only).
a = new Int32Array(2);
@@ -140,7 +214,7 @@ assertEquals(4, array_with_length_from_non_number.length);
// Test loads and stores.
types = [Array, Int8Array, Uint8Array, Int16Array, Uint16Array, Int32Array,
- Uint32Array, PixelArray, Float32Array, Float64Array];
+ Uint32Array, Uint8ClampedArray, Float32Array, Float64Array];
test_result_nan = [NaN, 0, 0, 0, 0, 0, 0, 0, NaN, NaN];
test_result_low_int = [-1, -1, 255, -1, 65535, -1, 0xFFFFFFFF, 0, -1, -1];
@@ -351,3 +425,292 @@ assertTrue(isNaN(float64_array[0]));
%OptimizeFunctionOnNextCall(store_float64_undefined);
store_float64_undefined(float64_array);
assertTrue(isNaN(float64_array[0]));
+
+
+// Check handling of 0-sized buffers and arrays.
+ab = new ArrayBuffer(0);
+assertInstance(ab, ArrayBuffer);
+assertEquals(0, ab.byteLength);
+a = new Int8Array(ab);
+assertInstance(a, Int8Array);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0]);
+ab = new ArrayBuffer(16);
+assertInstance(ab, ArrayBuffer);
+a = new Float32Array(ab,4,0);
+assertInstance(a, Float32Array);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0]);
+a = new Uint16Array(0);
+assertInstance(a, Uint16Array);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0]);
+
+
+// Check construction from arrays.
+a = new Uint32Array([]);
+assertInstance(a, Uint32Array);
+assertEquals(0, a.length);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.buffer.byteLength);
+assertEquals(4, a.BYTES_PER_ELEMENT);
+assertInstance(a.buffer, ArrayBuffer);
+a = new Uint16Array([1,2,3]);
+assertInstance(a, Uint16Array);
+assertEquals(3, a.length);
+assertEquals(6, a.byteLength);
+assertEquals(6, a.buffer.byteLength);
+assertEquals(2, a.BYTES_PER_ELEMENT);
+assertEquals(1, a[0]);
+assertEquals(3, a[2]);
+assertInstance(a.buffer, ArrayBuffer);
+a = new Uint32Array(a);
+assertInstance(a, Uint32Array);
+assertEquals(3, a.length);
+assertEquals(12, a.byteLength);
+assertEquals(12, a.buffer.byteLength);
+assertEquals(4, a.BYTES_PER_ELEMENT);
+assertEquals(1, a[0]);
+assertEquals(3, a[2]);
+assertInstance(a.buffer, ArrayBuffer);
+
+// Check subarrays.
+a = new Uint16Array([1,2,3,4,5,6]);
+aa = a.subarray(3);
+assertInstance(aa, Uint16Array);
+assertEquals(3, aa.length);
+assertEquals(6, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(3,5);
+assertInstance(aa, Uint16Array);
+assertEquals(2, aa.length);
+assertEquals(4, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(4,8);
+assertInstance(aa, Uint16Array);
+assertEquals(2, aa.length);
+assertEquals(4, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(9);
+assertInstance(aa, Uint16Array);
+assertEquals(0, aa.length);
+assertEquals(0, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(-4);
+assertInstance(aa, Uint16Array);
+assertEquals(4, aa.length);
+assertEquals(8, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(-3,-1);
+assertInstance(aa, Uint16Array);
+assertEquals(2, aa.length);
+assertEquals(4, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(3,2);
+assertInstance(aa, Uint16Array);
+assertEquals(0, aa.length);
+assertEquals(0, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(-3,-4);
+assertInstance(aa, Uint16Array);
+assertEquals(0, aa.length);
+assertEquals(0, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+aa = a.subarray(0,-8);
+assertInstance(aa, Uint16Array);
+assertEquals(0, aa.length);
+assertEquals(0, aa.byteLength);
+assertEquals(2, aa.BYTES_PER_ELEMENT);
+assertSame(a.buffer, aa.buffer);
+
+assertThrows(function(){ a.subarray.call({}, 0) });
+assertThrows(function(){ a.subarray.call([], 0) });
+assertThrows(function(){ a.subarray.call(a) });
+
+
+// Call constructors directly as functions, and through .call and .apply
+
+b = ArrayBuffer(100)
+a = Int8Array(b, 5, 77)
+assertInstance(b, ArrayBuffer)
+assertInstance(a, Int8Array)
+assertSame(b, a.buffer)
+assertEquals(5, a.byteOffset)
+assertEquals(77, a.byteLength)
+b = ArrayBuffer.call(null, 10)
+a = Uint16Array.call(null, b, 2, 4)
+assertInstance(b, ArrayBuffer)
+assertInstance(a, Uint16Array)
+assertSame(b, a.buffer)
+assertEquals(2, a.byteOffset)
+assertEquals(8, a.byteLength)
+b = ArrayBuffer.apply(null, [1000])
+a = Float32Array.apply(null, [b, 128, 1])
+assertInstance(b, ArrayBuffer)
+assertInstance(a, Float32Array)
+assertSame(b, a.buffer)
+assertEquals(128, a.byteOffset)
+assertEquals(4, a.byteLength)
+
+
+// Test array.set in different combinations.
+
+function assertArrayPrefix(expected, array) {
+ for (var i = 0; i < expected.length; ++i) {
+ assertEquals(expected[i], array[i]);
+ }
+}
+
+var a11 = new Int16Array([1, 2, 3, 4, 0, -1])
+var a12 = new Uint16Array(15)
+a12.set(a11, 3)
+assertArrayPrefix([0, 0, 0, 1, 2, 3, 4, 0, 0xffff, 0, 0], a12)
+assertThrows(function(){ a11.set(a12) })
+
+var a21 = [1, undefined, 10, NaN, 0, -1, {valueOf: function() {return 3}}]
+var a22 = new Int32Array(12)
+a22.set(a21, 2)
+assertArrayPrefix([0, 0, 1, 0, 10, 0, 0, -1, 3, 0], a22)
+
+var a31 = new Float32Array([2, 4, 6, 8, 11, NaN, 1/0, -3])
+var a32 = a31.subarray(2, 6)
+a31.set(a32, 4)
+assertArrayPrefix([2, 4, 6, 8, 6, 8, 11, NaN], a31)
+assertArrayPrefix([6, 8, 6, 8], a32)
+
+var a4 = new Uint8ClampedArray([3,2,5,6])
+a4.set(a4)
+assertArrayPrefix([3, 2, 5, 6], a4)
+
+// Cases with overlapping backing store but different element sizes.
+var b = new ArrayBuffer(4)
+var a5 = new Int16Array(b)
+var a50 = new Int8Array(b)
+var a51 = new Int8Array(b, 0, 2)
+var a52 = new Int8Array(b, 1, 2)
+var a53 = new Int8Array(b, 2, 2)
+
+a5.set([0x5050, 0x0a0a])
+assertArrayPrefix([0x50, 0x50, 0x0a, 0x0a], a50)
+assertArrayPrefix([0x50, 0x50], a51)
+assertArrayPrefix([0x50, 0x0a], a52)
+assertArrayPrefix([0x0a, 0x0a], a53)
+
+a50.set([0x50, 0x50, 0x0a, 0x0a])
+a51.set(a5)
+assertArrayPrefix([0x50, 0x0a, 0x0a, 0x0a], a50)
+
+a50.set([0x50, 0x50, 0x0a, 0x0a])
+a52.set(a5)
+assertArrayPrefix([0x50, 0x50, 0x0a, 0x0a], a50)
+
+a50.set([0x50, 0x50, 0x0a, 0x0a])
+a53.set(a5)
+assertArrayPrefix([0x50, 0x50, 0x50, 0x0a], a50)
+
+a50.set([0x50, 0x51, 0x0a, 0x0b])
+a5.set(a51)
+assertArrayPrefix([0x0050, 0x0051], a5)
+
+a50.set([0x50, 0x51, 0x0a, 0x0b])
+a5.set(a52)
+assertArrayPrefix([0x0051, 0x000a], a5)
+
+a50.set([0x50, 0x51, 0x0a, 0x0b])
+a5.set(a53)
+assertArrayPrefix([0x000a, 0x000b], a5)
+
+// Mixed types of same size.
+var a61 = new Float32Array([1.2, 12.3])
+var a62 = new Int32Array(2)
+a62.set(a61)
+assertArrayPrefix([1, 12], a62)
+a61.set(a62)
+assertArrayPrefix([1, 12], a61)
+
+// Invalid source
+assertThrows(function() { a.set(0) })
+assertThrows(function() { a.set({}) })
+
+
+// Test arraybuffer.slice
+
+var a0 = new Int8Array([1, 2, 3, 4, 5, 6])
+var b0 = a0.buffer
+
+var b1 = b0.slice(0)
+assertEquals(b0.byteLength, b1.byteLength)
+assertArrayPrefix([1, 2, 3, 4, 5, 6], Int8Array(b1))
+
+var b2 = b0.slice(3)
+assertEquals(b0.byteLength - 3, b2.byteLength)
+assertArrayPrefix([4, 5, 6], Int8Array(b2))
+
+var b3 = b0.slice(2, 4)
+assertEquals(2, b3.byteLength)
+assertArrayPrefix([3, 4], Int8Array(b3))
+
+function goo(a, i) {
+ return a[i];
+}
+
+function boo(a, i, v) {
+ return a[i] = v;
+}
+
+function do_tagged_index_external_array_test(constructor) {
+ var t_array = new constructor([1, 2, 3, 4, 5, 6]);
+ assertEquals(1, goo(t_array, 0));
+ assertEquals(1, goo(t_array, 0));
+ boo(t_array, 0, 13);
+ assertEquals(13, goo(t_array, 0));
+ %OptimizeFunctionOnNextCall(goo);
+ %OptimizeFunctionOnNextCall(boo);
+ boo(t_array, 0, 15);
+ assertEquals(15, goo(t_array, 0));
+ %ClearFunctionTypeFeedback(goo);
+ %ClearFunctionTypeFeedback(boo);
+}
+
+do_tagged_index_external_array_test(Int8Array);
+do_tagged_index_external_array_test(Uint8Array);
+do_tagged_index_external_array_test(Int16Array);
+do_tagged_index_external_array_test(Uint16Array);
+do_tagged_index_external_array_test(Int32Array);
+do_tagged_index_external_array_test(Uint32Array);
+do_tagged_index_external_array_test(Float32Array);
+do_tagged_index_external_array_test(Float64Array);
+
+var built_in_array = new Array(1, 2, 3, 4, 5, 6);
+assertEquals(1, goo(built_in_array, 0));
+assertEquals(1, goo(built_in_array, 0));
+%OptimizeFunctionOnNextCall(goo);
+%OptimizeFunctionOnNextCall(boo);
+boo(built_in_array, 0, 11);
+assertEquals(11, goo(built_in_array, 0));
+%ClearFunctionTypeFeedback(goo);
+%ClearFunctionTypeFeedback(boo);
+
+built_in_array = new Array(1.5, 2, 3, 4, 5, 6);
+assertEquals(1.5, goo(built_in_array, 0));
+assertEquals(1.5, goo(built_in_array, 0));
+%OptimizeFunctionOnNextCall(goo);
+%OptimizeFunctionOnNextCall(boo);
+boo(built_in_array, 0, 2.5);
+assertEquals(2.5, goo(built_in_array, 0));
+%ClearFunctionTypeFeedback(goo);
+%ClearFunctionTypeFeedback(boo);
diff --git a/src/3rdparty/v8/test/mjsunit/fast-array-length.js b/src/3rdparty/v8/test/mjsunit/fast-array-length.js
new file mode 100644
index 0000000..42f2c38
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fast-array-length.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// This is a regression test for overlapping key and value registers.
+
+
+var a = [0, 1, 2, 3, 4, 5];
+assertTrue(%HasFastSmiElements(a));
+a.length = (1 << 30);
+assertFalse(%HasFastSmiElements(a));
+
diff --git a/src/3rdparty/v8/test/mjsunit/fast-non-keyed.js b/src/3rdparty/v8/test/mjsunit/fast-non-keyed.js
new file mode 100644
index 0000000..c2f7fc7
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fast-non-keyed.js
@@ -0,0 +1,113 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Check that keyed stores make things go dict mode faster than non-keyed
+// stores.
+
+function AddProps(obj) {
+ for (var i = 0; i < 26; i++) {
+ obj["x" + i] = 0;
+ }
+}
+
+
+function AddPropsNonKeyed(obj) {
+ obj.x0 = 0;
+ obj.x1 = 0;
+ obj.x2 = 0;
+ obj.x3 = 0;
+ obj.x4 = 0;
+ obj.x5 = 0;
+ obj.x6 = 0;
+ obj.x7 = 0;
+ obj.x8 = 0;
+ obj.x9 = 0;
+ obj.x10 = 0;
+ obj.x11 = 0;
+ obj.x12 = 0;
+ obj.x13 = 0;
+ obj.x14 = 0;
+ obj.x15 = 0;
+ obj.x16 = 0;
+ obj.x17 = 0;
+ obj.x18 = 0;
+ obj.x19 = 0;
+ obj.x20 = 0;
+ obj.x21 = 0;
+ obj.x22 = 0;
+ obj.x23 = 0;
+ obj.x24 = 0;
+ obj.x25 = 0;
+}
+
+function AddProps3(obj) {
+ obj["x0"] = 0;
+ obj["x1"] = 0;
+ obj["x2"] = 0;
+ obj["x3"] = 0;
+ obj["x4"] = 0;
+ obj["x5"] = 0;
+ obj["x6"] = 0;
+ obj["x7"] = 0;
+ obj["x8"] = 0;
+ obj["x9"] = 0;
+ obj["x10"] = 0;
+ obj["x11"] = 0;
+ obj["x12"] = 0;
+ obj["x13"] = 0;
+ obj["x14"] = 0;
+ obj["x15"] = 0;
+ obj["x16"] = 0;
+ obj["x17"] = 0;
+ obj["x18"] = 0;
+ obj["x19"] = 0;
+ obj["x20"] = 0;
+ obj["x21"] = 0;
+ obj["x22"] = 0;
+ obj["x23"] = 0;
+ obj["x24"] = 0;
+ obj["x25"] = 0;
+}
+
+
+var keyed = {};
+AddProps(keyed);
+assertFalse(%HasFastProperties(keyed));
+
+var non_keyed = {};
+AddPropsNonKeyed(non_keyed);
+assertTrue(%HasFastProperties(non_keyed));
+
+var obj3 = {};
+AddProps3(obj3);
+assertTrue(%HasFastProperties(obj3));
+
+var bad_name = {};
+bad_name[".foo"] = 0;
+assertFalse(%HasFastProperties(bad_name));
diff --git a/src/3rdparty/v8/test/mjsunit/fast-prototype.js b/src/3rdparty/v8/test/mjsunit/fast-prototype.js
new file mode 100644
index 0000000..7fd73a4
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fast-prototype.js
@@ -0,0 +1,113 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Check that objects that are used for prototypes are in the fast mode.
+
+function Super() {
+}
+
+
+function Sub() {
+}
+
+
+function AddProps(obj) {
+ for (var i = 0; i < 26; i++) {
+ obj["x" + i] = 0;
+ }
+}
+
+
+function DoProtoMagic(proto, set__proto__) {
+ if (set__proto__) {
+ (new Sub()).__proto__ = proto;
+ } else {
+ Sub.prototype = proto;
+ }
+}
+
+
+function test(use_new, add_first, set__proto__, same_map_as) {
+ var proto = use_new ? new Super() : {};
+
+ // New object is fast.
+ assertTrue(%HasFastProperties(proto));
+
+ if (add_first) {
+ AddProps(proto);
+ // Adding this many properties makes it slow.
+ assertFalse(%HasFastProperties(proto));
+ DoProtoMagic(proto, set__proto__);
+ // Making it a prototype makes it fast again.
+ assertTrue(%HasFastProperties(proto));
+ } else {
+ DoProtoMagic(proto, set__proto__);
+ // Still fast
+ assertTrue(%HasFastProperties(proto));
+ AddProps(proto);
+ // After we add all those properties it went slow mode again :-(
+ assertFalse(%HasFastProperties(proto));
+ }
+ if (same_map_as && !add_first) {
+ assertTrue(%HaveSameMap(same_map_as, proto));
+ }
+ return proto;
+}
+
+
+for (var i = 0; i < 4; i++) {
+ var set__proto__ = ((i & 1) != 0);
+ var use_new = ((i & 2) != 0);
+
+ test(use_new, true, set__proto__);
+
+ var last = test(use_new, false, set__proto__);
+ test(use_new, false, set__proto__, last);
+}
+
+
+var x = {a: 1, b: 2, c: 3};
+var o = { __proto__: x };
+assertTrue(%HasFastProperties(x));
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
+delete x.b;
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
+assertFalse(%HasFastProperties(x));
+x.d = 4;
+assertFalse(%HasFastProperties(x));
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
diff --git a/src/3rdparty/v8/test/mjsunit/fuzz-natives.js b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part1.js
index 2965e74..6941d80 100644
--- a/src/3rdparty/v8/test/mjsunit/fuzz-natives.js
+++ b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part1.js
@@ -149,6 +149,7 @@ var knownProblems = {
"PushBlockContext": true,
"LazyCompile": true,
"LazyRecompile": true,
+ "ParallelRecompile": true,
"NotifyDeoptimized": true,
"NotifyOSR": true,
"CreateObjectLiteralBoilerplate": true,
@@ -204,7 +205,9 @@ var currentlyUncallable = {
function testNatives() {
var allNatives = %ListNatives();
- for (var i = 0; i < allNatives.length; i++) {
+ var start = 0;
+ var stop = (allNatives.length >> 2);
+ for (var i = start; i < stop; i++) {
var nativeInfo = allNatives[i];
var name = nativeInfo[0];
if (name in knownProblems || name in currentlyUncallable)
diff --git a/src/3rdparty/v8/test/mjsunit/fuzz-natives-part2.js b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part2.js
new file mode 100644
index 0000000..ea8a2cf
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part2.js
@@ -0,0 +1,222 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var RUN_WITH_ALL_ARGUMENT_ENTRIES = false;
+var kOnManyArgumentsRemove = 5;
+
+function makeArguments() {
+ var result = [ ];
+ result.push(17);
+ result.push(-31);
+ result.push(new Array(100));
+ result.push(new Array(100003));
+ result.push(Number.MIN_VALUE);
+ result.push("whoops");
+ result.push("x");
+ result.push({"x": 1, "y": 2});
+ var slowCaseObj = {"a": 3, "b": 4, "c": 5};
+ delete slowCaseObj.c;
+ result.push(slowCaseObj);
+ result.push(function () { return 8; });
+ return result;
+}
+
+var kArgObjects = makeArguments().length;
+
+function makeFunction(name, argc) {
+ var args = [];
+ for (var i = 0; i < argc; i++)
+ args.push("x" + i);
+ var argsStr = args.join(", ");
+ return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");");
+}
+
+function testArgumentCount(name, argc) {
+ for (var i = 0; i < 10; i++) {
+ var func = null;
+ try {
+ func = makeFunction(name, i);
+ } catch (e) {
+ if (e != "SyntaxError: Illegal access") throw e;
+ }
+ if (func === null && i == argc) {
+ throw "unexpected exception";
+ }
+ var args = [ ];
+ for (var j = 0; j < i; j++)
+ args.push(0);
+ try {
+ func.apply(void 0, args);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ }
+}
+
+function testArgumentTypes(name, argc) {
+ var type = 0;
+ var hasMore = true;
+ var func = makeFunction(name, argc);
+ while (hasMore) {
+ var argPool = makeArguments();
+ // When we have 5 or more arguments we lower the amount of tests cases
+ // by randomly removing kOnManyArgumentsRemove entries
+ var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ?
+ kArgObjects : kArgObjects-kOnManyArgumentsRemove;
+ if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) {
+ for (var i = 0; i < kOnManyArgumentsRemove; i++) {
+ var rand = Math.floor(Math.random() * (kArgObjects - i));
+ argPool.splice(rand,1);
+ }
+ }
+ var current = type;
+ var hasMore = false;
+ var argList = [ ];
+ for (var i = 0; i < argc; i++) {
+ var index = current % numArguments;
+ current = (current / numArguments) << 0;
+ if (index != (numArguments - 1))
+ hasMore = true;
+ argList.push(argPool[index]);
+ }
+ try {
+ func.apply(void 0, argList);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ type++;
+ }
+}
+
+var knownProblems = {
+ "Abort": true,
+
+ // Avoid calling the concat operation, because weird lengths
+ // may lead to out-of-memory. Ditto for StringBuilderJoin.
+ "StringBuilderConcat": true,
+ "StringBuilderJoin": true,
+
+ // These functions use pseudo-stack-pointers and are not robust
+ // to unexpected integer values.
+ "DebugEvaluate": true,
+
+ // These functions do nontrivial error checking in recursive calls,
+ // which means that we have to propagate errors back.
+ "SetFunctionBreakPoint": true,
+ "SetScriptBreakPoint": true,
+ "PrepareStep": true,
+
+ // Too slow.
+ "DebugReferencedBy": true,
+
+ // Calling disable/enable access checks may interfere with the
+ // the rest of the tests.
+ "DisableAccessChecks": true,
+ "EnableAccessChecks": true,
+
+ // These functions should not be callable as runtime functions.
+ "NewFunctionContext": true,
+ "NewArgumentsFast": true,
+ "NewStrictArgumentsFast": true,
+ "PushWithContext": true,
+ "PushCatchContext": true,
+ "PushBlockContext": true,
+ "LazyCompile": true,
+ "LazyRecompile": true,
+ "ParallelRecompile": true,
+ "NotifyDeoptimized": true,
+ "NotifyOSR": true,
+ "CreateObjectLiteralBoilerplate": true,
+ "CloneLiteralBoilerplate": true,
+ "CloneShallowLiteralBoilerplate": true,
+ "CreateArrayLiteralBoilerplate": true,
+ "IS_VAR": true,
+ "ResolvePossiblyDirectEval": true,
+ "Log": true,
+ "DeclareGlobals": true,
+
+ "PromoteScheduledException": true,
+ "DeleteHandleScopeExtensions": true,
+
+ // Vararg with minimum number > 0.
+ "Call": true,
+
+ // Requires integer arguments to be non-negative.
+ "Apply": true,
+
+ // That can only be invoked on Array.prototype.
+ "FinishArrayPrototypeSetup": true,
+
+ "_SwapElements": true,
+
+ // Performance critical functions which cannot afford type checks.
+ "_IsNativeOrStrictMode": true,
+ "_CallFunction": true,
+
+ // Tries to allocate based on argument, and (correctly) throws
+ // out-of-memory if the request is too large. In practice, the
+ // size will be the number of captures of a RegExp.
+ "RegExpConstructResult": true,
+ "_RegExpConstructResult": true,
+
+ // This functions perform some checks compile time (they require one of their
+ // arguments to be a compile time smi).
+ "_DateField": true,
+ "_GetFromCache": true,
+
+ // This function expects its first argument to be a non-smi.
+ "_IsStringWrapperSafeForDefaultValueOf" : true,
+
+ // Only applicable to strings.
+ "_HasCachedArrayIndex": true,
+ "_GetCachedArrayIndex": true
+};
+
+var currentlyUncallable = {
+ // We need to find a way to test this without breaking the system.
+ "SystemBreak": true
+};
+
+function testNatives() {
+ var allNatives = %ListNatives();
+ var start = allNatives.length >> 2;
+ var stop = (allNatives.length >> 2)*2;
+ for (var i = start; i < stop; i++) {
+ var nativeInfo = allNatives[i];
+ var name = nativeInfo[0];
+ if (name in knownProblems || name in currentlyUncallable)
+ continue;
+ print(name);
+ var argc = nativeInfo[1];
+ testArgumentCount(name, argc);
+ testArgumentTypes(name, argc);
+ }
+}
+
+testNatives();
diff --git a/src/3rdparty/v8/test/mjsunit/fuzz-natives-part3.js b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part3.js
new file mode 100644
index 0000000..ecfdf97
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part3.js
@@ -0,0 +1,222 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var RUN_WITH_ALL_ARGUMENT_ENTRIES = false;
+var kOnManyArgumentsRemove = 5;
+
+function makeArguments() {
+ var result = [ ];
+ result.push(17);
+ result.push(-31);
+ result.push(new Array(100));
+ result.push(new Array(100003));
+ result.push(Number.MIN_VALUE);
+ result.push("whoops");
+ result.push("x");
+ result.push({"x": 1, "y": 2});
+ var slowCaseObj = {"a": 3, "b": 4, "c": 5};
+ delete slowCaseObj.c;
+ result.push(slowCaseObj);
+ result.push(function () { return 8; });
+ return result;
+}
+
+var kArgObjects = makeArguments().length;
+
+function makeFunction(name, argc) {
+ var args = [];
+ for (var i = 0; i < argc; i++)
+ args.push("x" + i);
+ var argsStr = args.join(", ");
+ return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");");
+}
+
+function testArgumentCount(name, argc) {
+ for (var i = 0; i < 10; i++) {
+ var func = null;
+ try {
+ func = makeFunction(name, i);
+ } catch (e) {
+ if (e != "SyntaxError: Illegal access") throw e;
+ }
+ if (func === null && i == argc) {
+ throw "unexpected exception";
+ }
+ var args = [ ];
+ for (var j = 0; j < i; j++)
+ args.push(0);
+ try {
+ func.apply(void 0, args);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ }
+}
+
+function testArgumentTypes(name, argc) {
+ var type = 0;
+ var hasMore = true;
+ var func = makeFunction(name, argc);
+ while (hasMore) {
+ var argPool = makeArguments();
+ // When we have 5 or more arguments we lower the amount of tests cases
+ // by randomly removing kOnManyArgumentsRemove entries
+ var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ?
+ kArgObjects : kArgObjects-kOnManyArgumentsRemove;
+ if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) {
+ for (var i = 0; i < kOnManyArgumentsRemove; i++) {
+ var rand = Math.floor(Math.random() * (kArgObjects - i));
+ argPool.splice(rand,1);
+ }
+ }
+ var current = type;
+ var hasMore = false;
+ var argList = [ ];
+ for (var i = 0; i < argc; i++) {
+ var index = current % numArguments;
+ current = (current / numArguments) << 0;
+ if (index != (numArguments - 1))
+ hasMore = true;
+ argList.push(argPool[index]);
+ }
+ try {
+ func.apply(void 0, argList);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ type++;
+ }
+}
+
+var knownProblems = {
+ "Abort": true,
+
+ // Avoid calling the concat operation, because weird lengths
+ // may lead to out-of-memory. Ditto for StringBuilderJoin.
+ "StringBuilderConcat": true,
+ "StringBuilderJoin": true,
+
+ // These functions use pseudo-stack-pointers and are not robust
+ // to unexpected integer values.
+ "DebugEvaluate": true,
+
+ // These functions do nontrivial error checking in recursive calls,
+ // which means that we have to propagate errors back.
+ "SetFunctionBreakPoint": true,
+ "SetScriptBreakPoint": true,
+ "PrepareStep": true,
+
+ // Too slow.
+ "DebugReferencedBy": true,
+
+ // Calling disable/enable access checks may interfere with the
+ // the rest of the tests.
+ "DisableAccessChecks": true,
+ "EnableAccessChecks": true,
+
+ // These functions should not be callable as runtime functions.
+ "NewFunctionContext": true,
+ "NewArgumentsFast": true,
+ "NewStrictArgumentsFast": true,
+ "PushWithContext": true,
+ "PushCatchContext": true,
+ "PushBlockContext": true,
+ "LazyCompile": true,
+ "LazyRecompile": true,
+ "ParallelRecompile": true,
+ "NotifyDeoptimized": true,
+ "NotifyOSR": true,
+ "CreateObjectLiteralBoilerplate": true,
+ "CloneLiteralBoilerplate": true,
+ "CloneShallowLiteralBoilerplate": true,
+ "CreateArrayLiteralBoilerplate": true,
+ "IS_VAR": true,
+ "ResolvePossiblyDirectEval": true,
+ "Log": true,
+ "DeclareGlobals": true,
+
+ "PromoteScheduledException": true,
+ "DeleteHandleScopeExtensions": true,
+
+ // Vararg with minimum number > 0.
+ "Call": true,
+
+ // Requires integer arguments to be non-negative.
+ "Apply": true,
+
+ // That can only be invoked on Array.prototype.
+ "FinishArrayPrototypeSetup": true,
+
+ "_SwapElements": true,
+
+ // Performance critical functions which cannot afford type checks.
+ "_IsNativeOrStrictMode": true,
+ "_CallFunction": true,
+
+ // Tries to allocate based on argument, and (correctly) throws
+ // out-of-memory if the request is too large. In practice, the
+ // size will be the number of captures of a RegExp.
+ "RegExpConstructResult": true,
+ "_RegExpConstructResult": true,
+
+ // This functions perform some checks compile time (they require one of their
+ // arguments to be a compile time smi).
+ "_DateField": true,
+ "_GetFromCache": true,
+
+ // This function expects its first argument to be a non-smi.
+ "_IsStringWrapperSafeForDefaultValueOf" : true,
+
+ // Only applicable to strings.
+ "_HasCachedArrayIndex": true,
+ "_GetCachedArrayIndex": true
+};
+
+var currentlyUncallable = {
+ // We need to find a way to test this without breaking the system.
+ "SystemBreak": true
+};
+
+function testNatives() {
+ var allNatives = %ListNatives();
+ var start = (allNatives.length >> 2)*2;
+ var stop = (allNatives.length >> 2)*3;
+ for (var i = start; i < stop; i++) {
+ var nativeInfo = allNatives[i];
+ var name = nativeInfo[0];
+ if (name in knownProblems || name in currentlyUncallable)
+ continue;
+ print(name);
+ var argc = nativeInfo[1];
+ testArgumentCount(name, argc);
+ testArgumentTypes(name, argc);
+ }
+}
+
+testNatives();
diff --git a/src/3rdparty/v8/test/mjsunit/fuzz-natives-part4.js b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part4.js
new file mode 100644
index 0000000..da04596
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/fuzz-natives-part4.js
@@ -0,0 +1,222 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var RUN_WITH_ALL_ARGUMENT_ENTRIES = false;
+var kOnManyArgumentsRemove = 5;
+
+function makeArguments() {
+ var result = [ ];
+ result.push(17);
+ result.push(-31);
+ result.push(new Array(100));
+ result.push(new Array(100003));
+ result.push(Number.MIN_VALUE);
+ result.push("whoops");
+ result.push("x");
+ result.push({"x": 1, "y": 2});
+ var slowCaseObj = {"a": 3, "b": 4, "c": 5};
+ delete slowCaseObj.c;
+ result.push(slowCaseObj);
+ result.push(function () { return 8; });
+ return result;
+}
+
+var kArgObjects = makeArguments().length;
+
+function makeFunction(name, argc) {
+ var args = [];
+ for (var i = 0; i < argc; i++)
+ args.push("x" + i);
+ var argsStr = args.join(", ");
+ return new Function(args.join(", "), "return %" + name + "(" + argsStr + ");");
+}
+
+function testArgumentCount(name, argc) {
+ for (var i = 0; i < 10; i++) {
+ var func = null;
+ try {
+ func = makeFunction(name, i);
+ } catch (e) {
+ if (e != "SyntaxError: Illegal access") throw e;
+ }
+ if (func === null && i == argc) {
+ throw "unexpected exception";
+ }
+ var args = [ ];
+ for (var j = 0; j < i; j++)
+ args.push(0);
+ try {
+ func.apply(void 0, args);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ }
+}
+
+function testArgumentTypes(name, argc) {
+ var type = 0;
+ var hasMore = true;
+ var func = makeFunction(name, argc);
+ while (hasMore) {
+ var argPool = makeArguments();
+ // When we have 5 or more arguments we lower the amount of tests cases
+ // by randomly removing kOnManyArgumentsRemove entries
+ var numArguments = RUN_WITH_ALL_ARGUMENT_ENTRIES ?
+ kArgObjects : kArgObjects-kOnManyArgumentsRemove;
+ if (argc >= 5 && !RUN_WITH_ALL_ARGUMENT_ENTRIES) {
+ for (var i = 0; i < kOnManyArgumentsRemove; i++) {
+ var rand = Math.floor(Math.random() * (kArgObjects - i));
+ argPool.splice(rand,1);
+ }
+ }
+ var current = type;
+ var hasMore = false;
+ var argList = [ ];
+ for (var i = 0; i < argc; i++) {
+ var index = current % numArguments;
+ current = (current / numArguments) << 0;
+ if (index != (numArguments - 1))
+ hasMore = true;
+ argList.push(argPool[index]);
+ }
+ try {
+ func.apply(void 0, argList);
+ } catch (e) {
+ // we don't care what happens as long as we don't crash
+ }
+ type++;
+ }
+}
+
+var knownProblems = {
+ "Abort": true,
+
+ // Avoid calling the concat operation, because weird lengths
+ // may lead to out-of-memory. Ditto for StringBuilderJoin.
+ "StringBuilderConcat": true,
+ "StringBuilderJoin": true,
+
+ // These functions use pseudo-stack-pointers and are not robust
+ // to unexpected integer values.
+ "DebugEvaluate": true,
+
+ // These functions do nontrivial error checking in recursive calls,
+ // which means that we have to propagate errors back.
+ "SetFunctionBreakPoint": true,
+ "SetScriptBreakPoint": true,
+ "PrepareStep": true,
+
+ // Too slow.
+ "DebugReferencedBy": true,
+
+ // Calling disable/enable access checks may interfere with the
+ // the rest of the tests.
+ "DisableAccessChecks": true,
+ "EnableAccessChecks": true,
+
+ // These functions should not be callable as runtime functions.
+ "NewFunctionContext": true,
+ "NewArgumentsFast": true,
+ "NewStrictArgumentsFast": true,
+ "PushWithContext": true,
+ "PushCatchContext": true,
+ "PushBlockContext": true,
+ "LazyCompile": true,
+ "LazyRecompile": true,
+ "ParallelRecompile": true,
+ "NotifyDeoptimized": true,
+ "NotifyOSR": true,
+ "CreateObjectLiteralBoilerplate": true,
+ "CloneLiteralBoilerplate": true,
+ "CloneShallowLiteralBoilerplate": true,
+ "CreateArrayLiteralBoilerplate": true,
+ "IS_VAR": true,
+ "ResolvePossiblyDirectEval": true,
+ "Log": true,
+ "DeclareGlobals": true,
+
+ "PromoteScheduledException": true,
+ "DeleteHandleScopeExtensions": true,
+
+ // Vararg with minimum number > 0.
+ "Call": true,
+
+ // Requires integer arguments to be non-negative.
+ "Apply": true,
+
+ // That can only be invoked on Array.prototype.
+ "FinishArrayPrototypeSetup": true,
+
+ "_SwapElements": true,
+
+ // Performance critical functions which cannot afford type checks.
+ "_IsNativeOrStrictMode": true,
+ "_CallFunction": true,
+
+ // Tries to allocate based on argument, and (correctly) throws
+ // out-of-memory if the request is too large. In practice, the
+ // size will be the number of captures of a RegExp.
+ "RegExpConstructResult": true,
+ "_RegExpConstructResult": true,
+
+ // This functions perform some checks compile time (they require one of their
+ // arguments to be a compile time smi).
+ "_DateField": true,
+ "_GetFromCache": true,
+
+ // This function expects its first argument to be a non-smi.
+ "_IsStringWrapperSafeForDefaultValueOf" : true,
+
+ // Only applicable to strings.
+ "_HasCachedArrayIndex": true,
+ "_GetCachedArrayIndex": true
+};
+
+var currentlyUncallable = {
+ // We need to find a way to test this without breaking the system.
+ "SystemBreak": true
+};
+
+function testNatives() {
+ var allNatives = %ListNatives();
+ var start = (allNatives.length >> 2)*3;
+ var stop = allNatives.length;
+ for (var i = start; i < stop; i++) {
+ var nativeInfo = allNatives[i];
+ var name = nativeInfo[0];
+ if (name in knownProblems || name in currentlyUncallable)
+ continue;
+ print(name);
+ var argc = nativeInfo[1];
+ testArgumentCount(name, argc);
+ testArgumentTypes(name, argc);
+ }
+}
+
+testNatives();
diff --git a/src/3rdparty/v8/test/mjsunit/greedy.js b/src/3rdparty/v8/test/mjsunit/greedy.js
index d357f0c..8c49e41 100644
--- a/src/3rdparty/v8/test/mjsunit/greedy.js
+++ b/src/3rdparty/v8/test/mjsunit/greedy.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --gc-greedy
+// Flags: --gc-greedy --noverify-heap
function IterativeFib(n) {
var f0 = 0, f1 = 1;
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js b/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
index 8388504..3aa9d22 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
@@ -35,7 +35,8 @@
function CheckException(e) {
var string = e.toString();
assertTrue(string.indexOf("has already been declared") >= 0 ||
- string.indexOf("redeclaration") >= 0); return 'Conflict';
+ string.indexOf("redeclaration") >= 0);
+ return 'Conflict';
}
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js b/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
index 1db1792..d01e5c0 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-scoping --allow-natives-syntax
+// Flags: --harmony-scoping --allow-natives-syntax --noparallel-recompilation
// TODO(ES6): properly activate extended mode
"use strict";
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/collections.js b/src/3rdparty/v8/test/mjsunit/harmony/collections.js
index 412e6f1..0219f39 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/collections.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/collections.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -65,9 +65,11 @@ TestInvalidCalls(new WeakMap);
// Test expected behavior for Sets
function TestSet(set, key) {
assertFalse(set.has(key));
- set.add(key);
+ assertSame(undefined, set.add(key));
assertTrue(set.has(key));
- set.delete(key);
+ assertTrue(set.delete(key));
+ assertFalse(set.has(key));
+ assertFalse(set.delete(key));
assertFalse(set.has(key));
}
function TestSetBehavior(set) {
@@ -87,7 +89,7 @@ TestSetBehavior(new Set);
// Test expected mapping behavior for Maps and WeakMaps
function TestMapping(map, key, value) {
- map.set(key, value);
+ assertSame(undefined, map.set(key, value));
assertSame(value, map.get(key));
}
function TestMapBehavior1(m) {
@@ -117,12 +119,12 @@ TestMapBehavior2(new Map);
// Test expected querying behavior of Maps and WeakMaps
function TestQuery(m) {
var key = new Object;
- TestMapping(m, key, 'to-be-present');
- assertTrue(m.has(key));
- assertFalse(m.has(new Object));
- TestMapping(m, key, undefined);
- assertFalse(m.has(key));
- assertFalse(m.has(new Object));
+ var values = [ 'x', 0, +Infinity, -Infinity, true, false, null, undefined ];
+ for (var i = 0; i < values.length; i++) {
+ TestMapping(m, key, values[i]);
+ assertTrue(m.has(key));
+ assertFalse(m.has(new Object));
+ }
}
TestQuery(new Map);
TestQuery(new WeakMap);
@@ -311,4 +313,60 @@ TestBogusReceivers(bogusReceiversTestSet);
// Stress Test
// There is a proposed stress-test available at the es-discuss mailing list
// which cannot be reasonably automated. Check it out by hand if you like:
-// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html \ No newline at end of file
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
+
+
+// Set and Map size getters
+var setSizeDescriptor = Object.getOwnPropertyDescriptor(Set.prototype, 'size');
+assertEquals(undefined, setSizeDescriptor.value);
+assertEquals(undefined, setSizeDescriptor.set);
+assertTrue(setSizeDescriptor.get instanceof Function);
+assertEquals(undefined, setSizeDescriptor.get.prototype);
+assertFalse(setSizeDescriptor.enumerable);
+assertTrue(setSizeDescriptor.configurable);
+
+var s = new Set();
+assertFalse(s.hasOwnProperty('size'));
+for (var i = 0; i < 10; i++) {
+ assertEquals(i, s.size);
+ s.add(i);
+}
+for (var i = 9; i >= 0; i--) {
+ s.delete(i);
+ assertEquals(i, s.size);
+}
+
+
+var mapSizeDescriptor = Object.getOwnPropertyDescriptor(Map.prototype, 'size');
+assertEquals(undefined, mapSizeDescriptor.value);
+assertEquals(undefined, mapSizeDescriptor.set);
+assertTrue(mapSizeDescriptor.get instanceof Function);
+assertEquals(undefined, mapSizeDescriptor.get.prototype);
+assertFalse(mapSizeDescriptor.enumerable);
+assertTrue(mapSizeDescriptor.configurable);
+
+var m = new Map();
+assertFalse(m.hasOwnProperty('size'));
+for (var i = 0; i < 10; i++) {
+ assertEquals(i, m.size);
+ m.set(i, i);
+}
+for (var i = 9; i >= 0; i--) {
+ m.delete(i);
+ assertEquals(i, m.size);
+}
+
+// Test clear
+var a = new Set();
+s.add(42);
+assertTrue(s.has(42));
+s.clear();
+assertFalse(s.has(42));
+assertEquals(0, s.size);
+
+var m = new Map();
+m.set(42, true);
+assertTrue(m.has(42));
+m.clear();
+assertFalse(m.has(42));
+assertEquals(0, m.size);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js b/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
index 10aac2d..ca2ab9e 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
@@ -376,7 +376,7 @@ listener_delegate = function(exec_state) {
debug.ScopeType.Global], exec_state);
CheckScopeContent({x:'y'}, 0, exec_state);
// The function scope contains a temporary iteration variable.
- CheckScopeContent({x:'y'}, 1, exec_state);
+ CheckScopeContent({'.for.x':'y'}, 1, exec_state);
};
for_loop_1();
EndTest();
@@ -401,7 +401,7 @@ listener_delegate = function(exec_state) {
CheckScopeContent({x:3}, 0, exec_state);
CheckScopeContent({x:'y'}, 1, exec_state);
// The function scope contains a temporary iteration variable.
- CheckScopeContent({x:'y'}, 2, exec_state);
+ CheckScopeContent({'.for.x':'y'}, 2, exec_state);
};
for_loop_2();
EndTest();
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-linking.js b/src/3rdparty/v8/test/mjsunit/harmony/module-linking.js
index 13ca6f7..a4b272f 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/module-linking.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-linking.js
@@ -27,10 +27,188 @@
// Flags: --harmony-modules --harmony-scoping
-// Test basic module linking.
+// Test basic module linking and initialization.
"use strict";
+module R {
+ // At this point, only functions and modules are initialized.
+ assertEquals(undefined, v)
+ assertEquals(undefined, vv)
+ assertEquals(undefined, R.v)
+ assertEquals(undefined, M.v)
+ assertEquals(undefined, MM.v)
+ assertEquals(undefined, F.v)
+ assertEquals(undefined, G.v)
+ assertThrows(function() { l }, ReferenceError)
+ assertThrows(function() { ll }, ReferenceError)
+ assertThrows(function() { R.l }, ReferenceError)
+ assertThrows(function() { M.l }, ReferenceError)
+ assertThrows(function() { MM.l }, ReferenceError)
+ assertThrows(function() { F.l }, ReferenceError)
+ assertThrows(function() { G.l }, ReferenceError)
+ assertThrows(function() { c }, ReferenceError)
+ assertThrows(function() { cc }, ReferenceError)
+ assertThrows(function() { R.c }, ReferenceError)
+ assertThrows(function() { M.c }, ReferenceError)
+ assertThrows(function() { MM.c }, ReferenceError)
+ assertThrows(function() { F.c }, ReferenceError)
+ assertThrows(function() { G.c }, ReferenceError)
+ assertEquals(4, f())
+ assertEquals(24, ff())
+ assertEquals(4, R.f())
+ assertEquals(14, M.f())
+ assertEquals(34, MM.f())
+ assertEquals(44, F.f())
+ assertEquals(14, G.f())
+
+ // All properties should already exist on the instance objects, though.
+ assertTrue("v" in R)
+ assertTrue("v" in RR)
+ assertTrue("v" in M)
+ assertTrue("v" in MM)
+ assertTrue("v" in F)
+ assertTrue("v" in G)
+ assertTrue("l" in R)
+ assertTrue("l" in RR)
+ assertTrue("l" in M)
+ assertTrue("l" in MM)
+ assertTrue("l" in F)
+ assertTrue("l" in G)
+ assertTrue("c" in R)
+ assertTrue("c" in RR)
+ assertTrue("c" in M)
+ assertTrue("c" in MM)
+ assertTrue("c" in F)
+ assertTrue("c" in G)
+ assertTrue("f" in R)
+ assertTrue("f" in RR)
+ assertTrue("f" in M)
+ assertTrue("f" in MM)
+ assertTrue("f" in F)
+ assertTrue("f" in G)
+ assertTrue("M" in R)
+ assertTrue("M" in RR)
+ assertTrue("RR" in R)
+ assertTrue("RR" in RR)
+
+ // And aliases should be identical.
+ assertSame(R, RR)
+ assertSame(R, R.RR)
+ assertSame(M, R.M)
+ assertSame(M, G)
+
+ // We can only assign to var.
+ assertEquals(-1, v = -1)
+ assertEquals(-2, R.v = -2)
+ assertEquals(-2, v)
+ assertEquals(-2, R.v)
+
+ assertThrows(function() { l = -1 }, ReferenceError)
+ assertThrows(function() { R.l = -2 }, ReferenceError)
+ assertThrows(function() { l }, ReferenceError)
+ assertThrows(function() { R.l }, ReferenceError)
+
+ assertThrows(function() { eval("c = -1") }, SyntaxError)
+ assertThrows(function() { R.c = -2 }, TypeError)
+
+  // Initialize the first bunch of variables.
+ export var v = 1
+ export let l = 2
+ export const c = 3
+ export function f() { return 4 }
+
+ assertEquals(1, v)
+ assertEquals(1, R.v)
+ assertEquals(2, l)
+ assertEquals(2, R.l)
+ assertEquals(3, c)
+ assertEquals(3, R.c)
+
+ assertEquals(-3, v = -3)
+ assertEquals(-4, R.v = -4)
+ assertEquals(-3, l = -3)
+ assertEquals(-4, R.l = -4)
+ assertThrows(function() { eval("c = -3") }, SyntaxError)
+ assertThrows(function() { R.c = -4 }, TypeError)
+
+ assertEquals(-4, v)
+ assertEquals(-4, R.v)
+ assertEquals(-4, l)
+ assertEquals(-4, R.l)
+ assertEquals(3, c)
+ assertEquals(3, R.c)
+
+ // Initialize nested module.
+ export module M {
+ export var v = 11
+ export let l = 12
+ export const c = 13
+ export function f() { return 14 }
+ }
+
+ assertEquals(11, M.v)
+ assertEquals(11, G.v)
+ assertEquals(12, M.l)
+ assertEquals(12, G.l)
+ assertEquals(13, M.c)
+ assertEquals(13, G.c)
+
+ // Initialize non-exported variables.
+ var vv = 21
+ let ll = 22
+ const cc = 23
+ function ff() { return 24 }
+
+ assertEquals(21, vv)
+ assertEquals(22, ll)
+ assertEquals(23, cc)
+
+ // Initialize non-exported module.
+ module MM {
+ export var v = 31
+ export let l = 32
+ export const c = 33
+ export function f() { return 34 }
+ }
+
+ assertEquals(31, MM.v)
+ assertEquals(32, MM.l)
+ assertEquals(33, MM.c)
+
+ // Recursive self reference.
+ export module RR = R
+}
+
+// Initialize sibling module that was forward-used.
+module F {
+ assertEquals(undefined, v)
+ assertEquals(undefined, F.v)
+ assertThrows(function() { l }, ReferenceError)
+ assertThrows(function() { F.l }, ReferenceError)
+ assertThrows(function() { c }, ReferenceError)
+ assertThrows(function() { F.c }, ReferenceError)
+
+ export var v = 41
+ export let l = 42
+ export const c = 43
+ export function f() { return 44 }
+
+ assertEquals(41, v)
+ assertEquals(41, F.v)
+ assertEquals(42, l)
+ assertEquals(42, F.l)
+ assertEquals(43, c)
+ assertEquals(43, F.c)
+}
+
+// Define recursive module alias.
+module G = R.M
+
+
+
+// Second test with side effects and more module nesting.
+
let log = "";
export let x = (log += "1");
@@ -117,5 +295,4 @@ assertSame(M2, M1.A2);
assertSame(M1, M1.A2.A1);
assertSame(M2, M2.A1.A2);
-// TODO(rossberg): inner declarations are not executed yet.
-// assertEquals("1234567890", log);
+assertEquals("1234567890", log);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js b/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js
index cdd0a2e..8a9103d 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js
@@ -116,6 +116,11 @@ x
,
y
+var
+x
+,
+y
+
export
var
v1 = 1
@@ -157,3 +162,29 @@ try {} catch (module) {}
module
v = 20
+
+
+
+// Check that module declarations are rejected in eval or local scope.
+
+module M { export let x; }
+
+assertThrows("export x;", SyntaxError); // It's using eval, so should throw.
+assertThrows("export let x;", SyntaxError);
+assertThrows("import x from M;", SyntaxError);
+assertThrows("module M {};", SyntaxError);
+
+assertThrows("{ export x; }", SyntaxError);
+assertThrows("{ export let x; }", SyntaxError);
+assertThrows("{ import x from M; }", SyntaxError);
+assertThrows("{ module M {}; }", SyntaxError);
+
+assertThrows("function f() { export x; }", SyntaxError);
+assertThrows("function f() { export let x; }", SyntaxError);
+assertThrows("function f() { import x from M; }", SyntaxError);
+assertThrows("function f() { module M {}; }", SyntaxError);
+
+assertThrows("function f() { { export x; } }", SyntaxError);
+assertThrows("function f() { { export let x; } }", SyntaxError);
+assertThrows("function f() { { import x from M; } }", SyntaxError);
+assertThrows("function f() { { module M {}; } }", SyntaxError);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-recompile.js b/src/3rdparty/v8/test/mjsunit/harmony/module-recompile.js
new file mode 100644
index 0000000..23f5bfc
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-recompile.js
@@ -0,0 +1,87 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-modules
+
+// Test that potential recompilation of the global scope does not screw up.
+
+"use strict";
+
+var N = 1e5; // Number of loop iterations that trigger optimization.
+
+module A {
+ export var x = 1
+ export function f() { return x }
+}
+var f = A.f
+
+assertEquals(1, A.x)
+assertEquals(1, A.f())
+assertEquals(1, f())
+
+A.x = 2
+
+assertEquals(2, A.x)
+assertEquals(2, A.f())
+assertEquals(2, f())
+
+for (var i = 0; i < N; i++) {
+ if (i > N) print("impossible");
+}
+
+assertEquals(2, A.x)
+assertEquals(2, A.f())
+assertEquals(2, f())
+
+
+// Same test with loop inside a module.
+
+module B {
+ module A {
+ export var x = 1
+ export function f() { return x }
+ }
+ var f = A.f
+
+ assertEquals(1, A.x)
+ assertEquals(1, A.f())
+ assertEquals(1, f())
+
+ A.x = 2
+
+ assertEquals(2, A.x)
+ assertEquals(2, A.f())
+ assertEquals(2, f())
+
+ for (var i = 0; i < N; i++) {
+ if (i > N) print("impossible");
+ }
+
+ assertEquals(2, A.x)
+ assertEquals(2, A.f())
+ assertEquals(2, f())
+}
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js b/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js
index a1b9917..1a95347 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js
@@ -33,6 +33,7 @@
print("begin.")
+
export let x = print("0")
export module B = A.B
@@ -44,15 +45,25 @@ export module A {
module BB = B
export BB, x
let x = print("2")
- let y = print("3")
+ var y = print("3")
let Ax = A.x
+ try { A.y } catch (e) {} // throws
+ let Az = A.z // undefined
+ let Az2 = z // undefined
+ A.g() // hoisted
+ g() // hoisted
let ABx = A.B.x
- let Ay = A.y
+ let ABy = A.B.y
+ let Bx = B.x
+ let By = B.y
let BBx = BB.x
+ let BBy = BB.y
let Af = A.f
function f(x,y) { return x }
}
export let y = print("4")
+ export var z = print("4.1")
+ export function g() {}
let Ax = A.x
let Bx = B.x
let ABx = A.B.x
@@ -92,6 +103,8 @@ export module E {
let Bx = B.x
// TODO(rossberg): Handle import *.
// import A.*
+ module B = A.B
+ let y = A.y
}
export module M1 {
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/object-observe.js b/src/3rdparty/v8/test/mjsunit/harmony/object-observe.js
new file mode 100644
index 0000000..945841b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/object-observe.js
@@ -0,0 +1,591 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-observation
+
+var allObservers = [];
+function reset() {
+ allObservers.forEach(function(observer) { observer.reset(); });
+}
+
+function createObserver() {
+ "use strict"; // So that |this| in callback can be undefined.
+
+ var observer = {
+ records: undefined,
+ callbackCount: 0,
+ reset: function() {
+ this.records = undefined;
+ this.callbackCount = 0;
+ },
+ assertNotCalled: function() {
+ assertEquals(undefined, this.records);
+ assertEquals(0, this.callbackCount);
+ },
+ assertCalled: function() {
+ assertEquals(1, this.callbackCount);
+ },
+ assertRecordCount: function(count) {
+ this.assertCalled();
+ assertEquals(count, this.records.length);
+ },
+ assertCallbackRecords: function(recs) {
+ this.assertRecordCount(recs.length);
+ for (var i = 0; i < recs.length; i++) {
+ assertSame(this.records[i].object, recs[i].object);
+ assertEquals('string', typeof recs[i].type);
+ assertPropertiesEqual(this.records[i], recs[i]);
+ }
+ }
+ };
+
+ observer.callback = function(r) {
+ assertEquals(undefined, this);
+ assertEquals('object', typeof r);
+ assertTrue(r instanceof Array)
+ observer.records = r;
+ observer.callbackCount++;
+ };
+
+ observer.reset();
+ allObservers.push(observer);
+ return observer;
+}
+
+var observer = createObserver();
+assertEquals("function", typeof observer.callback);
+var obj = {};
+
+function frozenFunction() {}
+Object.freeze(frozenFunction);
+var nonFunction = {};
+var changeRecordWithAccessor = { type: 'foo' };
+var recordCreated = false;
+Object.defineProperty(changeRecordWithAccessor, 'name', {
+ get: function() {
+ recordCreated = true;
+ return "bar";
+ },
+ enumerable: true
+})
+
+// Object.observe
+assertThrows(function() { Object.observe("non-object", observer.callback); }, TypeError);
+assertThrows(function() { Object.observe(obj, nonFunction); }, TypeError);
+assertThrows(function() { Object.observe(obj, frozenFunction); }, TypeError);
+
+// Object.unobserve
+assertThrows(function() { Object.unobserve(4, observer.callback); }, TypeError);
+
+// Object.getNotifier
+var notifier = Object.getNotifier(obj);
+assertSame(notifier, Object.getNotifier(obj));
+assertEquals(null, Object.getNotifier(Object.freeze({})));
+assertFalse(notifier.hasOwnProperty('notify'));
+assertEquals([], Object.keys(notifier));
+var notifyDesc = Object.getOwnPropertyDescriptor(notifier.__proto__, 'notify');
+assertTrue(notifyDesc.configurable);
+assertTrue(notifyDesc.writable);
+assertFalse(notifyDesc.enumerable);
+assertThrows(function() { notifier.notify({}); }, TypeError);
+assertThrows(function() { notifier.notify({ type: 4 }); }, TypeError);
+var notify = notifier.notify;
+assertThrows(function() { notify.call(undefined, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(null, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(5, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call('hello', { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call(false, { type: 'a' }); }, TypeError);
+assertThrows(function() { notify.call({}, { type: 'a' }); }, TypeError);
+assertFalse(recordCreated);
+notifier.notify(changeRecordWithAccessor);
+assertFalse(recordCreated); // not observed yet
+
+// Object.deliverChangeRecords
+assertThrows(function() { Object.deliverChangeRecords(nonFunction); }, TypeError);
+
+// Multiple records are delivered.
+Object.observe(obj, observer.callback);
+notifier.notify({
+ type: 'updated',
+ name: 'foo',
+ expando: 1
+});
+
+notifier.notify({
+ object: notifier, // object property is ignored
+ type: 'deleted',
+ name: 'bar',
+ expando2: 'str'
+});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, name: 'foo', type: 'updated', expando: 1 },
+ { object: obj, name: 'bar', type: 'deleted', expando2: 'str' }
+]);
+
+// No delivery takes place if no records are pending
+reset();
+Object.deliverChangeRecords(observer.callback);
+observer.assertNotCalled();
+
+// Multiple observation has no effect.
+reset();
+Object.observe(obj, observer.callback);
+Object.observe(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCalled();
+
+// Observation can be stopped.
+reset();
+Object.unobserve(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+});
+Object.deliverChangeRecords(observer.callback);
+observer.assertNotCalled();
+
+// Multiple unobservation has no effect
+reset();
+Object.unobserve(obj, observer.callback);
+Object.unobserve(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+});
+Object.deliverChangeRecords(observer.callback);
+observer.assertNotCalled();
+
+// Re-observation works and only includes changeRecords issued after the call.
+reset();
+Object.getNotifier(obj).notify({
+ type: 'foo',
+});
+Object.observe(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+});
+records = undefined;
+Object.deliverChangeRecords(observer.callback);
+observer.assertRecordCount(1);
+
+// Observing a continuous stream of changes, while intermittently unobserving.
+reset();
+Object.observe(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+ val: 1
+});
+
+Object.unobserve(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+ val: 2
+});
+
+Object.observe(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+ val: 3
+});
+
+Object.unobserve(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+ val: 4
+});
+
+Object.observe(obj, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo',
+ val: 5
+});
+
+Object.unobserve(obj, observer.callback);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, type: 'foo', val: 1 },
+ { object: obj, type: 'foo', val: 3 },
+ { object: obj, type: 'foo', val: 5 }
+]);
+
+// Observing multiple objects; records appear in order.
+reset();
+var obj2 = {};
+var obj3 = {}
+Object.observe(obj, observer.callback);
+Object.observe(obj3, observer.callback);
+Object.observe(obj2, observer.callback);
+Object.getNotifier(obj).notify({
+ type: 'foo1',
+});
+Object.getNotifier(obj2).notify({
+ type: 'foo2',
+});
+Object.getNotifier(obj3).notify({
+ type: 'foo3',
+});
+Object.observe(obj3, observer.callback);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, type: 'foo1' },
+ { object: obj2, type: 'foo2' },
+ { object: obj3, type: 'foo3' }
+]);
+
+// Observing named properties.
+reset();
+var obj = {a: 1}
+Object.observe(obj, observer.callback);
+obj.a = 2;
+obj["a"] = 3;
+delete obj.a;
+obj.a = 4;
+obj.a = 4; // ignored
+obj.a = 5;
+Object.defineProperty(obj, "a", {value: 6});
+Object.defineProperty(obj, "a", {writable: false});
+obj.a = 7; // ignored
+Object.defineProperty(obj, "a", {value: 8});
+Object.defineProperty(obj, "a", {value: 7, writable: true});
+Object.defineProperty(obj, "a", {get: function() {}});
+Object.defineProperty(obj, "a", {get: function() {}});
+delete obj.a;
+delete obj.a;
+Object.defineProperty(obj, "a", {get: function() {}, configurable: true});
+Object.defineProperty(obj, "a", {value: 9, writable: true});
+obj.a = 10;
+delete obj.a;
+Object.defineProperty(obj, "a", {value: 11, configurable: true});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, name: "a", type: "updated", oldValue: 1 },
+ { object: obj, name: "a", type: "updated", oldValue: 2 },
+ { object: obj, name: "a", type: "deleted", oldValue: 3 },
+ { object: obj, name: "a", type: "new" },
+ { object: obj, name: "a", type: "updated", oldValue: 4 },
+ { object: obj, name: "a", type: "updated", oldValue: 5 },
+ { object: obj, name: "a", type: "reconfigured", oldValue: 6 },
+ { object: obj, name: "a", type: "updated", oldValue: 6 },
+ { object: obj, name: "a", type: "reconfigured", oldValue: 8 },
+ { object: obj, name: "a", type: "reconfigured", oldValue: 7 },
+ { object: obj, name: "a", type: "reconfigured" },
+ { object: obj, name: "a", type: "deleted" },
+ { object: obj, name: "a", type: "new" },
+ { object: obj, name: "a", type: "reconfigured" },
+ { object: obj, name: "a", type: "updated", oldValue: 9 },
+ { object: obj, name: "a", type: "deleted", oldValue: 10 },
+ { object: obj, name: "a", type: "new" },
+]);
+
+// Observing indexed properties.
+reset();
+var obj = {'1': 1}
+Object.observe(obj, observer.callback);
+obj[1] = 2;
+obj[1] = 3;
+delete obj[1];
+obj[1] = 4;
+obj[1] = 4; // ignored
+obj[1] = 5;
+Object.defineProperty(obj, "1", {value: 6});
+Object.defineProperty(obj, "1", {writable: false});
+obj[1] = 7; // ignored
+Object.defineProperty(obj, "1", {value: 8});
+Object.defineProperty(obj, "1", {value: 7, writable: true});
+Object.defineProperty(obj, "1", {get: function() {}});
+delete obj[1];
+delete obj[1];
+Object.defineProperty(obj, "1", {get: function() {}, configurable: true});
+Object.defineProperty(obj, "1", {value: 9, writable: true});
+obj[1] = 10;
+delete obj[1];
+Object.defineProperty(obj, "1", {value: 11, configurable: true});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, name: "1", type: "updated", oldValue: 1 },
+ { object: obj, name: "1", type: "updated", oldValue: 2 },
+ { object: obj, name: "1", type: "deleted", oldValue: 3 },
+ { object: obj, name: "1", type: "new" },
+ { object: obj, name: "1", type: "updated", oldValue: 4 },
+ { object: obj, name: "1", type: "updated", oldValue: 5 },
+ { object: obj, name: "1", type: "reconfigured", oldValue: 6 },
+ { object: obj, name: "1", type: "updated", oldValue: 6 },
+ { object: obj, name: "1", type: "reconfigured", oldValue: 8 },
+ { object: obj, name: "1", type: "reconfigured", oldValue: 7 },
+ // TODO(observe): oldValue should not be present below.
+ { object: obj, name: "1", type: "deleted", oldValue: undefined },
+ { object: obj, name: "1", type: "new" },
+ // TODO(observe): oldValue should be absent below, and type = "reconfigured".
+ { object: obj, name: "1", type: "updated", oldValue: undefined },
+ { object: obj, name: "1", type: "updated", oldValue: 9 },
+ { object: obj, name: "1", type: "deleted", oldValue: 10 },
+ { object: obj, name: "1", type: "new" },
+]);
+
+// Observing array length (including truncation)
+reset();
+var arr = ['a', 'b', 'c', 'd'];
+var arr2 = ['alpha', 'beta'];
+var arr3 = ['hello'];
+arr3[2] = 'goodbye';
+arr3.length = 6;
+// TODO(adamk): Enable this test case when it can run in a reasonable
+// amount of time.
+//var slow_arr = new Array(1000000000);
+//slow_arr[500000000] = 'hello';
+Object.defineProperty(arr, '0', {configurable: false});
+Object.defineProperty(arr, '2', {get: function(){}});
+Object.defineProperty(arr2, '0', {get: function(){}, configurable: false});
+Object.observe(arr, observer.callback);
+Object.observe(arr2, observer.callback);
+Object.observe(arr3, observer.callback);
+arr.length = 2;
+arr.length = 0;
+arr.length = 10;
+arr2.length = 0;
+arr2.length = 1; // no change expected
+arr3.length = 0;
+Object.defineProperty(arr3, 'length', {value: 5});
+Object.defineProperty(arr3, 'length', {value: 10, writable: false});
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: arr, name: '3', type: 'deleted', oldValue: 'd' },
+ // TODO(adamk): oldValue should not be present below
+ { object: arr, name: '2', type: 'deleted', oldValue: undefined },
+ { object: arr, name: 'length', type: 'updated', oldValue: 4 },
+ { object: arr, name: '1', type: 'deleted', oldValue: 'b' },
+ { object: arr, name: 'length', type: 'updated', oldValue: 2 },
+ { object: arr, name: 'length', type: 'updated', oldValue: 1 },
+ { object: arr2, name: '1', type: 'deleted', oldValue: 'beta' },
+ { object: arr2, name: 'length', type: 'updated', oldValue: 2 },
+ { object: arr3, name: '2', type: 'deleted', oldValue: 'goodbye' },
+ { object: arr3, name: '0', type: 'deleted', oldValue: 'hello' },
+ { object: arr3, name: 'length', type: 'updated', oldValue: 6 },
+ { object: arr3, name: 'length', type: 'updated', oldValue: 0 },
+ { object: arr3, name: 'length', type: 'updated', oldValue: 5 },
+ // TODO(adamk): This record should be merged with the above
+ { object: arr3, name: 'length', type: 'reconfigured' },
+]);
+
+// Assignments in loops (checking different IC states).
+reset();
+var obj = {};
+Object.observe(obj, observer.callback);
+for (var i = 0; i < 5; i++) {
+ obj["a" + i] = i;
+}
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, name: "a0", type: "new" },
+ { object: obj, name: "a1", type: "new" },
+ { object: obj, name: "a2", type: "new" },
+ { object: obj, name: "a3", type: "new" },
+ { object: obj, name: "a4", type: "new" },
+]);
+
+reset();
+var obj = {};
+Object.observe(obj, observer.callback);
+for (var i = 0; i < 5; i++) {
+ obj[i] = i;
+}
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, name: "0", type: "new" },
+ { object: obj, name: "1", type: "new" },
+ { object: obj, name: "2", type: "new" },
+ { object: obj, name: "3", type: "new" },
+ { object: obj, name: "4", type: "new" },
+]);
+
+// Adding elements past the end of an array should notify on length
+reset();
+var arr = [1, 2, 3];
+Object.observe(arr, observer.callback);
+arr[3] = 10;
+arr[100] = 20;
+Object.defineProperty(arr, '200', {value: 7});
+Object.defineProperty(arr, '400', {get: function(){}});
+arr[50] = 30; // no length change expected
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: arr, name: '3', type: 'new' },
+ { object: arr, name: 'length', type: 'updated', oldValue: 3 },
+ { object: arr, name: '100', type: 'new' },
+ { object: arr, name: 'length', type: 'updated', oldValue: 4 },
+ { object: arr, name: '200', type: 'new' },
+ { object: arr, name: 'length', type: 'updated', oldValue: 101 },
+ { object: arr, name: '400', type: 'new' },
+ { object: arr, name: 'length', type: 'updated', oldValue: 201 },
+ { object: arr, name: '50', type: 'new' },
+]);
+
+// Tests for array methods, first on arrays and then on plain objects
+//
+// === ARRAYS ===
+//
+// Push
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.push(3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '2', type: 'new' },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: 'length', type: 'updated', oldValue: 3 },
+]);
+
+// Pop
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.pop();
+array.pop();
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '1', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '0', type: 'deleted', oldValue: 1 },
+ { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Shift
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.shift();
+array.shift();
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '0', type: 'updated', oldValue: 1 },
+ { object: array, name: '1', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '0', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Unshift
+reset();
+var array = [1, 2];
+Object.observe(array, observer.callback);
+array.unshift(3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '2', type: 'new' },
+ { object: array, name: '0', type: 'updated', oldValue: 1 },
+ { object: array, name: '1', type: 'updated', oldValue: 2 },
+]);
+
+// Splice
+reset();
+var array = [1, 2, 3];
+Object.observe(array, observer.callback);
+array.splice(1, 1, 4, 5);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: 'length', type: 'updated', oldValue: 3 },
+ { object: array, name: '1', type: 'updated', oldValue: 2 },
+ { object: array, name: '2', type: 'updated', oldValue: 3 },
+]);
+
+//
+// === PLAIN OBJECTS ===
+//
+// Push
+reset()
+var array = {0: 1, 1: 2, length: 2}
+Object.observe(array, observer.callback);
+Array.prototype.push.call(array, 3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '2', type: 'new' },
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+]);
+
+// Pop
+reset()
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.pop.call(array);
+Array.prototype.pop.call(array);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '1', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '0', type: 'deleted', oldValue: 1 },
+ { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Shift
+reset()
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.shift.call(array);
+Array.prototype.shift.call(array);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '0', type: 'updated', oldValue: 1 },
+ { object: array, name: '1', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+ { object: array, name: '0', type: 'deleted', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 1 },
+]);
+
+// Unshift
+reset()
+var array = {0: 1, 1: 2, length: 2};
+Object.observe(array, observer.callback);
+Array.prototype.unshift.call(array, 3, 4);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: '2', type: 'new' },
+ { object: array, name: '0', type: 'updated', oldValue: 1 },
+ { object: array, name: '1', type: 'updated', oldValue: 2 },
+ { object: array, name: 'length', type: 'updated', oldValue: 2 },
+]);
+
+// Splice
+reset()
+var array = {0: 1, 1: 2, 2: 3, length: 3};
+Object.observe(array, observer.callback);
+Array.prototype.splice.call(array, 1, 1, 4, 5);
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: array, name: '3', type: 'new' },
+ { object: array, name: '1', type: 'updated', oldValue: 2 },
+ { object: array, name: '2', type: 'updated', oldValue: 3 },
+ { object: array, name: 'length', type: 'updated', oldValue: 3 },
+]);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/proxies-json.js b/src/3rdparty/v8/test/mjsunit/harmony/proxies-json.js
new file mode 100644
index 0000000..539c5a8
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/proxies-json.js
@@ -0,0 +1,178 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony
+
+function testStringify(expected, object) {
+ // Test fast case that bails out to slow case.
+ assertEquals(expected, JSON.stringify(object));
+ // Test slow case.
+ assertEquals(expected, JSON.stringify(object, undefined, 0));
+}
+
+// Test serializing a proxy, function proxy and objects that contain them.
+var handler1 = {
+ get: function(target, name) {
+ return name.toUpperCase();
+ },
+ enumerate: function(target) {
+ return ['a', 'b', 'c'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy1 = Proxy.create(handler1);
+testStringify('{"a":"A","b":"B","c":"C"}', proxy1);
+
+var proxy_fun = Proxy.createFunction(handler1, function() { return 1; });
+testStringify(undefined, proxy_fun);
+testStringify('[1,null]', [1, proxy_fun]);
+
+var parent1a = { b: proxy1 };
+testStringify('{"b":{"a":"A","b":"B","c":"C"}}', parent1a);
+
+var parent1b = { a: 123, b: proxy1, c: true };
+testStringify('{"a":123,"b":{"a":"A","b":"B","c":"C"},"c":true}', parent1b);
+
+var parent1c = [123, proxy1, true];
+testStringify('[123,{"a":"A","b":"B","c":"C"},true]', parent1c);
+
+// Proxy with side effect.
+var handler2 = {
+ get: function(target, name) {
+ delete parent2.c;
+ return name.toUpperCase();
+ },
+ enumerate: function(target) {
+ return ['a', 'b', 'c'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy2 = Proxy.create(handler2);
+var parent2 = { a: "delete", b: proxy2, c: "remove" };
+var expected2 = '{"a":"delete","b":{"a":"A","b":"B","c":"C"}}';
+assertEquals(expected2, JSON.stringify(parent2));
+parent2.c = "remove"; // Revert side effect.
+assertEquals(expected2, JSON.stringify(parent2, undefined, 0));
+
+// Proxy with a get function that uses the first argument.
+var handler3 = {
+ get: function(target, name) {
+ if (name == 'valueOf') return function() { return "proxy" };
+ return name + "(" + target + ")";
+ },
+ enumerate: function(target) {
+ return ['a', 'b', 'c'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy3 = Proxy.create(handler3);
+var parent3 = { x: 123, y: proxy3 }
+testStringify('{"x":123,"y":{"a":"a(proxy)","b":"b(proxy)","c":"c(proxy)"}}',
+ parent3);
+
+// Empty proxy.
+var handler4 = {
+ get: function(target, name) {
+ return 0;
+ },
+ enumerate: function(target) {
+ return [];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: false };
+ }
+}
+
+var proxy4 = Proxy.create(handler4);
+testStringify('{}', proxy4);
+testStringify('{"a":{}}', { a: proxy4 });
+
+// Proxy that provides a toJSON function that uses this.
+var handler5 = {
+ get: function(target, name) {
+ if (name == 'z') return 97000;
+ return function(key) { return key.charCodeAt(0) + this.z; };
+ },
+ enumerate: function(target) {
+ return ['toJSON', 'z'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy5 = Proxy.create(handler5);
+testStringify('{"a":97097}', { a: proxy5 });
+
+// Proxy that provides a toJSON function that returns undefined.
+var handler6 = {
+ get: function(target, name) {
+ return function(key) { return undefined; };
+ },
+ enumerate: function(target) {
+ return ['toJSON'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy6 = Proxy.create(handler6);
+testStringify('[1,null,true]', [1, proxy6, true]);
+testStringify('{"a":1,"c":true}', {a: 1, b: proxy6, c: true});
+
+// Object containing a proxy that changes the parent's properties.
+var handler7 = {
+ get: function(target, name) {
+ delete parent7.a;
+ delete parent7.c;
+ parent7.e = "5";
+ return name.toUpperCase();
+ },
+ enumerate: function(target) {
+ return ['a', 'b', 'c'];
+ },
+ getOwnPropertyDescriptor: function(target, name) {
+ return { enumerable: true };
+ }
+}
+
+var proxy7 = Proxy.create(handler7);
+var parent7 = { a: "1", b: proxy7, c: "3", d: "4" };
+assertEquals('{"a":"1","b":{"a":"A","b":"B","c":"C"},"d":"4"}',
+ JSON.stringify(parent7));
+assertEquals('{"b":{"a":"A","b":"B","c":"C"},"d":"4","e":"5"}',
+ JSON.stringify(parent7));
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/proxies.js b/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
index 8d8f839..04fc769 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
@@ -572,15 +572,16 @@ TestSetThrow(Proxy.create({
}))
+var rec
var key
var val
-function TestSetForDerived(handler) {
- TestWithProxies(TestSetForDerived2, handler)
+function TestSetForDerived(trap) {
+ TestWithProxies(TestSetForDerived2, trap)
}
-function TestSetForDerived2(create, handler) {
- var p = create(handler)
+function TestSetForDerived2(create, trap) {
+ var p = create({getPropertyDescriptor: trap, getOwnPropertyDescriptor: trap})
var o = Object.create(p, {x: {value: 88, writable: true},
'1': {value: 89, writable: true}})
@@ -607,10 +608,16 @@ function TestSetForDerived2(create, handler) {
assertEquals(45, o.p_nonwritable = 45)
assertEquals("p_nonwritable", key)
- assertEquals(45, o.p_nonwritable)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonwritable"))
+
+ assertThrows(function(){ "use strict"; o.p_nonwritable = 45 }, TypeError)
+ assertEquals("p_nonwritable", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonwritable"))
+ val = ""
assertEquals(46, o.p_setter = 46)
assertEquals("p_setter", key)
+ assertSame(o, rec)
assertEquals(46, val) // written to parent
assertFalse(Object.prototype.hasOwnProperty.call(o, "p_setter"))
@@ -624,32 +631,48 @@ function TestSetForDerived2(create, handler) {
assertThrows(function(){ "use strict"; o.p_nosetter = 50 }, TypeError)
assertEquals("p_nosetter", key)
assertEquals("", val) // not written at all
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nosetter"));
assertThrows(function(){ o.p_nonconf = 53 }, TypeError)
assertEquals("p_nonconf", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonconf"));
assertThrows(function(){ o.p_throw = 51 }, "myexn")
assertEquals("p_throw", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_throw"));
assertThrows(function(){ o.p_setterthrow = 52 }, "myexn")
assertEquals("p_setterthrow", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_setterthrow"));
}
-TestSetForDerived({
- getPropertyDescriptor: function(k) {
+
+TestSetForDerived(
+ function(k) {
+ // TODO(yangguo): issue 2398 - throwing an error causes formatting of
+ // the message string, which can be observable through this handler.
+ // We ignore keys that occur when formatting the message string.
+ if (k == "toString" || k == "valueOf") return;
+
key = k;
switch (k) {
case "p_writable": return {writable: true, configurable: true}
case "p_nonwritable": return {writable: false, configurable: true}
- case "p_setter":return {set: function(x) { val = x }, configurable: true}
- case "p_nosetter": return {get: function() { return 1 }, configurable: true}
- case "p_nonconf":return {}
+ case "p_setter": return {
+ set: function(x) { rec = this; val = x },
+ configurable: true
+ }
+ case "p_nosetter": return {
+ get: function() { return 1 },
+ configurable: true
+ }
+ case "p_nonconf": return {}
case "p_throw": throw "myexn"
case "p_setterthrow": return {set: function(x) { throw "myexn" }}
default: return undefined
}
}
-})
+)
// Evil proxy-induced side-effects shouldn't crash.
@@ -1630,8 +1653,8 @@ TestPropertyNames([], {
getOwnPropertyNames: function() { return [] }
})
-TestPropertyNames(["a", "zz", " ", "0"], {
- getOwnPropertyNames: function() { return ["a", "zz", " ", 0] }
+TestPropertyNames(["a", "zz", " ", "0", "toString"], {
+ getOwnPropertyNames: function() { return ["a", "zz", " ", 0, "toString"] }
})
TestPropertyNames(["throw", "function "], {
@@ -1678,8 +1701,8 @@ TestKeys([], {
keys: function() { return [] }
})
-TestKeys(["a", "zz", " ", "0"], {
- keys: function() { return ["a", "zz", " ", 0] }
+TestKeys(["a", "zz", " ", "0", "toString"], {
+ keys: function() { return ["a", "zz", " ", 0, "toString"] }
})
TestKeys(["throw", "function "], {
diff --git a/src/3rdparty/v8/test/mjsunit/json-recursive.js b/src/3rdparty/v8/test/mjsunit/json-recursive.js
new file mode 100644
index 0000000..7a8c547
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/json-recursive.js
@@ -0,0 +1,61 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+var a = {};
+for (i = 0; i < 10000; i++) {
+ var current = {};
+ current.a = a;
+ a = current;
+}
+
+function rec(a,b,c,d,e,f,g,h,i,j,k,l,m,n) {
+ JSON.stringify(a);
+ rec(a,b,c,d,e,f,g,h,i,j,k,l,m,n);
+}
+
+assertThrows(function() { rec(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4) },
+ RangeError);
+
+
+var depth = 10000;
+var deepArray = [];
+for (var i = 0; i < depth; i++) deepArray = [deepArray];
+assertThrows(function() { JSON.stringify(deepArray); }, RangeError);
+
+
+var deepObject = {};
+for (var i = 0; i < depth; i++) deepObject = { next: deepObject };
+assertThrows(function() { JSON.stringify(deepObject); }, RangeError);
+
+
+var str = "[1]";
+for (var i = 0; i < 100000; i++) {
+ str = "[1," + str + "]";
+}
+
+assertThrows(function() { JSON.parse(str); }, RangeError);
diff --git a/src/3rdparty/v8/test/mjsunit/json.js b/src/3rdparty/v8/test/mjsunit/json.js
index bead376..6e91725 100644
--- a/src/3rdparty/v8/test/mjsunit/json.js
+++ b/src/3rdparty/v8/test/mjsunit/json.js
@@ -257,6 +257,42 @@ assertEquals("[1,2,[3,[4],5],6,7]",
assertEquals("[2,4,[6,[8],10],12,14]",
JSON.stringify([1, 2, [3, [4], 5], 6, 7], DoubleNumbers));
assertEquals('["a","ab","abc"]', JSON.stringify(["a","ab","abc"]));
+assertEquals('{"a":1,"c":true}',
+ JSON.stringify({ a : 1,
+ b : function() { 1 },
+ c : true,
+ d : function() { 2 } }));
+assertEquals('[1,null,true,null]',
+ JSON.stringify([1, function() { 1 }, true, function() { 2 }]));
+assertEquals('"toJSON 123"',
+ JSON.stringify({ toJSON : function() { return 'toJSON 123'; } }));
+assertEquals('{"a":321}',
+ JSON.stringify({ a : { toJSON : function() { return 321; } } }));
+var counter = 0;
+assertEquals('{"getter":123}',
+ JSON.stringify({ get getter() { counter++; return 123; } }));
+assertEquals(1, counter);
+assertEquals('{"a":"abc","b":"\u1234bc"}',
+ JSON.stringify({ a : "abc", b : "\u1234bc" }));
+
+
+var a = { a : 1, b : 2 };
+delete a.a;
+assertEquals('{"b":2}', JSON.stringify(a));
+
+var b = {};
+b.__proto__ = { toJSON : function() { return 321;} };
+assertEquals("321", JSON.stringify(b));
+
+var array = [""];
+var expected = '""';
+for (var i = 0; i < 10000; i++) {
+ array.push("");
+ expected = '"",' + expected;
+}
+expected = '[' + expected + ']';
+assertEquals(expected, JSON.stringify(array));
+
var circular = [1, 2, 3];
circular[2] = circular;
@@ -428,5 +464,5 @@ var o = JSON.parse('{"__proto__":5}');
assertEquals(Object.prototype, o.__proto__); // __proto__ isn't changed.
assertEquals(0, Object.keys(o).length); // __proto__ isn't added as enumerable.
-
-
+var json = '{"stuff before slash\\\\stuff after slash":"whatever"}';
+assertEquals(json, JSON.stringify(JSON.parse(json)));
diff --git a/src/3rdparty/v8/test/mjsunit/json2.js b/src/3rdparty/v8/test/mjsunit/json2.js
new file mode 100644
index 0000000..4c0b8f5
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/json2.js
@@ -0,0 +1,153 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test JSON.stringify on the global object.
+var a = 12345;
+assertTrue(JSON.stringify(this).indexOf('"a":12345') > 0);
+
+// Test JSON.stringify of array in dictionary mode.
+var array_1 = [];
+var array_2 = [];
+array_1[100000] = 1;
+array_2[100000] = function() { return 1; };
+var nulls = "";
+for (var i = 0; i < 100000; i++) {
+ nulls += 'null,';
+}
+expected_1 = '[' + nulls + '1]';
+expected_2 = '[' + nulls + 'null]';
+assertEquals(expected_1, JSON.stringify(array_1));
+assertEquals(expected_2, JSON.stringify(array_2));
+
+// Test JSValue with custom prototype.
+var num_wrapper = Object(42);
+num_wrapper.__proto__ = { __proto__: null,
+ toString: function() { return true; } };
+assertEquals('1', JSON.stringify(num_wrapper));
+
+var str_wrapper = Object('2');
+str_wrapper.__proto__ = { __proto__: null,
+ toString: function() { return true; } };
+assertEquals('"true"', JSON.stringify(str_wrapper));
+
+var bool_wrapper = Object(false);
+bool_wrapper.__proto__ = { __proto__: null,
+ toString: function() { return true; } };
+// Note that toString function is not evaluated here!
+assertEquals('false', JSON.stringify(bool_wrapper));
+
+// Test getters.
+var counter = 0;
+var getter_obj = { get getter() {
+ counter++;
+ return 123;
+ } };
+assertEquals('{"getter":123}', JSON.stringify(getter_obj));
+assertEquals(1, counter);
+
+// Test toJSON function.
+var tojson_obj = { toJSON: function() {
+ counter++;
+ return [1, 2];
+ },
+ a: 1};
+assertEquals('[1,2]', JSON.stringify(tojson_obj));
+assertEquals(2, counter);
+
+// Test that we don't recursively look for the toJSON function.
+var tojson_proto_obj = { a: 'fail' };
+tojson_proto_obj.__proto__ = { toJSON: function() {
+ counter++;
+ return tojson_obj;
+ } };
+assertEquals('{"a":1}', JSON.stringify(tojson_proto_obj));
+
+// Test toJSON produced by a getter.
+var tojson_via_getter = { get toJSON() {
+ return function(x) {
+ counter++;
+ return 321;
+ };
+ },
+ a: 1 };
+assertEquals('321', JSON.stringify(tojson_via_getter));
+
+// Test toJSON with key.
+tojson_obj = { toJSON: function(key) { return key + key; } };
+var tojson_with_key_1 = { a: tojson_obj, b: tojson_obj };
+assertEquals('{"a":"aa","b":"bb"}', JSON.stringify(tojson_with_key_1));
+var tojson_with_key_2 = [ tojson_obj, tojson_obj ];
+assertEquals('["00","11"]', JSON.stringify(tojson_with_key_2));
+
+// Test toJSON with exception.
+var tojson_ex = { toJSON: function(key) { throw "123" } };
+assertThrows(function() { JSON.stringify(tojson_ex); });
+
+// Test toJSON with access to this.
+var obj = { toJSON: function(key) { return this.a + key; }, a: "x" };
+assertEquals('{"y":"xy"}', JSON.stringify({y: obj}));
+
+// Test holes in arrays.
+var fast_smi = [1, 2, 3, 4];
+fast_smi.__proto__ = [7, 7, 7, 7];
+delete fast_smi[2];
+assertTrue(%HasFastSmiElements(fast_smi));
+assertEquals("[1,2,7,4]", JSON.stringify(fast_smi));
+
+var fast_double = [1.1, 2, 3, 4];
+fast_double.__proto__ = [7, 7, 7, 7];
+
+delete fast_double[2];
+assertTrue(%HasFastDoubleElements(fast_double));
+assertEquals("[1.1,2,7,4]", JSON.stringify(fast_double));
+
+var fast_obj = [1, 2, {}, {}];
+fast_obj.__proto__ = [7, 7, 7, 7];
+
+delete fast_obj[2];
+assertTrue(%HasFastObjectElements(fast_obj));
+assertEquals("[1,2,7,{}]", JSON.stringify(fast_obj));
+
+var getter_side_effect = { a: 1,
+ get b() {
+ delete this.a;
+ delete this.c;
+ this.e = 5;
+ return 2;
+ },
+ c: 3,
+ d: 4 };
+assertEquals('{"a":1,"b":2,"d":4}', JSON.stringify(getter_side_effect));
+assertEquals('{"b":2,"d":4,"e":5}', JSON.stringify(getter_side_effect));
+
+var non_enum = {};
+non_enum.a = 1;
+Object.defineProperty(non_enum, "b", { value: 2, enumerable: false });
+non_enum.c = 3;
+assertEquals('{"a":1,"c":3}', JSON.stringify(non_enum));
diff --git a/src/3rdparty/v8/test/mjsunit/limit-locals.js b/src/3rdparty/v8/test/mjsunit/limit-locals.js
index ad9ec43..a166f30 100644
--- a/src/3rdparty/v8/test/mjsunit/limit-locals.js
+++ b/src/3rdparty/v8/test/mjsunit/limit-locals.js
@@ -25,7 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Test that there is a limit of 32767 locals.
+// Test that there is a limit of 131071 locals.
+
+// Flags: --stack-size=1200
function function_with_n_locals(n) {
test_prefix = "prefix ";
@@ -40,7 +42,6 @@ function function_with_n_locals(n) {
assertEquals("prefix 0 suffix", function_with_n_locals(0));
assertEquals("prefix 16000 suffix", function_with_n_locals(16000));
-assertEquals("prefix 32767 suffix", function_with_n_locals(32767));
+assertEquals("prefix 131071 suffix", function_with_n_locals(131071));
-assertThrows("function_with_n_locals(32768)");
-assertThrows("function_with_n_locals(100000)");
+assertThrows("function_with_n_locals(131072)");
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor-negative.js b/src/3rdparty/v8/test/mjsunit/math-floor-negative.js
new file mode 100644
index 0000000..4cabff5
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-negative.js
@@ -0,0 +1,59 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --noenable_sse4_1 --allow-natives-syntax
+
+function test1() {
+ // Trigger overflow when converting/truncating double to integer.
+ // Divide by 10 to avoid overflow when smi-tagging at the end.
+ return Math.floor(-100000000000.5) / 10;
+}
+
+function test2() {
+ // Trigger no overflow.
+ return Math.floor(-100.2);
+}
+
+function test3() {
+ // Trigger overflow when compensating by subtracting after compare.
+ // Divide by 10 to avoid overflow when smi-tagging at the end.
+ return Math.floor(-2147483648.1) / 10;
+}
+
+test1();
+test1();
+%OptimizeFunctionOnNextCall(test1);
+test2();
+test2();
+%OptimizeFunctionOnNextCall(test2);
+test3();
+test3();
+%OptimizeFunctionOnNextCall(test3);
+
+assertEquals(-10000000000.1, test1());
+assertEquals(-101, test2());
+assertEquals(-214748364.9, test3());
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor-of-div-minus-zero.js b/src/3rdparty/v8/test/mjsunit/math-floor-of-div-minus-zero.js
new file mode 100644
index 0000000..2743490
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-of-div-minus-zero.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --nouse_inlining --noparallel-recompilation
+
+// Test for negative zero that doesn't need bail out
+
+function test_div_no_deopt_minus_zero() {
+ var zero_in_array = [0];
+ assertTrue(0 === (Math.floor((zero_in_array[0] | 0) / -1) | 0));
+}
+
+test_div_no_deopt_minus_zero();
+%OptimizeFunctionOnNextCall(test_div_no_deopt_minus_zero);
+test_div_no_deopt_minus_zero();
+assertTrue(2 != %GetOptimizationStatus(test_div_no_deopt_minus_zero));
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor.js b/src/3rdparty/v8/test/mjsunit/math-floor-part1.js
index f211ce2..313f272 100644
--- a/src/3rdparty/v8/test/mjsunit/math-floor.js
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-part1.js
@@ -45,13 +45,6 @@ function zero() {
}
function test() {
- testFloor(0, 0);
- testFloor(0, zero());
- testFloor(-0, -0);
- testFloor(Infinity, Infinity);
- testFloor(-Infinity, -Infinity);
- testFloor(NaN, NaN);
-
// Ensure that a negative zero coming from Math.floor is properly handled
// by other operations.
function ifloor(x) {
@@ -86,74 +79,10 @@ function test() {
testFloor(-Number.MAX_VALUE, -Number.MAX_VALUE);
testFloor(Infinity, Infinity);
testFloor(-Infinity, -Infinity);
-
- // 2^30 is a smi boundary.
- var two_30 = 1 << 30;
-
- testFloor(two_30, two_30);
- testFloor(two_30, two_30 + 0.1);
- testFloor(two_30, two_30 + 0.5);
- testFloor(two_30, two_30 + 0.7);
-
- testFloor(two_30 - 1, two_30 - 1);
- testFloor(two_30 - 1, two_30 - 1 + 0.1);
- testFloor(two_30 - 1, two_30 - 1 + 0.5);
- testFloor(two_30 - 1, two_30 - 1 + 0.7);
-
- testFloor(-two_30, -two_30);
- testFloor(-two_30, -two_30 + 0.1);
- testFloor(-two_30, -two_30 + 0.5);
- testFloor(-two_30, -two_30 + 0.7);
-
- testFloor(-two_30 + 1, -two_30 + 1);
- testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
- testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
- testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
-
- // 2^52 is a precision boundary.
- var two_52 = (1 << 30) * (1 << 22);
-
- testFloor(two_52, two_52);
- testFloor(two_52, two_52 + 0.1);
- assertEquals(two_52, two_52 + 0.5);
- testFloor(two_52, two_52 + 0.5);
- assertEquals(two_52 + 1, two_52 + 0.7);
- testFloor(two_52 + 1, two_52 + 0.7);
-
- testFloor(two_52 - 1, two_52 - 1);
- testFloor(two_52 - 1, two_52 - 1 + 0.1);
- testFloor(two_52 - 1, two_52 - 1 + 0.5);
- testFloor(two_52 - 1, two_52 - 1 + 0.7);
-
- testFloor(-two_52, -two_52);
- testFloor(-two_52, -two_52 + 0.1);
- testFloor(-two_52, -two_52 + 0.5);
- testFloor(-two_52, -two_52 + 0.7);
-
- testFloor(-two_52 + 1, -two_52 + 1);
- testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
- testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
- testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
}
// Test in a loop to cover the custom IC and GC-related issues.
-for (var i = 0; i < 500; i++) {
+for (var i = 0; i < 100; i++) {
test();
}
-
-
-// Regression test for a bug where a negative zero coming from Math.floor
-// was not properly handled by other operations.
-function floorsum(i, n) {
- var ret = Math.floor(n);
- while (--i > 0) {
- ret += Math.floor(n);
- }
- return ret;
-}
-assertEquals(-0, floorsum(1, -0));
-%OptimizeFunctionOnNextCall(floorsum);
-// The optimized function will deopt. Run it with enough iterations to try
-// to optimize via OSR (triggering the bug).
-assertEquals(-0, floorsum(100000, -0));
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor-part2.js b/src/3rdparty/v8/test/mjsunit/math-floor-part2.js
new file mode 100644
index 0000000..b6d51b2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-part2.js
@@ -0,0 +1,76 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) + '";return Math.floor(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ // 2^30 is a smi boundary.
+ var two_30 = 1 << 30;
+
+ testFloor(two_30, two_30);
+ testFloor(two_30, two_30 + 0.1);
+ testFloor(two_30, two_30 + 0.5);
+ testFloor(two_30, two_30 + 0.7);
+
+ testFloor(two_30 - 1, two_30 - 1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.5);
+ testFloor(two_30 - 1, two_30 - 1 + 0.7);
+
+ testFloor(-two_30, -two_30);
+ testFloor(-two_30, -two_30 + 0.1);
+ testFloor(-two_30, -two_30 + 0.5);
+ testFloor(-two_30, -two_30 + 0.7);
+
+ testFloor(-two_30 + 1, -two_30 + 1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 100; i++) {
+ test();
+}
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor-part3.js b/src/3rdparty/v8/test/mjsunit/math-floor-part3.js
new file mode 100644
index 0000000..db25923
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-part3.js
@@ -0,0 +1,78 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) + '";return Math.floor(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ // 2^52 is a precision boundary.
+ var two_52 = (1 << 30) * (1 << 22);
+
+ testFloor(two_52, two_52);
+ testFloor(two_52, two_52 + 0.1);
+ assertEquals(two_52, two_52 + 0.5);
+ testFloor(two_52, two_52 + 0.5);
+ assertEquals(two_52 + 1, two_52 + 0.7);
+ testFloor(two_52 + 1, two_52 + 0.7);
+
+ testFloor(two_52 - 1, two_52 - 1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.5);
+ testFloor(two_52 - 1, two_52 - 1 + 0.7);
+
+ testFloor(-two_52, -two_52);
+ testFloor(-two_52, -two_52 + 0.1);
+ testFloor(-two_52, -two_52 + 0.5);
+ testFloor(-two_52, -two_52 + 0.7);
+
+ testFloor(-two_52 + 1, -two_52 + 1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 100; i++) {
+ test();
+}
diff --git a/src/3rdparty/v8/test/mjsunit/math-floor-part4.js b/src/3rdparty/v8/test/mjsunit/math-floor-part4.js
new file mode 100644
index 0000000..c633623
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/math-floor-part4.js
@@ -0,0 +1,76 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) + '";return Math.floor(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ testFloor(0, 0);
+ testFloor(0, zero());
+ testFloor(-0, -0);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+ testFloor(NaN, NaN);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 100; i++) {
+ test();
+}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
diff --git a/src/3rdparty/v8/test/mjsunit/mirror-object.js b/src/3rdparty/v8/test/mjsunit/mirror-object.js
index d4d228c..8bf8a2d 100644
--- a/src/3rdparty/v8/test/mjsunit/mirror-object.js
+++ b/src/3rdparty/v8/test/mjsunit/mirror-object.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,19 +49,19 @@ function testObjectMirror(obj, cls_name, ctor_name, hasSpecialProperties) {
JSON.stringify(serializer.serializeReferencedObjects()));
// Check the mirror hierachy.
- assertTrue(mirror instanceof debug.Mirror, 'Unexpected mirror hierachy');
- assertTrue(mirror instanceof debug.ValueMirror, 'Unexpected mirror hierachy');
- assertTrue(mirror instanceof debug.ObjectMirror, 'Unexpected mirror hierachy');
+ assertTrue(mirror instanceof debug.Mirror, 'Unexpected mirror hierarchy');
+ assertTrue(mirror instanceof debug.ValueMirror, 'Unexpected mirror hierarchy');
+ assertTrue(mirror instanceof debug.ObjectMirror, 'Unexpected mirror hierarchy');
// Check the mirror properties.
assertTrue(mirror.isObject(), 'Unexpected mirror');
assertEquals('object', mirror.type(), 'Unexpected mirror type');
assertFalse(mirror.isPrimitive(), 'Unexpected primitive mirror');
assertEquals(cls_name, mirror.className(), 'Unexpected mirror class name');
- assertTrue(mirror.constructorFunction() instanceof debug.ObjectMirror, 'Unexpected mirror hierachy');
+ assertTrue(mirror.constructorFunction() instanceof debug.ObjectMirror, 'Unexpected mirror hierarchy');
assertEquals(ctor_name, mirror.constructorFunction().name(), 'Unexpected constructor function name');
- assertTrue(mirror.protoObject() instanceof debug.Mirror, 'Unexpected mirror hierachy');
- assertTrue(mirror.prototypeObject() instanceof debug.Mirror, 'Unexpected mirror hierachy');
+ assertTrue(mirror.protoObject() instanceof debug.Mirror, 'Unexpected mirror hierarchy');
+ assertTrue(mirror.prototypeObject() instanceof debug.Mirror, 'Unexpected mirror hierarchy');
assertFalse(mirror.hasNamedInterceptor(), 'No named interceptor expected');
assertFalse(mirror.hasIndexedInterceptor(), 'No indexed interceptor expected');
@@ -69,12 +69,19 @@ function testObjectMirror(obj, cls_name, ctor_name, hasSpecialProperties) {
var properties = mirror.properties();
assertEquals(names.length, properties.length);
for (var i = 0; i < properties.length; i++) {
- assertTrue(properties[i] instanceof debug.Mirror, 'Unexpected mirror hierachy');
- assertTrue(properties[i] instanceof debug.PropertyMirror, 'Unexpected mirror hierachy');
+ assertTrue(properties[i] instanceof debug.Mirror, 'Unexpected mirror hierarchy');
+ assertTrue(properties[i] instanceof debug.PropertyMirror, 'Unexpected mirror hierarchy');
assertEquals('property', properties[i].type(), 'Unexpected mirror type');
assertEquals(names[i], properties[i].name(), 'Unexpected property name');
}
+ var internalProperties = mirror.internalProperties();
+ for (var i = 0; i < internalProperties.length; i++) {
+ assertTrue(internalProperties[i] instanceof debug.Mirror, 'Unexpected mirror hierarchy');
+ assertTrue(internalProperties[i] instanceof debug.InternalPropertyMirror, 'Unexpected mirror hierarchy');
+ assertEquals('internalProperty', internalProperties[i].type(), 'Unexpected mirror type');
+ }
+
for (var p in obj) {
var property_mirror = mirror.property(p);
assertTrue(property_mirror instanceof debug.PropertyMirror);
@@ -172,6 +179,7 @@ testObjectMirror(this, 'global', '', true); // Global object has special proper
testObjectMirror(this.__proto__, 'Object', '');
testObjectMirror([], 'Array', 'Array');
testObjectMirror([1,2], 'Array', 'Array');
+testObjectMirror(Object(17), 'Number', 'Number');
// Test circular references.
o = {};
@@ -230,3 +238,29 @@ assertTrue(mirror.property('length').isNative());
assertEquals('a', mirror.property(0).value().value());
assertEquals('b', mirror.property(1).value().value());
assertEquals('c', mirror.property(2).value().value());
+
+// Test value wrapper internal properties.
+mirror = debug.MakeMirror(Object("Capybara"));
+var ip = mirror.internalProperties();
+assertEquals(1, ip.length);
+assertEquals("[[PrimitiveValue]]", ip[0].name());
+assertEquals("string", ip[0].value().type());
+assertEquals("Capybara", ip[0].value().value());
+
+// Test bound function internal properties.
+mirror = debug.MakeMirror(Number.bind(Array, 2));
+ip = mirror.internalProperties();
+assertEquals(3, ip.length);
+var property_map = {};
+for (var i = 0; i < ip.length; i++) {
+ property_map[ip[i].name()] = ip[i];
+}
+assertTrue("[[BoundThis]]" in property_map);
+assertEquals("function", property_map["[[BoundThis]]"].value().type());
+assertEquals(Array, property_map["[[BoundThis]]"].value().value());
+assertTrue("[[TargetFunction]]" in property_map);
+assertEquals("function", property_map["[[TargetFunction]]"].value().type());
+assertEquals(Number, property_map["[[TargetFunction]]"].value().value());
+assertTrue("[[BoundArgs]]" in property_map);
+assertEquals("object", property_map["[[BoundArgs]]"].value().type());
+assertEquals(1, property_map["[[BoundArgs]]"].value().value().length);
diff --git a/src/3rdparty/v8/test/mjsunit/mjsunit.js b/src/3rdparty/v8/test/mjsunit/mjsunit.js
index 65fb301..25d7c00 100644
--- a/src/3rdparty/v8/test/mjsunit/mjsunit.js
+++ b/src/3rdparty/v8/test/mjsunit/mjsunit.js
@@ -321,7 +321,7 @@ var assertUnreachable;
assertInstanceof = function assertInstanceof(obj, type) {
if (!(obj instanceof type)) {
var actualTypeName = null;
- var actualConstructor = Object.prototypeOf(obj).constructor;
+ var actualConstructor = Object.getPrototypeOf(obj).constructor;
if (typeof actualConstructor == "function") {
actualTypeName = actualConstructor.name || String(actualConstructor);
}
diff --git a/src/3rdparty/v8/test/mjsunit/mjsunit.status b/src/3rdparty/v8/test/mjsunit/mjsunit.status
index ab5f2e3..0bf378b 100644
--- a/src/3rdparty/v8/test/mjsunit/mjsunit.status
+++ b/src/3rdparty/v8/test/mjsunit/mjsunit.status
@@ -28,61 +28,62 @@
prefix mjsunit
# All tests in the bug directory are expected to fail.
-bugs: FAIL
+bugs/*: FAIL
##############################################################################
# Fails.
regress/regress-1119: FAIL
-##############################################################################
-
-# NewGC: BUG(1719) slow to collect arrays over several contexts.
+# Issue 1719: Slow to collect arrays over several contexts.
regress/regress-524: SKIP
+# When that bug is fixed, revert the expectation to:
+# Skip long running test in debug and allow it to timeout in release mode.
+# regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
##############################################################################
# Too slow in debug mode with --stress-opt
compiler/regress-stacktrace-methods: PASS, SKIP if $mode == debug
compiler/regress-funcaller: PASS, SKIP if $mode == debug
+regress/regress-2318: PASS, SKIP if $mode == debug
regress/regress-create-exception: PASS, SKIP if $mode == debug
##############################################################################
-# This one uses a built-in that's only present in debug mode. It takes
+# These use a built-in that's only present in debug mode. They take
# too long to run in debug mode on ARM and MIPS.
-fuzz-natives: PASS, SKIP if ($mode == release || $arch == arm || $arch == mips)
+fuzz-natives-part*: PASS, SKIP if ($mode == release || $arch == arm || $arch == android_arm || $arch == mipsel)
-big-object-literal: PASS, SKIP if ($arch == arm)
+big-object-literal: PASS, SKIP if ($arch == arm || $arch == android_arm)
# Issue 488: this test sometimes times out.
array-constructor: PASS || TIMEOUT
# Very slow on ARM and MIPS, contains no architecture dependent code.
-unicode-case-overoptimization: PASS, TIMEOUT if ($arch == arm || $arch == mips)
-
-# Skip long running test in debug and allow it to timeout in release mode.
-regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
+unicode-case-overoptimization: PASS, TIMEOUT if ($arch == arm || $arch == android_arm || $arch == mipsel)
-# Stack manipulations in LiveEdit are buggy - see bug 915
-debug-liveedit-check-stack: SKIP
-debug-liveedit-patch-positions-replace: SKIP
-debug-liveedit-stack-padding: SKIP
-
-# Test Crankshaft compilation time. Expected to take too long in debug mode.
-regress/regress-1969: PASS, SKIP if $mode == debug
+##############################################################################
+# This test expects to reach a certain recursion depth, which may not work
+# for debug mode.
+json-recursive: PASS, (PASS || FAIL) if $mode == debug
##############################################################################
-[ $isolates ]
+# Skip long running test that times out in debug mode.
+regress/regress-crbug-160010: PASS, SKIP if $mode == debug
+##############################################################################
# This test sets the umask on a per-process basis and hence cannot be
# used in multi-threaded runs.
-d8-os: SKIP
+# On android there is no /tmp directory.
+d8-os: PASS, SKIP if ($isolates || $arch == android_arm || $arch == android_ia32)
+tools/tickprocessor: PASS, SKIP if ($arch == android_arm || $arch == android_ia32)
##############################################################################
-[ $arch == arm ]
+[ $arch == arm || $arch == android_arm ]
# Slow tests which times out in debug mode.
try: PASS, SKIP if $mode == debug
debug-scripts-request: PASS, SKIP if $mode == debug
array-constructor: PASS, SKIP if $mode == debug
+regress/regress-1122: PASS, SKIP if ($mode == debug && $arch == android_arm)
# Flaky test that can hit compilation-time stack overflow in debug mode.
unicode-test: PASS, (PASS || FAIL) if $mode == debug
@@ -92,8 +93,8 @@ compiler/regress-stacktrace-methods: PASS, PASS || TIMEOUT if $mode == release
array-splice: PASS || TIMEOUT
# Long running test.
-mirror-object: PASS || TIMEOUT
string-indexof-2: PASS || TIMEOUT
+mirror-object: PASS || TIMEOUT
# BUG(3251035): Timeouts in long looping crankshaft optimization
# tests. Skipping because having them timeout takes too long on the
@@ -110,11 +111,9 @@ compiler/property-calls: SKIP
compiler/recursive-deopt: SKIP
compiler/regress-4: SKIP
compiler/regress-funcaller: SKIP
-compiler/regress-gvn: SKIP
compiler/regress-rep-change: SKIP
compiler/regress-arguments: SKIP
compiler/regress-funarguments: SKIP
-compiler/regress-or: SKIP
compiler/regress-3249650: SKIP
compiler/simple-deopt: SKIP
regress/regress-490: SKIP
@@ -128,8 +127,17 @@ regress/regress-3247124: SKIP
# should be platform-independent.
regress/regress-1132: SKIP
+# Stack manipulations in LiveEdit is not implemented for this arch.
+debug-liveedit-check-stack: SKIP
+debug-liveedit-stack-padding: SKIP
+debug-liveedit-restart-frame: SKIP
+debug-liveedit-double-call: SKIP
+
+# Currently always deopt on minus zero
+math-floor-of-div-minus-zero: SKIP
+
##############################################################################
-[ $arch == mips ]
+[ $arch == mipsel ]
# Slow tests which times out in debug mode.
try: PASS, SKIP if $mode == debug
@@ -159,11 +167,9 @@ compiler/property-calls: SKIP
compiler/recursive-deopt: SKIP
compiler/regress-4: SKIP
compiler/regress-funcaller: SKIP
-compiler/regress-gvn: SKIP
compiler/regress-rep-change: SKIP
compiler/regress-arguments: SKIP
compiler/regress-funarguments: SKIP
-compiler/regress-or: SKIP
compiler/regress-3249650: SKIP
compiler/simple-deopt: SKIP
regress/regress-490: SKIP
@@ -176,3 +182,10 @@ regress/regress-3247124: SKIP
# the test requires too much time to run. However, the problem test covers
# should be platform-independent.
regress/regress-1132: SKIP
+
+# Stack manipulations in LiveEdit is not implemented for this arch.
+debug-liveedit-check-stack: SKIP
+debug-liveedit-stack-padding: SKIP
+debug-liveedit-restart-frame: SKIP
+debug-liveedit-double-call: SKIP
+
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part1.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part1.js
new file mode 100644
index 0000000..7902cc2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part1.js
@@ -0,0 +1,491 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 0;
+f(0, 0);
+x = 1;
+f(0, 0);
+f(1, 1);
+x = 2;
+f(0, 0);
+f(2, 1);
+f(4, 2);
+x = 3;
+f(0, 0);
+f(3, 1);
+f(6, 2);
+f(9, 3);
+x = 4;
+f(0, 0);
+f(4, 1);
+f(8, 2);
+f(12, 3);
+f(16, 4);
+x = 5;
+f(0, 0);
+f(5, 1);
+f(10, 2);
+f(15, 3);
+f(20, 4);
+f(25, 5);
+x = 7;
+f(0, 0);
+f(7, 1);
+f(14, 2);
+f(21, 3);
+f(28, 4);
+f(35, 5);
+f(49, 7);
+x = 8;
+f(0, 0);
+f(8, 1);
+f(16, 2);
+f(24, 3);
+f(32, 4);
+f(40, 5);
+f(56, 7);
+f(64, 8);
+x = 9;
+f(0, 0);
+f(9, 1);
+f(18, 2);
+f(27, 3);
+f(36, 4);
+f(45, 5);
+f(63, 7);
+f(72, 8);
+f(81, 9);
+x = 15;
+f(0, 0);
+f(15, 1);
+f(30, 2);
+f(45, 3);
+f(60, 4);
+f(75, 5);
+f(105, 7);
+f(120, 8);
+f(135, 9);
+f(225, 15);
+x = 16;
+f(0, 0);
+f(16, 1);
+f(32, 2);
+f(48, 3);
+f(64, 4);
+f(80, 5);
+f(112, 7);
+f(128, 8);
+f(144, 9);
+f(240, 15);
+f(256, 16);
+x = 17;
+f(0, 0);
+f(17, 1);
+f(34, 2);
+f(51, 3);
+f(68, 4);
+f(85, 5);
+f(119, 7);
+f(136, 8);
+f(153, 9);
+f(255, 15);
+f(272, 16);
+f(289, 17);
+x = 31;
+f(0, 0);
+f(31, 1);
+f(62, 2);
+f(93, 3);
+f(124, 4);
+f(155, 5);
+f(217, 7);
+f(248, 8);
+f(279, 9);
+f(465, 15);
+f(496, 16);
+f(527, 17);
+f(961, 31);
+x = 32;
+f(0, 0);
+f(32, 1);
+f(64, 2);
+f(96, 3);
+f(128, 4);
+f(160, 5);
+f(224, 7);
+f(256, 8);
+f(288, 9);
+f(480, 15);
+f(512, 16);
+f(544, 17);
+f(992, 31);
+f(1024, 32);
+x = 33;
+f(0, 0);
+f(33, 1);
+f(66, 2);
+f(99, 3);
+f(132, 4);
+f(165, 5);
+f(231, 7);
+f(264, 8);
+f(297, 9);
+f(495, 15);
+f(528, 16);
+f(561, 17);
+f(1023, 31);
+f(1056, 32);
+f(1089, 33);
+x = 63;
+f(0, 0);
+f(63, 1);
+f(126, 2);
+f(189, 3);
+f(252, 4);
+f(315, 5);
+f(441, 7);
+f(504, 8);
+f(567, 9);
+f(945, 15);
+f(1008, 16);
+f(1071, 17);
+f(1953, 31);
+f(2016, 32);
+f(2079, 33);
+f(3969, 63);
+x = 64;
+f(0, 0);
+f(64, 1);
+f(128, 2);
+f(192, 3);
+f(256, 4);
+f(320, 5);
+f(448, 7);
+f(512, 8);
+f(576, 9);
+f(960, 15);
+f(1024, 16);
+f(1088, 17);
+f(1984, 31);
+f(2048, 32);
+f(2112, 33);
+f(4032, 63);
+f(4096, 64);
+x = 65;
+f(0, 0);
+f(65, 1);
+f(130, 2);
+f(195, 3);
+f(260, 4);
+f(325, 5);
+f(455, 7);
+f(520, 8);
+f(585, 9);
+f(975, 15);
+f(1040, 16);
+f(1105, 17);
+f(2015, 31);
+f(2080, 32);
+f(2145, 33);
+f(4095, 63);
+f(4160, 64);
+f(4225, 65);
+x = 127;
+f(0, 0);
+f(127, 1);
+f(254, 2);
+f(381, 3);
+f(508, 4);
+f(635, 5);
+f(889, 7);
+f(1016, 8);
+f(1143, 9);
+f(1905, 15);
+f(2032, 16);
+f(2159, 17);
+f(3937, 31);
+f(4064, 32);
+f(4191, 33);
+f(8001, 63);
+f(8128, 64);
+f(8255, 65);
+f(16129, 127);
+x = 128;
+f(0, 0);
+f(128, 1);
+f(256, 2);
+f(384, 3);
+f(512, 4);
+f(640, 5);
+f(896, 7);
+f(1024, 8);
+f(1152, 9);
+f(1920, 15);
+f(2048, 16);
+f(2176, 17);
+f(3968, 31);
+f(4096, 32);
+f(4224, 33);
+f(8064, 63);
+f(8192, 64);
+f(8320, 65);
+f(16256, 127);
+f(16384, 128);
+x = 129;
+f(0, 0);
+f(129, 1);
+f(258, 2);
+f(387, 3);
+f(516, 4);
+f(645, 5);
+f(903, 7);
+f(1032, 8);
+f(1161, 9);
+f(1935, 15);
+f(2064, 16);
+f(2193, 17);
+f(3999, 31);
+f(4128, 32);
+f(4257, 33);
+f(8127, 63);
+f(8256, 64);
+f(8385, 65);
+f(16383, 127);
+f(16512, 128);
+f(16641, 129);
+x = 255;
+f(0, 0);
+f(255, 1);
+f(510, 2);
+f(765, 3);
+f(1020, 4);
+f(1275, 5);
+f(1785, 7);
+f(2040, 8);
+f(2295, 9);
+f(3825, 15);
+f(4080, 16);
+f(4335, 17);
+f(7905, 31);
+f(8160, 32);
+f(8415, 33);
+f(16065, 63);
+f(16320, 64);
+f(16575, 65);
+f(32385, 127);
+f(32640, 128);
+f(32895, 129);
+f(65025, 255);
+x = 256;
+f(0, 0);
+f(256, 1);
+f(512, 2);
+f(768, 3);
+f(1024, 4);
+f(1280, 5);
+f(1792, 7);
+f(2048, 8);
+f(2304, 9);
+f(3840, 15);
+f(4096, 16);
+f(4352, 17);
+f(7936, 31);
+f(8192, 32);
+f(8448, 33);
+f(16128, 63);
+f(16384, 64);
+f(16640, 65);
+f(32512, 127);
+f(32768, 128);
+f(33024, 129);
+f(65280, 255);
+f(65536, 256);
+x = 257;
+f(0, 0);
+f(257, 1);
+f(514, 2);
+f(771, 3);
+f(1028, 4);
+f(1285, 5);
+f(1799, 7);
+f(2056, 8);
+f(2313, 9);
+f(3855, 15);
+f(4112, 16);
+f(4369, 17);
+f(7967, 31);
+f(8224, 32);
+f(8481, 33);
+f(16191, 63);
+f(16448, 64);
+f(16705, 65);
+f(32639, 127);
+f(32896, 128);
+f(33153, 129);
+f(65535, 255);
+f(65792, 256);
+f(66049, 257);
+x = 511;
+f(0, 0);
+f(511, 1);
+f(1022, 2);
+f(1533, 3);
+f(2044, 4);
+f(2555, 5);
+f(3577, 7);
+f(4088, 8);
+f(4599, 9);
+f(7665, 15);
+f(8176, 16);
+f(8687, 17);
+f(15841, 31);
+f(16352, 32);
+f(16863, 33);
+f(32193, 63);
+f(32704, 64);
+f(33215, 65);
+f(64897, 127);
+f(65408, 128);
+f(65919, 129);
+f(130305, 255);
+f(130816, 256);
+f(131327, 257);
+f(261121, 511);
+x = 512;
+f(0, 0);
+f(512, 1);
+f(1024, 2);
+f(1536, 3);
+f(2048, 4);
+f(2560, 5);
+f(3584, 7);
+f(4096, 8);
+f(4608, 9);
+f(7680, 15);
+f(8192, 16);
+f(8704, 17);
+f(15872, 31);
+f(16384, 32);
+f(16896, 33);
+f(32256, 63);
+f(32768, 64);
+f(33280, 65);
+f(65024, 127);
+f(65536, 128);
+f(66048, 129);
+f(130560, 255);
+f(131072, 256);
+f(131584, 257);
+f(261632, 511);
+f(262144, 512);
+x = 513;
+f(0, 0);
+f(513, 1);
+f(1026, 2);
+f(1539, 3);
+f(2052, 4);
+f(2565, 5);
+f(3591, 7);
+f(4104, 8);
+f(4617, 9);
+f(7695, 15);
+f(8208, 16);
+f(8721, 17);
+f(15903, 31);
+f(16416, 32);
+f(16929, 33);
+f(32319, 63);
+f(32832, 64);
+f(33345, 65);
+f(65151, 127);
+f(65664, 128);
+f(66177, 129);
+f(130815, 255);
+f(131328, 256);
+f(131841, 257);
+f(262143, 511);
+f(262656, 512);
+f(263169, 513);
+x = 1023;
+f(0, 0);
+f(1023, 1);
+f(2046, 2);
+f(3069, 3);
+f(4092, 4);
+f(5115, 5);
+f(7161, 7);
+f(8184, 8);
+f(9207, 9);
+f(15345, 15);
+f(16368, 16);
+f(17391, 17);
+f(31713, 31);
+f(32736, 32);
+f(33759, 33);
+f(64449, 63);
+f(65472, 64);
+f(66495, 65);
+f(129921, 127);
+f(130944, 128);
+f(131967, 129);
+f(260865, 255);
+f(261888, 256);
+f(262911, 257);
+f(522753, 511);
+f(523776, 512);
+f(524799, 513);
+f(1046529, 1023);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part10.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part10.js
new file mode 100644
index 0000000..166ec52
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part10.js
@@ -0,0 +1,470 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 4294967296;
+f(0, 0);
+f(4294967296, 1);
+f(8589934592, 2);
+f(12884901888, 3);
+f(17179869184, 4);
+f(21474836480, 5);
+f(30064771072, 7);
+f(34359738368, 8);
+f(38654705664, 9);
+f(64424509440, 15);
+f(68719476736, 16);
+f(73014444032, 17);
+f(133143986176, 31);
+f(137438953472, 32);
+f(141733920768, 33);
+f(270582939648, 63);
+f(274877906944, 64);
+f(279172874240, 65);
+f(545460846592, 127);
+f(549755813888, 128);
+f(554050781184, 129);
+f(1095216660480, 255);
+f(1099511627776, 256);
+f(1103806595072, 257);
+f(2194728288256, 511);
+f(2199023255552, 512);
+f(2203318222848, 513);
+f(4393751543808, 1023);
+f(4398046511104, 1024);
+f(4402341478400, 1025);
+f(8791798054912, 2047);
+f(8796093022208, 2048);
+f(8800387989504, 2049);
+f(17587891077120, 4095);
+f(17592186044416, 4096);
+f(17596481011712, 4097);
+f(35180077121536, 8191);
+f(35184372088832, 8192);
+f(35188667056128, 8193);
+f(70364449210368, 16383);
+f(70368744177664, 16384);
+f(70373039144960, 16385);
+f(140733193388032, 32767);
+f(140737488355328, 32768);
+f(140741783322624, 32769);
+f(281470681743360, 65535);
+f(281474976710656, 65536);
+f(281479271677952, 65537);
+f(562945658454016, 131071);
+f(562949953421312, 131072);
+f(562954248388608, 131073);
+f(1125895611875328, 262143);
+f(1125899906842624, 262144);
+f(1125904201809920, 262145);
+x = 4294967297;
+f(0, 0);
+f(4294967297, 1);
+f(8589934594, 2);
+f(12884901891, 3);
+f(17179869188, 4);
+f(21474836485, 5);
+f(30064771079, 7);
+f(34359738376, 8);
+f(38654705673, 9);
+f(64424509455, 15);
+f(68719476752, 16);
+f(73014444049, 17);
+f(133143986207, 31);
+f(137438953504, 32);
+f(141733920801, 33);
+f(270582939711, 63);
+f(274877907008, 64);
+f(279172874305, 65);
+f(545460846719, 127);
+f(549755814016, 128);
+f(554050781313, 129);
+f(1095216660735, 255);
+f(1099511628032, 256);
+f(1103806595329, 257);
+f(2194728288767, 511);
+f(2199023256064, 512);
+f(2203318223361, 513);
+f(4393751544831, 1023);
+f(4398046512128, 1024);
+f(4402341479425, 1025);
+f(8791798056959, 2047);
+f(8796093024256, 2048);
+f(8800387991553, 2049);
+f(17587891081215, 4095);
+f(17592186048512, 4096);
+f(17596481015809, 4097);
+f(35180077129727, 8191);
+f(35184372097024, 8192);
+f(35188667064321, 8193);
+f(70364449226751, 16383);
+f(70368744194048, 16384);
+f(70373039161345, 16385);
+f(140733193420799, 32767);
+f(140737488388096, 32768);
+f(140741783355393, 32769);
+f(281470681808895, 65535);
+f(281474976776192, 65536);
+f(281479271743489, 65537);
+f(562945658585087, 131071);
+f(562949953552384, 131072);
+f(562954248519681, 131073);
+f(1125895612137471, 262143);
+f(1125899907104768, 262144);
+f(1125904202072065, 262145);
+x = 8589934591;
+f(0, 0);
+f(8589934591, 1);
+f(17179869182, 2);
+f(25769803773, 3);
+f(34359738364, 4);
+f(42949672955, 5);
+f(60129542137, 7);
+f(68719476728, 8);
+f(77309411319, 9);
+f(128849018865, 15);
+f(137438953456, 16);
+f(146028888047, 17);
+f(266287972321, 31);
+f(274877906912, 32);
+f(283467841503, 33);
+f(541165879233, 63);
+f(549755813824, 64);
+f(558345748415, 65);
+f(1090921693057, 127);
+f(1099511627648, 128);
+f(1108101562239, 129);
+f(2190433320705, 255);
+f(2199023255296, 256);
+f(2207613189887, 257);
+f(4389456576001, 511);
+f(4398046510592, 512);
+f(4406636445183, 513);
+f(8787503086593, 1023);
+f(8796093021184, 1024);
+f(8804682955775, 1025);
+f(17583596107777, 2047);
+f(17592186042368, 2048);
+f(17600775976959, 2049);
+f(35175782150145, 4095);
+f(35184372084736, 4096);
+f(35192962019327, 4097);
+f(70360154234881, 8191);
+f(70368744169472, 8192);
+f(70377334104063, 8193);
+f(140728898404353, 16383);
+f(140737488338944, 16384);
+f(140746078273535, 16385);
+f(281466386743297, 32767);
+f(281474976677888, 32768);
+f(281483566612479, 32769);
+f(562941363421185, 65535);
+f(562949953355776, 65536);
+f(562958543290367, 65537);
+f(1125891316776961, 131071);
+f(1125899906711552, 131072);
+f(1125908496646143, 131073);
+x = 8589934592;
+f(0, 0);
+f(8589934592, 1);
+f(17179869184, 2);
+f(25769803776, 3);
+f(34359738368, 4);
+f(42949672960, 5);
+f(60129542144, 7);
+f(68719476736, 8);
+f(77309411328, 9);
+f(128849018880, 15);
+f(137438953472, 16);
+f(146028888064, 17);
+f(266287972352, 31);
+f(274877906944, 32);
+f(283467841536, 33);
+f(541165879296, 63);
+f(549755813888, 64);
+f(558345748480, 65);
+f(1090921693184, 127);
+f(1099511627776, 128);
+f(1108101562368, 129);
+f(2190433320960, 255);
+f(2199023255552, 256);
+f(2207613190144, 257);
+f(4389456576512, 511);
+f(4398046511104, 512);
+f(4406636445696, 513);
+f(8787503087616, 1023);
+f(8796093022208, 1024);
+f(8804682956800, 1025);
+f(17583596109824, 2047);
+f(17592186044416, 2048);
+f(17600775979008, 2049);
+f(35175782154240, 4095);
+f(35184372088832, 4096);
+f(35192962023424, 4097);
+f(70360154243072, 8191);
+f(70368744177664, 8192);
+f(70377334112256, 8193);
+f(140728898420736, 16383);
+f(140737488355328, 16384);
+f(140746078289920, 16385);
+f(281466386776064, 32767);
+f(281474976710656, 32768);
+f(281483566645248, 32769);
+f(562941363486720, 65535);
+f(562949953421312, 65536);
+f(562958543355904, 65537);
+f(1125891316908032, 131071);
+f(1125899906842624, 131072);
+f(1125908496777216, 131073);
+x = 8589934593;
+f(0, 0);
+f(8589934593, 1);
+f(17179869186, 2);
+f(25769803779, 3);
+f(34359738372, 4);
+f(42949672965, 5);
+f(60129542151, 7);
+f(68719476744, 8);
+f(77309411337, 9);
+f(128849018895, 15);
+f(137438953488, 16);
+f(146028888081, 17);
+f(266287972383, 31);
+f(274877906976, 32);
+f(283467841569, 33);
+f(541165879359, 63);
+f(549755813952, 64);
+f(558345748545, 65);
+f(1090921693311, 127);
+f(1099511627904, 128);
+f(1108101562497, 129);
+f(2190433321215, 255);
+f(2199023255808, 256);
+f(2207613190401, 257);
+f(4389456577023, 511);
+f(4398046511616, 512);
+f(4406636446209, 513);
+f(8787503088639, 1023);
+f(8796093023232, 1024);
+f(8804682957825, 1025);
+f(17583596111871, 2047);
+f(17592186046464, 2048);
+f(17600775981057, 2049);
+f(35175782158335, 4095);
+f(35184372092928, 4096);
+f(35192962027521, 4097);
+f(70360154251263, 8191);
+f(70368744185856, 8192);
+f(70377334120449, 8193);
+f(140728898437119, 16383);
+f(140737488371712, 16384);
+f(140746078306305, 16385);
+f(281466386808831, 32767);
+f(281474976743424, 32768);
+f(281483566678017, 32769);
+f(562941363552255, 65535);
+f(562949953486848, 65536);
+f(562958543421441, 65537);
+f(1125891317039103, 131071);
+f(1125899906973696, 131072);
+f(1125908496908289, 131073);
+x = 17179869183;
+f(0, 0);
+f(17179869183, 1);
+f(34359738366, 2);
+f(51539607549, 3);
+f(68719476732, 4);
+f(85899345915, 5);
+f(120259084281, 7);
+f(137438953464, 8);
+f(154618822647, 9);
+f(257698037745, 15);
+f(274877906928, 16);
+f(292057776111, 17);
+f(532575944673, 31);
+f(549755813856, 32);
+f(566935683039, 33);
+f(1082331758529, 63);
+f(1099511627712, 64);
+f(1116691496895, 65);
+f(2181843386241, 127);
+f(2199023255424, 128);
+f(2216203124607, 129);
+f(4380866641665, 255);
+f(4398046510848, 256);
+f(4415226380031, 257);
+f(8778913152513, 511);
+f(8796093021696, 512);
+f(8813272890879, 513);
+f(17575006174209, 1023);
+f(17592186043392, 1024);
+f(17609365912575, 1025);
+f(35167192217601, 2047);
+f(35184372086784, 2048);
+f(35201551955967, 2049);
+f(70351564304385, 4095);
+f(70368744173568, 4096);
+f(70385924042751, 4097);
+f(140720308477953, 8191);
+f(140737488347136, 8192);
+f(140754668216319, 8193);
+f(281457796825089, 16383);
+f(281474976694272, 16384);
+f(281492156563455, 16385);
+f(562932773519361, 32767);
+f(562949953388544, 32768);
+f(562967133257727, 32769);
+f(1125882726907905, 65535);
+f(1125899906777088, 65536);
+f(1125917086646271, 65537);
+x = 17179869184;
+f(0, 0);
+f(17179869184, 1);
+f(34359738368, 2);
+f(51539607552, 3);
+f(68719476736, 4);
+f(85899345920, 5);
+f(120259084288, 7);
+f(137438953472, 8);
+f(154618822656, 9);
+f(257698037760, 15);
+f(274877906944, 16);
+f(292057776128, 17);
+f(532575944704, 31);
+f(549755813888, 32);
+f(566935683072, 33);
+f(1082331758592, 63);
+f(1099511627776, 64);
+f(1116691496960, 65);
+f(2181843386368, 127);
+f(2199023255552, 128);
+f(2216203124736, 129);
+f(4380866641920, 255);
+f(4398046511104, 256);
+f(4415226380288, 257);
+f(8778913153024, 511);
+f(8796093022208, 512);
+f(8813272891392, 513);
+f(17575006175232, 1023);
+f(17592186044416, 1024);
+f(17609365913600, 1025);
+f(35167192219648, 2047);
+f(35184372088832, 2048);
+f(35201551958016, 2049);
+f(70351564308480, 4095);
+f(70368744177664, 4096);
+f(70385924046848, 4097);
+f(140720308486144, 8191);
+f(140737488355328, 8192);
+f(140754668224512, 8193);
+f(281457796841472, 16383);
+f(281474976710656, 16384);
+f(281492156579840, 16385);
+f(562932773552128, 32767);
+f(562949953421312, 32768);
+f(562967133290496, 32769);
+f(1125882726973440, 65535);
+f(1125899906842624, 65536);
+f(1125917086711808, 65537);
+x = 17179869185;
+f(0, 0);
+f(17179869185, 1);
+f(34359738370, 2);
+f(51539607555, 3);
+f(68719476740, 4);
+f(85899345925, 5);
+f(120259084295, 7);
+f(137438953480, 8);
+f(154618822665, 9);
+f(257698037775, 15);
+f(274877906960, 16);
+f(292057776145, 17);
+f(532575944735, 31);
+f(549755813920, 32);
+f(566935683105, 33);
+f(1082331758655, 63);
+f(1099511627840, 64);
+f(1116691497025, 65);
+f(2181843386495, 127);
+f(2199023255680, 128);
+f(2216203124865, 129);
+f(4380866642175, 255);
+f(4398046511360, 256);
+f(4415226380545, 257);
+f(8778913153535, 511);
+f(8796093022720, 512);
+f(8813272891905, 513);
+f(17575006176255, 1023);
+f(17592186045440, 1024);
+f(17609365914625, 1025);
+f(35167192221695, 2047);
+f(35184372090880, 2048);
+f(35201551960065, 2049);
+f(70351564312575, 4095);
+f(70368744181760, 4096);
+f(70385924050945, 4097);
+f(140720308494335, 8191);
+f(140737488363520, 8192);
+f(140754668232705, 8193);
+f(281457796857855, 16383);
+f(281474976727040, 16384);
+f(281492156596225, 16385);
+f(562932773584895, 32767);
+f(562949953454080, 32768);
+f(562967133323265, 32769);
+f(1125882727038975, 65535);
+f(1125899906908160, 65536);
+f(1125917086777345, 65537);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part2.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part2.js
new file mode 100644
index 0000000..4c4a123
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part2.js
@@ -0,0 +1,525 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 1024;
+f(0, 0);
+f(1024, 1);
+f(2048, 2);
+f(3072, 3);
+f(4096, 4);
+f(5120, 5);
+f(7168, 7);
+f(8192, 8);
+f(9216, 9);
+f(15360, 15);
+f(16384, 16);
+f(17408, 17);
+f(31744, 31);
+f(32768, 32);
+f(33792, 33);
+f(64512, 63);
+f(65536, 64);
+f(66560, 65);
+f(130048, 127);
+f(131072, 128);
+f(132096, 129);
+f(261120, 255);
+f(262144, 256);
+f(263168, 257);
+f(523264, 511);
+f(524288, 512);
+f(525312, 513);
+f(1047552, 1023);
+f(1048576, 1024);
+x = 1025;
+f(0, 0);
+f(1025, 1);
+f(2050, 2);
+f(3075, 3);
+f(4100, 4);
+f(5125, 5);
+f(7175, 7);
+f(8200, 8);
+f(9225, 9);
+f(15375, 15);
+f(16400, 16);
+f(17425, 17);
+f(31775, 31);
+f(32800, 32);
+f(33825, 33);
+f(64575, 63);
+f(65600, 64);
+f(66625, 65);
+f(130175, 127);
+f(131200, 128);
+f(132225, 129);
+f(261375, 255);
+f(262400, 256);
+f(263425, 257);
+f(523775, 511);
+f(524800, 512);
+f(525825, 513);
+f(1048575, 1023);
+f(1049600, 1024);
+f(1050625, 1025);
+x = 2047;
+f(0, 0);
+f(2047, 1);
+f(4094, 2);
+f(6141, 3);
+f(8188, 4);
+f(10235, 5);
+f(14329, 7);
+f(16376, 8);
+f(18423, 9);
+f(30705, 15);
+f(32752, 16);
+f(34799, 17);
+f(63457, 31);
+f(65504, 32);
+f(67551, 33);
+f(128961, 63);
+f(131008, 64);
+f(133055, 65);
+f(259969, 127);
+f(262016, 128);
+f(264063, 129);
+f(521985, 255);
+f(524032, 256);
+f(526079, 257);
+f(1046017, 511);
+f(1048064, 512);
+f(1050111, 513);
+f(2094081, 1023);
+f(2096128, 1024);
+f(2098175, 1025);
+f(4190209, 2047);
+x = 2048;
+f(0, 0);
+f(2048, 1);
+f(4096, 2);
+f(6144, 3);
+f(8192, 4);
+f(10240, 5);
+f(14336, 7);
+f(16384, 8);
+f(18432, 9);
+f(30720, 15);
+f(32768, 16);
+f(34816, 17);
+f(63488, 31);
+f(65536, 32);
+f(67584, 33);
+f(129024, 63);
+f(131072, 64);
+f(133120, 65);
+f(260096, 127);
+f(262144, 128);
+f(264192, 129);
+f(522240, 255);
+f(524288, 256);
+f(526336, 257);
+f(1046528, 511);
+f(1048576, 512);
+f(1050624, 513);
+f(2095104, 1023);
+f(2097152, 1024);
+f(2099200, 1025);
+f(4192256, 2047);
+f(4194304, 2048);
+x = 2049;
+f(0, 0);
+f(2049, 1);
+f(4098, 2);
+f(6147, 3);
+f(8196, 4);
+f(10245, 5);
+f(14343, 7);
+f(16392, 8);
+f(18441, 9);
+f(30735, 15);
+f(32784, 16);
+f(34833, 17);
+f(63519, 31);
+f(65568, 32);
+f(67617, 33);
+f(129087, 63);
+f(131136, 64);
+f(133185, 65);
+f(260223, 127);
+f(262272, 128);
+f(264321, 129);
+f(522495, 255);
+f(524544, 256);
+f(526593, 257);
+f(1047039, 511);
+f(1049088, 512);
+f(1051137, 513);
+f(2096127, 1023);
+f(2098176, 1024);
+f(2100225, 1025);
+f(4194303, 2047);
+f(4196352, 2048);
+f(4198401, 2049);
+x = 4095;
+f(0, 0);
+f(4095, 1);
+f(8190, 2);
+f(12285, 3);
+f(16380, 4);
+f(20475, 5);
+f(28665, 7);
+f(32760, 8);
+f(36855, 9);
+f(61425, 15);
+f(65520, 16);
+f(69615, 17);
+f(126945, 31);
+f(131040, 32);
+f(135135, 33);
+f(257985, 63);
+f(262080, 64);
+f(266175, 65);
+f(520065, 127);
+f(524160, 128);
+f(528255, 129);
+f(1044225, 255);
+f(1048320, 256);
+f(1052415, 257);
+f(2092545, 511);
+f(2096640, 512);
+f(2100735, 513);
+f(4189185, 1023);
+f(4193280, 1024);
+f(4197375, 1025);
+f(8382465, 2047);
+f(8386560, 2048);
+f(8390655, 2049);
+f(16769025, 4095);
+x = 4096;
+f(0, 0);
+f(4096, 1);
+f(8192, 2);
+f(12288, 3);
+f(16384, 4);
+f(20480, 5);
+f(28672, 7);
+f(32768, 8);
+f(36864, 9);
+f(61440, 15);
+f(65536, 16);
+f(69632, 17);
+f(126976, 31);
+f(131072, 32);
+f(135168, 33);
+f(258048, 63);
+f(262144, 64);
+f(266240, 65);
+f(520192, 127);
+f(524288, 128);
+f(528384, 129);
+f(1044480, 255);
+f(1048576, 256);
+f(1052672, 257);
+f(2093056, 511);
+f(2097152, 512);
+f(2101248, 513);
+f(4190208, 1023);
+f(4194304, 1024);
+f(4198400, 1025);
+f(8384512, 2047);
+f(8388608, 2048);
+f(8392704, 2049);
+f(16773120, 4095);
+f(16777216, 4096);
+x = 4097;
+f(0, 0);
+f(4097, 1);
+f(8194, 2);
+f(12291, 3);
+f(16388, 4);
+f(20485, 5);
+f(28679, 7);
+f(32776, 8);
+f(36873, 9);
+f(61455, 15);
+f(65552, 16);
+f(69649, 17);
+f(127007, 31);
+f(131104, 32);
+f(135201, 33);
+f(258111, 63);
+f(262208, 64);
+f(266305, 65);
+f(520319, 127);
+f(524416, 128);
+f(528513, 129);
+f(1044735, 255);
+f(1048832, 256);
+f(1052929, 257);
+f(2093567, 511);
+f(2097664, 512);
+f(2101761, 513);
+f(4191231, 1023);
+f(4195328, 1024);
+f(4199425, 1025);
+f(8386559, 2047);
+f(8390656, 2048);
+f(8394753, 2049);
+f(16777215, 4095);
+f(16781312, 4096);
+f(16785409, 4097);
+x = 8191;
+f(0, 0);
+f(8191, 1);
+f(16382, 2);
+f(24573, 3);
+f(32764, 4);
+f(40955, 5);
+f(57337, 7);
+f(65528, 8);
+f(73719, 9);
+f(122865, 15);
+f(131056, 16);
+f(139247, 17);
+f(253921, 31);
+f(262112, 32);
+f(270303, 33);
+f(516033, 63);
+f(524224, 64);
+f(532415, 65);
+f(1040257, 127);
+f(1048448, 128);
+f(1056639, 129);
+f(2088705, 255);
+f(2096896, 256);
+f(2105087, 257);
+f(4185601, 511);
+f(4193792, 512);
+f(4201983, 513);
+f(8379393, 1023);
+f(8387584, 1024);
+f(8395775, 1025);
+f(16766977, 2047);
+f(16775168, 2048);
+f(16783359, 2049);
+f(33542145, 4095);
+f(33550336, 4096);
+f(33558527, 4097);
+f(67092481, 8191);
+x = 8192;
+f(0, 0);
+f(8192, 1);
+f(16384, 2);
+f(24576, 3);
+f(32768, 4);
+f(40960, 5);
+f(57344, 7);
+f(65536, 8);
+f(73728, 9);
+f(122880, 15);
+f(131072, 16);
+f(139264, 17);
+f(253952, 31);
+f(262144, 32);
+f(270336, 33);
+f(516096, 63);
+f(524288, 64);
+f(532480, 65);
+f(1040384, 127);
+f(1048576, 128);
+f(1056768, 129);
+f(2088960, 255);
+f(2097152, 256);
+f(2105344, 257);
+f(4186112, 511);
+f(4194304, 512);
+f(4202496, 513);
+f(8380416, 1023);
+f(8388608, 1024);
+f(8396800, 1025);
+f(16769024, 2047);
+f(16777216, 2048);
+f(16785408, 2049);
+f(33546240, 4095);
+f(33554432, 4096);
+f(33562624, 4097);
+f(67100672, 8191);
+f(67108864, 8192);
+x = 8193;
+f(0, 0);
+f(8193, 1);
+f(16386, 2);
+f(24579, 3);
+f(32772, 4);
+f(40965, 5);
+f(57351, 7);
+f(65544, 8);
+f(73737, 9);
+f(122895, 15);
+f(131088, 16);
+f(139281, 17);
+f(253983, 31);
+f(262176, 32);
+f(270369, 33);
+f(516159, 63);
+f(524352, 64);
+f(532545, 65);
+f(1040511, 127);
+f(1048704, 128);
+f(1056897, 129);
+f(2089215, 255);
+f(2097408, 256);
+f(2105601, 257);
+f(4186623, 511);
+f(4194816, 512);
+f(4203009, 513);
+f(8381439, 1023);
+f(8389632, 1024);
+f(8397825, 1025);
+f(16771071, 2047);
+f(16779264, 2048);
+f(16787457, 2049);
+f(33550335, 4095);
+f(33558528, 4096);
+f(33566721, 4097);
+f(67108863, 8191);
+f(67117056, 8192);
+f(67125249, 8193);
+x = 16383;
+f(0, 0);
+f(16383, 1);
+f(32766, 2);
+f(49149, 3);
+f(65532, 4);
+f(81915, 5);
+f(114681, 7);
+f(131064, 8);
+f(147447, 9);
+f(245745, 15);
+f(262128, 16);
+f(278511, 17);
+f(507873, 31);
+f(524256, 32);
+f(540639, 33);
+f(1032129, 63);
+f(1048512, 64);
+f(1064895, 65);
+f(2080641, 127);
+f(2097024, 128);
+f(2113407, 129);
+f(4177665, 255);
+f(4194048, 256);
+f(4210431, 257);
+f(8371713, 511);
+f(8388096, 512);
+f(8404479, 513);
+f(16759809, 1023);
+f(16776192, 1024);
+f(16792575, 1025);
+f(33536001, 2047);
+f(33552384, 2048);
+f(33568767, 2049);
+f(67088385, 4095);
+f(67104768, 4096);
+f(67121151, 4097);
+f(134193153, 8191);
+f(134209536, 8192);
+f(134225919, 8193);
+f(268402689, 16383);
+x = 16384;
+f(0, 0);
+f(16384, 1);
+f(32768, 2);
+f(49152, 3);
+f(65536, 4);
+f(81920, 5);
+f(114688, 7);
+f(131072, 8);
+f(147456, 9);
+f(245760, 15);
+f(262144, 16);
+f(278528, 17);
+f(507904, 31);
+f(524288, 32);
+f(540672, 33);
+f(1032192, 63);
+f(1048576, 64);
+f(1064960, 65);
+f(2080768, 127);
+f(2097152, 128);
+f(2113536, 129);
+f(4177920, 255);
+f(4194304, 256);
+f(4210688, 257);
+f(8372224, 511);
+f(8388608, 512);
+f(8404992, 513);
+f(16760832, 1023);
+f(16777216, 1024);
+f(16793600, 1025);
+f(33538048, 2047);
+f(33554432, 2048);
+f(33570816, 2049);
+f(67092480, 4095);
+f(67108864, 4096);
+f(67125248, 4097);
+f(134201344, 8191);
+f(134217728, 8192);
+f(134234112, 8193);
+f(268419072, 16383);
+f(268435456, 16384);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part3.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part3.js
new file mode 100644
index 0000000..06e41a1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part3.js
@@ -0,0 +1,532 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 16385;
+f(0, 0);
+f(16385, 1);
+f(32770, 2);
+f(49155, 3);
+f(65540, 4);
+f(81925, 5);
+f(114695, 7);
+f(131080, 8);
+f(147465, 9);
+f(245775, 15);
+f(262160, 16);
+f(278545, 17);
+f(507935, 31);
+f(524320, 32);
+f(540705, 33);
+f(1032255, 63);
+f(1048640, 64);
+f(1065025, 65);
+f(2080895, 127);
+f(2097280, 128);
+f(2113665, 129);
+f(4178175, 255);
+f(4194560, 256);
+f(4210945, 257);
+f(8372735, 511);
+f(8389120, 512);
+f(8405505, 513);
+f(16761855, 1023);
+f(16778240, 1024);
+f(16794625, 1025);
+f(33540095, 2047);
+f(33556480, 2048);
+f(33572865, 2049);
+f(67096575, 4095);
+f(67112960, 4096);
+f(67129345, 4097);
+f(134209535, 8191);
+f(134225920, 8192);
+f(134242305, 8193);
+f(268435455, 16383);
+f(268451840, 16384);
+f(268468225, 16385);
+x = 32767;
+f(0, 0);
+f(32767, 1);
+f(65534, 2);
+f(98301, 3);
+f(131068, 4);
+f(163835, 5);
+f(229369, 7);
+f(262136, 8);
+f(294903, 9);
+f(491505, 15);
+f(524272, 16);
+f(557039, 17);
+f(1015777, 31);
+f(1048544, 32);
+f(1081311, 33);
+f(2064321, 63);
+f(2097088, 64);
+f(2129855, 65);
+f(4161409, 127);
+f(4194176, 128);
+f(4226943, 129);
+f(8355585, 255);
+f(8388352, 256);
+f(8421119, 257);
+f(16743937, 511);
+f(16776704, 512);
+f(16809471, 513);
+f(33520641, 1023);
+f(33553408, 1024);
+f(33586175, 1025);
+f(67074049, 2047);
+f(67106816, 2048);
+f(67139583, 2049);
+f(134180865, 4095);
+f(134213632, 4096);
+f(134246399, 4097);
+f(268394497, 8191);
+f(268427264, 8192);
+f(268460031, 8193);
+f(536821761, 16383);
+f(536854528, 16384);
+f(536887295, 16385);
+f(1073676289, 32767);
+x = 32768;
+f(0, 0);
+f(32768, 1);
+f(65536, 2);
+f(98304, 3);
+f(131072, 4);
+f(163840, 5);
+f(229376, 7);
+f(262144, 8);
+f(294912, 9);
+f(491520, 15);
+f(524288, 16);
+f(557056, 17);
+f(1015808, 31);
+f(1048576, 32);
+f(1081344, 33);
+f(2064384, 63);
+f(2097152, 64);
+f(2129920, 65);
+f(4161536, 127);
+f(4194304, 128);
+f(4227072, 129);
+f(8355840, 255);
+f(8388608, 256);
+f(8421376, 257);
+f(16744448, 511);
+f(16777216, 512);
+f(16809984, 513);
+f(33521664, 1023);
+f(33554432, 1024);
+f(33587200, 1025);
+f(67076096, 2047);
+f(67108864, 2048);
+f(67141632, 2049);
+f(134184960, 4095);
+f(134217728, 4096);
+f(134250496, 4097);
+f(268402688, 8191);
+f(268435456, 8192);
+f(268468224, 8193);
+f(536838144, 16383);
+f(536870912, 16384);
+f(536903680, 16385);
+f(1073709056, 32767);
+f(1073741824, 32768);
+x = 32769;
+f(0, 0);
+f(32769, 1);
+f(65538, 2);
+f(98307, 3);
+f(131076, 4);
+f(163845, 5);
+f(229383, 7);
+f(262152, 8);
+f(294921, 9);
+f(491535, 15);
+f(524304, 16);
+f(557073, 17);
+f(1015839, 31);
+f(1048608, 32);
+f(1081377, 33);
+f(2064447, 63);
+f(2097216, 64);
+f(2129985, 65);
+f(4161663, 127);
+f(4194432, 128);
+f(4227201, 129);
+f(8356095, 255);
+f(8388864, 256);
+f(8421633, 257);
+f(16744959, 511);
+f(16777728, 512);
+f(16810497, 513);
+f(33522687, 1023);
+f(33555456, 1024);
+f(33588225, 1025);
+f(67078143, 2047);
+f(67110912, 2048);
+f(67143681, 2049);
+f(134189055, 4095);
+f(134221824, 4096);
+f(134254593, 4097);
+f(268410879, 8191);
+f(268443648, 8192);
+f(268476417, 8193);
+f(536854527, 16383);
+f(536887296, 16384);
+f(536920065, 16385);
+f(1073741823, 32767);
+f(1073774592, 32768);
+f(1073807361, 32769);
+x = 65535;
+f(0, 0);
+f(65535, 1);
+f(131070, 2);
+f(196605, 3);
+f(262140, 4);
+f(327675, 5);
+f(458745, 7);
+f(524280, 8);
+f(589815, 9);
+f(983025, 15);
+f(1048560, 16);
+f(1114095, 17);
+f(2031585, 31);
+f(2097120, 32);
+f(2162655, 33);
+f(4128705, 63);
+f(4194240, 64);
+f(4259775, 65);
+f(8322945, 127);
+f(8388480, 128);
+f(8454015, 129);
+f(16711425, 255);
+f(16776960, 256);
+f(16842495, 257);
+f(33488385, 511);
+f(33553920, 512);
+f(33619455, 513);
+f(67042305, 1023);
+f(67107840, 1024);
+f(67173375, 1025);
+f(134150145, 2047);
+f(134215680, 2048);
+f(134281215, 2049);
+f(268365825, 4095);
+f(268431360, 4096);
+f(268496895, 4097);
+f(536797185, 8191);
+f(536862720, 8192);
+f(536928255, 8193);
+f(1073659905, 16383);
+f(1073725440, 16384);
+f(1073790975, 16385);
+f(2147385345, 32767);
+f(2147450880, 32768);
+f(2147516415, 32769);
+f(4294836225, 65535);
+x = 65536;
+f(0, 0);
+f(65536, 1);
+f(131072, 2);
+f(196608, 3);
+f(262144, 4);
+f(327680, 5);
+f(458752, 7);
+f(524288, 8);
+f(589824, 9);
+f(983040, 15);
+f(1048576, 16);
+f(1114112, 17);
+f(2031616, 31);
+f(2097152, 32);
+f(2162688, 33);
+f(4128768, 63);
+f(4194304, 64);
+f(4259840, 65);
+f(8323072, 127);
+f(8388608, 128);
+f(8454144, 129);
+f(16711680, 255);
+f(16777216, 256);
+f(16842752, 257);
+f(33488896, 511);
+f(33554432, 512);
+f(33619968, 513);
+f(67043328, 1023);
+f(67108864, 1024);
+f(67174400, 1025);
+f(134152192, 2047);
+f(134217728, 2048);
+f(134283264, 2049);
+f(268369920, 4095);
+f(268435456, 4096);
+f(268500992, 4097);
+f(536805376, 8191);
+f(536870912, 8192);
+f(536936448, 8193);
+f(1073676288, 16383);
+f(1073741824, 16384);
+f(1073807360, 16385);
+f(2147418112, 32767);
+f(2147483648, 32768);
+f(2147549184, 32769);
+f(4294901760, 65535);
+f(4294967296, 65536);
+x = 65537;
+f(0, 0);
+f(65537, 1);
+f(131074, 2);
+f(196611, 3);
+f(262148, 4);
+f(327685, 5);
+f(458759, 7);
+f(524296, 8);
+f(589833, 9);
+f(983055, 15);
+f(1048592, 16);
+f(1114129, 17);
+f(2031647, 31);
+f(2097184, 32);
+f(2162721, 33);
+f(4128831, 63);
+f(4194368, 64);
+f(4259905, 65);
+f(8323199, 127);
+f(8388736, 128);
+f(8454273, 129);
+f(16711935, 255);
+f(16777472, 256);
+f(16843009, 257);
+f(33489407, 511);
+f(33554944, 512);
+f(33620481, 513);
+f(67044351, 1023);
+f(67109888, 1024);
+f(67175425, 1025);
+f(134154239, 2047);
+f(134219776, 2048);
+f(134285313, 2049);
+f(268374015, 4095);
+f(268439552, 4096);
+f(268505089, 4097);
+f(536813567, 8191);
+f(536879104, 8192);
+f(536944641, 8193);
+f(1073692671, 16383);
+f(1073758208, 16384);
+f(1073823745, 16385);
+f(2147450879, 32767);
+f(2147516416, 32768);
+f(2147581953, 32769);
+f(4294967295, 65535);
+f(4295032832, 65536);
+f(4295098369, 65537);
+x = 131071;
+f(0, 0);
+f(131071, 1);
+f(262142, 2);
+f(393213, 3);
+f(524284, 4);
+f(655355, 5);
+f(917497, 7);
+f(1048568, 8);
+f(1179639, 9);
+f(1966065, 15);
+f(2097136, 16);
+f(2228207, 17);
+f(4063201, 31);
+f(4194272, 32);
+f(4325343, 33);
+f(8257473, 63);
+f(8388544, 64);
+f(8519615, 65);
+f(16646017, 127);
+f(16777088, 128);
+f(16908159, 129);
+f(33423105, 255);
+f(33554176, 256);
+f(33685247, 257);
+f(66977281, 511);
+f(67108352, 512);
+f(67239423, 513);
+f(134085633, 1023);
+f(134216704, 1024);
+f(134347775, 1025);
+f(268302337, 2047);
+f(268433408, 2048);
+f(268564479, 2049);
+f(536735745, 4095);
+f(536866816, 4096);
+f(536997887, 4097);
+f(1073602561, 8191);
+f(1073733632, 8192);
+f(1073864703, 8193);
+f(2147336193, 16383);
+f(2147467264, 16384);
+f(2147598335, 16385);
+f(4294803457, 32767);
+f(4294934528, 32768);
+f(4295065599, 32769);
+f(8589737985, 65535);
+f(8589869056, 65536);
+f(8590000127, 65537);
+f(17179607041, 131071);
+x = 131072;
+f(0, 0);
+f(131072, 1);
+f(262144, 2);
+f(393216, 3);
+f(524288, 4);
+f(655360, 5);
+f(917504, 7);
+f(1048576, 8);
+f(1179648, 9);
+f(1966080, 15);
+f(2097152, 16);
+f(2228224, 17);
+f(4063232, 31);
+f(4194304, 32);
+f(4325376, 33);
+f(8257536, 63);
+f(8388608, 64);
+f(8519680, 65);
+f(16646144, 127);
+f(16777216, 128);
+f(16908288, 129);
+f(33423360, 255);
+f(33554432, 256);
+f(33685504, 257);
+f(66977792, 511);
+f(67108864, 512);
+f(67239936, 513);
+f(134086656, 1023);
+f(134217728, 1024);
+f(134348800, 1025);
+f(268304384, 2047);
+f(268435456, 2048);
+f(268566528, 2049);
+f(536739840, 4095);
+f(536870912, 4096);
+f(537001984, 4097);
+f(1073610752, 8191);
+f(1073741824, 8192);
+f(1073872896, 8193);
+f(2147352576, 16383);
+f(2147483648, 16384);
+f(2147614720, 16385);
+f(4294836224, 32767);
+f(4294967296, 32768);
+f(4295098368, 32769);
+f(8589803520, 65535);
+f(8589934592, 65536);
+f(8590065664, 65537);
+f(17179738112, 131071);
+f(17179869184, 131072);
+x = 131073;
+f(0, 0);
+f(131073, 1);
+f(262146, 2);
+f(393219, 3);
+f(524292, 4);
+f(655365, 5);
+f(917511, 7);
+f(1048584, 8);
+f(1179657, 9);
+f(1966095, 15);
+f(2097168, 16);
+f(2228241, 17);
+f(4063263, 31);
+f(4194336, 32);
+f(4325409, 33);
+f(8257599, 63);
+f(8388672, 64);
+f(8519745, 65);
+f(16646271, 127);
+f(16777344, 128);
+f(16908417, 129);
+f(33423615, 255);
+f(33554688, 256);
+f(33685761, 257);
+f(66978303, 511);
+f(67109376, 512);
+f(67240449, 513);
+f(134087679, 1023);
+f(134218752, 1024);
+f(134349825, 1025);
+f(268306431, 2047);
+f(268437504, 2048);
+f(268568577, 2049);
+f(536743935, 4095);
+f(536875008, 4096);
+f(537006081, 4097);
+f(1073618943, 8191);
+f(1073750016, 8192);
+f(1073881089, 8193);
+f(2147368959, 16383);
+f(2147500032, 16384);
+f(2147631105, 16385);
+f(4294868991, 32767);
+f(4295000064, 32768);
+f(4295131137, 32769);
+f(8589869055, 65535);
+f(8590000128, 65536);
+f(8590131201, 65537);
+f(17179869183, 131071);
+f(17180000256, 131072);
+f(17180131329, 131073);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part4.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part4.js
new file mode 100644
index 0000000..de9f983
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part4.js
@@ -0,0 +1,509 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 262143;
+f(0, 0);
+f(262143, 1);
+f(524286, 2);
+f(786429, 3);
+f(1048572, 4);
+f(1310715, 5);
+f(1835001, 7);
+f(2097144, 8);
+f(2359287, 9);
+f(3932145, 15);
+f(4194288, 16);
+f(4456431, 17);
+f(8126433, 31);
+f(8388576, 32);
+f(8650719, 33);
+f(16515009, 63);
+f(16777152, 64);
+f(17039295, 65);
+f(33292161, 127);
+f(33554304, 128);
+f(33816447, 129);
+f(66846465, 255);
+f(67108608, 256);
+f(67370751, 257);
+f(133955073, 511);
+f(134217216, 512);
+f(134479359, 513);
+f(268172289, 1023);
+f(268434432, 1024);
+f(268696575, 1025);
+f(536606721, 2047);
+f(536868864, 2048);
+f(537131007, 2049);
+f(1073475585, 4095);
+f(1073737728, 4096);
+f(1073999871, 4097);
+f(2147213313, 8191);
+f(2147475456, 8192);
+f(2147737599, 8193);
+f(4294688769, 16383);
+f(4294950912, 16384);
+f(4295213055, 16385);
+f(8589639681, 32767);
+f(8589901824, 32768);
+f(8590163967, 32769);
+f(17179541505, 65535);
+f(17179803648, 65536);
+f(17180065791, 65537);
+f(34359345153, 131071);
+f(34359607296, 131072);
+f(34359869439, 131073);
+f(68718952449, 262143);
+x = 262144;
+f(0, 0);
+f(262144, 1);
+f(524288, 2);
+f(786432, 3);
+f(1048576, 4);
+f(1310720, 5);
+f(1835008, 7);
+f(2097152, 8);
+f(2359296, 9);
+f(3932160, 15);
+f(4194304, 16);
+f(4456448, 17);
+f(8126464, 31);
+f(8388608, 32);
+f(8650752, 33);
+f(16515072, 63);
+f(16777216, 64);
+f(17039360, 65);
+f(33292288, 127);
+f(33554432, 128);
+f(33816576, 129);
+f(66846720, 255);
+f(67108864, 256);
+f(67371008, 257);
+f(133955584, 511);
+f(134217728, 512);
+f(134479872, 513);
+f(268173312, 1023);
+f(268435456, 1024);
+f(268697600, 1025);
+f(536608768, 2047);
+f(536870912, 2048);
+f(537133056, 2049);
+f(1073479680, 4095);
+f(1073741824, 4096);
+f(1074003968, 4097);
+f(2147221504, 8191);
+f(2147483648, 8192);
+f(2147745792, 8193);
+f(4294705152, 16383);
+f(4294967296, 16384);
+f(4295229440, 16385);
+f(8589672448, 32767);
+f(8589934592, 32768);
+f(8590196736, 32769);
+f(17179607040, 65535);
+f(17179869184, 65536);
+f(17180131328, 65537);
+f(34359476224, 131071);
+f(34359738368, 131072);
+f(34360000512, 131073);
+f(68719214592, 262143);
+f(68719476736, 262144);
+x = 262145;
+f(0, 0);
+f(262145, 1);
+f(524290, 2);
+f(786435, 3);
+f(1048580, 4);
+f(1310725, 5);
+f(1835015, 7);
+f(2097160, 8);
+f(2359305, 9);
+f(3932175, 15);
+f(4194320, 16);
+f(4456465, 17);
+f(8126495, 31);
+f(8388640, 32);
+f(8650785, 33);
+f(16515135, 63);
+f(16777280, 64);
+f(17039425, 65);
+f(33292415, 127);
+f(33554560, 128);
+f(33816705, 129);
+f(66846975, 255);
+f(67109120, 256);
+f(67371265, 257);
+f(133956095, 511);
+f(134218240, 512);
+f(134480385, 513);
+f(268174335, 1023);
+f(268436480, 1024);
+f(268698625, 1025);
+f(536610815, 2047);
+f(536872960, 2048);
+f(537135105, 2049);
+f(1073483775, 4095);
+f(1073745920, 4096);
+f(1074008065, 4097);
+f(2147229695, 8191);
+f(2147491840, 8192);
+f(2147753985, 8193);
+f(4294721535, 16383);
+f(4294983680, 16384);
+f(4295245825, 16385);
+f(8589705215, 32767);
+f(8589967360, 32768);
+f(8590229505, 32769);
+f(17179672575, 65535);
+f(17179934720, 65536);
+f(17180196865, 65537);
+f(34359607295, 131071);
+f(34359869440, 131072);
+f(34360131585, 131073);
+f(68719476735, 262143);
+f(68719738880, 262144);
+f(68720001025, 262145);
+x = 524287;
+f(0, 0);
+f(524287, 1);
+f(1048574, 2);
+f(1572861, 3);
+f(2097148, 4);
+f(2621435, 5);
+f(3670009, 7);
+f(4194296, 8);
+f(4718583, 9);
+f(7864305, 15);
+f(8388592, 16);
+f(8912879, 17);
+f(16252897, 31);
+f(16777184, 32);
+f(17301471, 33);
+f(33030081, 63);
+f(33554368, 64);
+f(34078655, 65);
+f(66584449, 127);
+f(67108736, 128);
+f(67633023, 129);
+f(133693185, 255);
+f(134217472, 256);
+f(134741759, 257);
+f(267910657, 511);
+f(268434944, 512);
+f(268959231, 513);
+f(536345601, 1023);
+f(536869888, 1024);
+f(537394175, 1025);
+f(1073215489, 2047);
+f(1073739776, 2048);
+f(1074264063, 2049);
+f(2146955265, 4095);
+f(2147479552, 4096);
+f(2148003839, 4097);
+f(4294434817, 8191);
+f(4294959104, 8192);
+f(4295483391, 8193);
+f(8589393921, 16383);
+f(8589918208, 16384);
+f(8590442495, 16385);
+f(17179312129, 32767);
+f(17179836416, 32768);
+f(17180360703, 32769);
+f(34359148545, 65535);
+f(34359672832, 65536);
+f(34360197119, 65537);
+f(68718821377, 131071);
+f(68719345664, 131072);
+f(68719869951, 131073);
+f(137438167041, 262143);
+f(137438691328, 262144);
+f(137439215615, 262145);
+f(274876858369, 524287);
+x = 524288;
+f(0, 0);
+f(524288, 1);
+f(1048576, 2);
+f(1572864, 3);
+f(2097152, 4);
+f(2621440, 5);
+f(3670016, 7);
+f(4194304, 8);
+f(4718592, 9);
+f(7864320, 15);
+f(8388608, 16);
+f(8912896, 17);
+f(16252928, 31);
+f(16777216, 32);
+f(17301504, 33);
+f(33030144, 63);
+f(33554432, 64);
+f(34078720, 65);
+f(66584576, 127);
+f(67108864, 128);
+f(67633152, 129);
+f(133693440, 255);
+f(134217728, 256);
+f(134742016, 257);
+f(267911168, 511);
+f(268435456, 512);
+f(268959744, 513);
+f(536346624, 1023);
+f(536870912, 1024);
+f(537395200, 1025);
+f(1073217536, 2047);
+f(1073741824, 2048);
+f(1074266112, 2049);
+f(2146959360, 4095);
+f(2147483648, 4096);
+f(2148007936, 4097);
+f(4294443008, 8191);
+f(4294967296, 8192);
+f(4295491584, 8193);
+f(8589410304, 16383);
+f(8589934592, 16384);
+f(8590458880, 16385);
+f(17179344896, 32767);
+f(17179869184, 32768);
+f(17180393472, 32769);
+f(34359214080, 65535);
+f(34359738368, 65536);
+f(34360262656, 65537);
+f(68718952448, 131071);
+f(68719476736, 131072);
+f(68720001024, 131073);
+f(137438429184, 262143);
+f(137438953472, 262144);
+f(137439477760, 262145);
+f(274877382656, 524287);
+f(274877906944, 524288);
+x = 524289;
+f(0, 0);
+f(524289, 1);
+f(1048578, 2);
+f(1572867, 3);
+f(2097156, 4);
+f(2621445, 5);
+f(3670023, 7);
+f(4194312, 8);
+f(4718601, 9);
+f(7864335, 15);
+f(8388624, 16);
+f(8912913, 17);
+f(16252959, 31);
+f(16777248, 32);
+f(17301537, 33);
+f(33030207, 63);
+f(33554496, 64);
+f(34078785, 65);
+f(66584703, 127);
+f(67108992, 128);
+f(67633281, 129);
+f(133693695, 255);
+f(134217984, 256);
+f(134742273, 257);
+f(267911679, 511);
+f(268435968, 512);
+f(268960257, 513);
+f(536347647, 1023);
+f(536871936, 1024);
+f(537396225, 1025);
+f(1073219583, 2047);
+f(1073743872, 2048);
+f(1074268161, 2049);
+f(2146963455, 4095);
+f(2147487744, 4096);
+f(2148012033, 4097);
+f(4294451199, 8191);
+f(4294975488, 8192);
+f(4295499777, 8193);
+f(8589426687, 16383);
+f(8589950976, 16384);
+f(8590475265, 16385);
+f(17179377663, 32767);
+f(17179901952, 32768);
+f(17180426241, 32769);
+f(34359279615, 65535);
+f(34359803904, 65536);
+f(34360328193, 65537);
+f(68719083519, 131071);
+f(68719607808, 131072);
+f(68720132097, 131073);
+f(137438691327, 262143);
+f(137439215616, 262144);
+f(137439739905, 262145);
+f(274877906943, 524287);
+f(274878431232, 524288);
+f(274878955521, 524289);
+x = 1048575;
+f(0, 0);
+f(1048575, 1);
+f(2097150, 2);
+f(3145725, 3);
+f(4194300, 4);
+f(5242875, 5);
+f(7340025, 7);
+f(8388600, 8);
+f(9437175, 9);
+f(15728625, 15);
+f(16777200, 16);
+f(17825775, 17);
+f(32505825, 31);
+f(33554400, 32);
+f(34602975, 33);
+f(66060225, 63);
+f(67108800, 64);
+f(68157375, 65);
+f(133169025, 127);
+f(134217600, 128);
+f(135266175, 129);
+f(267386625, 255);
+f(268435200, 256);
+f(269483775, 257);
+f(535821825, 511);
+f(536870400, 512);
+f(537918975, 513);
+f(1072692225, 1023);
+f(1073740800, 1024);
+f(1074789375, 1025);
+f(2146433025, 2047);
+f(2147481600, 2048);
+f(2148530175, 2049);
+f(4293914625, 4095);
+f(4294963200, 4096);
+f(4296011775, 4097);
+f(8588877825, 8191);
+f(8589926400, 8192);
+f(8590974975, 8193);
+f(17178804225, 16383);
+f(17179852800, 16384);
+f(17180901375, 16385);
+f(34358657025, 32767);
+f(34359705600, 32768);
+f(34360754175, 32769);
+f(68718362625, 65535);
+f(68719411200, 65536);
+f(68720459775, 65537);
+f(137437773825, 131071);
+f(137438822400, 131072);
+f(137439870975, 131073);
+f(274876596225, 262143);
+f(274877644800, 262144);
+f(274878693375, 262145);
+f(549754241025, 524287);
+f(549755289600, 524288);
+f(549756338175, 524289);
+f(1099509530625, 1048575);
+x = 1048576;
+f(0, 0);
+f(1048576, 1);
+f(2097152, 2);
+f(3145728, 3);
+f(4194304, 4);
+f(5242880, 5);
+f(7340032, 7);
+f(8388608, 8);
+f(9437184, 9);
+f(15728640, 15);
+f(16777216, 16);
+f(17825792, 17);
+f(32505856, 31);
+f(33554432, 32);
+f(34603008, 33);
+f(66060288, 63);
+f(67108864, 64);
+f(68157440, 65);
+f(133169152, 127);
+f(134217728, 128);
+f(135266304, 129);
+f(267386880, 255);
+f(268435456, 256);
+f(269484032, 257);
+f(535822336, 511);
+f(536870912, 512);
+f(537919488, 513);
+f(1072693248, 1023);
+f(1073741824, 1024);
+f(1074790400, 1025);
+f(2146435072, 2047);
+f(2147483648, 2048);
+f(2148532224, 2049);
+f(4293918720, 4095);
+f(4294967296, 4096);
+f(4296015872, 4097);
+f(8588886016, 8191);
+f(8589934592, 8192);
+f(8590983168, 8193);
+f(17178820608, 16383);
+f(17179869184, 16384);
+f(17180917760, 16385);
+f(34358689792, 32767);
+f(34359738368, 32768);
+f(34360786944, 32769);
+f(68718428160, 65535);
+f(68719476736, 65536);
+f(68720525312, 65537);
+f(137437904896, 131071);
+f(137438953472, 131072);
+f(137440002048, 131073);
+f(274876858368, 262143);
+f(274877906944, 262144);
+f(274878955520, 262145);
+f(549754765312, 524287);
+f(549755813888, 524288);
+f(549756862464, 524289);
+f(1099510579200, 1048575);
+f(1099511627776, 1048576);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part5.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part5.js
new file mode 100644
index 0000000..e929985
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part5.js
@@ -0,0 +1,505 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 1048577;
+f(0, 0);
+f(1048577, 1);
+f(2097154, 2);
+f(3145731, 3);
+f(4194308, 4);
+f(5242885, 5);
+f(7340039, 7);
+f(8388616, 8);
+f(9437193, 9);
+f(15728655, 15);
+f(16777232, 16);
+f(17825809, 17);
+f(32505887, 31);
+f(33554464, 32);
+f(34603041, 33);
+f(66060351, 63);
+f(67108928, 64);
+f(68157505, 65);
+f(133169279, 127);
+f(134217856, 128);
+f(135266433, 129);
+f(267387135, 255);
+f(268435712, 256);
+f(269484289, 257);
+f(535822847, 511);
+f(536871424, 512);
+f(537920001, 513);
+f(1072694271, 1023);
+f(1073742848, 1024);
+f(1074791425, 1025);
+f(2146437119, 2047);
+f(2147485696, 2048);
+f(2148534273, 2049);
+f(4293922815, 4095);
+f(4294971392, 4096);
+f(4296019969, 4097);
+f(8588894207, 8191);
+f(8589942784, 8192);
+f(8590991361, 8193);
+f(17178836991, 16383);
+f(17179885568, 16384);
+f(17180934145, 16385);
+f(34358722559, 32767);
+f(34359771136, 32768);
+f(34360819713, 32769);
+f(68718493695, 65535);
+f(68719542272, 65536);
+f(68720590849, 65537);
+f(137438035967, 131071);
+f(137439084544, 131072);
+f(137440133121, 131073);
+f(274877120511, 262143);
+f(274878169088, 262144);
+f(274879217665, 262145);
+f(549755289599, 524287);
+f(549756338176, 524288);
+f(549757386753, 524289);
+f(1099511627775, 1048575);
+f(1099512676352, 1048576);
+f(1099513724929, 1048577);
+x = 2097151;
+f(0, 0);
+f(2097151, 1);
+f(4194302, 2);
+f(6291453, 3);
+f(8388604, 4);
+f(10485755, 5);
+f(14680057, 7);
+f(16777208, 8);
+f(18874359, 9);
+f(31457265, 15);
+f(33554416, 16);
+f(35651567, 17);
+f(65011681, 31);
+f(67108832, 32);
+f(69205983, 33);
+f(132120513, 63);
+f(134217664, 64);
+f(136314815, 65);
+f(266338177, 127);
+f(268435328, 128);
+f(270532479, 129);
+f(534773505, 255);
+f(536870656, 256);
+f(538967807, 257);
+f(1071644161, 511);
+f(1073741312, 512);
+f(1075838463, 513);
+f(2145385473, 1023);
+f(2147482624, 1024);
+f(2149579775, 1025);
+f(4292868097, 2047);
+f(4294965248, 2048);
+f(4297062399, 2049);
+f(8587833345, 4095);
+f(8589930496, 4096);
+f(8592027647, 4097);
+f(17177763841, 8191);
+f(17179860992, 8192);
+f(17181958143, 8193);
+f(34357624833, 16383);
+f(34359721984, 16384);
+f(34361819135, 16385);
+f(68717346817, 32767);
+f(68719443968, 32768);
+f(68721541119, 32769);
+f(137436790785, 65535);
+f(137438887936, 65536);
+f(137440985087, 65537);
+f(274875678721, 131071);
+f(274877775872, 131072);
+f(274879873023, 131073);
+f(549753454593, 262143);
+f(549755551744, 262144);
+f(549757648895, 262145);
+f(1099509006337, 524287);
+f(1099511103488, 524288);
+f(1099513200639, 524289);
+f(2199020109825, 1048575);
+f(2199022206976, 1048576);
+f(2199024304127, 1048577);
+f(4398042316801, 2097151);
+x = 2097152;
+f(0, 0);
+f(2097152, 1);
+f(4194304, 2);
+f(6291456, 3);
+f(8388608, 4);
+f(10485760, 5);
+f(14680064, 7);
+f(16777216, 8);
+f(18874368, 9);
+f(31457280, 15);
+f(33554432, 16);
+f(35651584, 17);
+f(65011712, 31);
+f(67108864, 32);
+f(69206016, 33);
+f(132120576, 63);
+f(134217728, 64);
+f(136314880, 65);
+f(266338304, 127);
+f(268435456, 128);
+f(270532608, 129);
+f(534773760, 255);
+f(536870912, 256);
+f(538968064, 257);
+f(1071644672, 511);
+f(1073741824, 512);
+f(1075838976, 513);
+f(2145386496, 1023);
+f(2147483648, 1024);
+f(2149580800, 1025);
+f(4292870144, 2047);
+f(4294967296, 2048);
+f(4297064448, 2049);
+f(8587837440, 4095);
+f(8589934592, 4096);
+f(8592031744, 4097);
+f(17177772032, 8191);
+f(17179869184, 8192);
+f(17181966336, 8193);
+f(34357641216, 16383);
+f(34359738368, 16384);
+f(34361835520, 16385);
+f(68717379584, 32767);
+f(68719476736, 32768);
+f(68721573888, 32769);
+f(137436856320, 65535);
+f(137438953472, 65536);
+f(137441050624, 65537);
+f(274875809792, 131071);
+f(274877906944, 131072);
+f(274880004096, 131073);
+f(549753716736, 262143);
+f(549755813888, 262144);
+f(549757911040, 262145);
+f(1099509530624, 524287);
+f(1099511627776, 524288);
+f(1099513724928, 524289);
+f(2199021158400, 1048575);
+f(2199023255552, 1048576);
+f(2199025352704, 1048577);
+f(4398044413952, 2097151);
+f(4398046511104, 2097152);
+x = 2097153;
+f(0, 0);
+f(2097153, 1);
+f(4194306, 2);
+f(6291459, 3);
+f(8388612, 4);
+f(10485765, 5);
+f(14680071, 7);
+f(16777224, 8);
+f(18874377, 9);
+f(31457295, 15);
+f(33554448, 16);
+f(35651601, 17);
+f(65011743, 31);
+f(67108896, 32);
+f(69206049, 33);
+f(132120639, 63);
+f(134217792, 64);
+f(136314945, 65);
+f(266338431, 127);
+f(268435584, 128);
+f(270532737, 129);
+f(534774015, 255);
+f(536871168, 256);
+f(538968321, 257);
+f(1071645183, 511);
+f(1073742336, 512);
+f(1075839489, 513);
+f(2145387519, 1023);
+f(2147484672, 1024);
+f(2149581825, 1025);
+f(4292872191, 2047);
+f(4294969344, 2048);
+f(4297066497, 2049);
+f(8587841535, 4095);
+f(8589938688, 4096);
+f(8592035841, 4097);
+f(17177780223, 8191);
+f(17179877376, 8192);
+f(17181974529, 8193);
+f(34357657599, 16383);
+f(34359754752, 16384);
+f(34361851905, 16385);
+f(68717412351, 32767);
+f(68719509504, 32768);
+f(68721606657, 32769);
+f(137436921855, 65535);
+f(137439019008, 65536);
+f(137441116161, 65537);
+f(274875940863, 131071);
+f(274878038016, 131072);
+f(274880135169, 131073);
+f(549753978879, 262143);
+f(549756076032, 262144);
+f(549758173185, 262145);
+f(1099510054911, 524287);
+f(1099512152064, 524288);
+f(1099514249217, 524289);
+f(2199022206975, 1048575);
+f(2199024304128, 1048576);
+f(2199026401281, 1048577);
+f(4398046511103, 2097151);
+f(4398048608256, 2097152);
+f(4398050705409, 2097153);
+x = 4194303;
+f(0, 0);
+f(4194303, 1);
+f(8388606, 2);
+f(12582909, 3);
+f(16777212, 4);
+f(20971515, 5);
+f(29360121, 7);
+f(33554424, 8);
+f(37748727, 9);
+f(62914545, 15);
+f(67108848, 16);
+f(71303151, 17);
+f(130023393, 31);
+f(134217696, 32);
+f(138411999, 33);
+f(264241089, 63);
+f(268435392, 64);
+f(272629695, 65);
+f(532676481, 127);
+f(536870784, 128);
+f(541065087, 129);
+f(1069547265, 255);
+f(1073741568, 256);
+f(1077935871, 257);
+f(2143288833, 511);
+f(2147483136, 512);
+f(2151677439, 513);
+f(4290771969, 1023);
+f(4294966272, 1024);
+f(4299160575, 1025);
+f(8585738241, 2047);
+f(8589932544, 2048);
+f(8594126847, 2049);
+f(17175670785, 4095);
+f(17179865088, 4096);
+f(17184059391, 4097);
+f(34355535873, 8191);
+f(34359730176, 8192);
+f(34363924479, 8193);
+f(68715266049, 16383);
+f(68719460352, 16384);
+f(68723654655, 16385);
+f(137434726401, 32767);
+f(137438920704, 32768);
+f(137443115007, 32769);
+f(274873647105, 65535);
+f(274877841408, 65536);
+f(274882035711, 65537);
+f(549751488513, 131071);
+f(549755682816, 131072);
+f(549759877119, 131073);
+f(1099507171329, 262143);
+f(1099511365632, 262144);
+f(1099515559935, 262145);
+f(2199018536961, 524287);
+f(2199022731264, 524288);
+f(2199026925567, 524289);
+f(4398041268225, 1048575);
+f(4398045462528, 1048576);
+f(4398049656831, 1048577);
+f(8796086730753, 2097151);
+f(8796090925056, 2097152);
+f(8796095119359, 2097153);
+f(17592177655809, 4194303);
+x = 4194304;
+f(0, 0);
+f(4194304, 1);
+f(8388608, 2);
+f(12582912, 3);
+f(16777216, 4);
+f(20971520, 5);
+f(29360128, 7);
+f(33554432, 8);
+f(37748736, 9);
+f(62914560, 15);
+f(67108864, 16);
+f(71303168, 17);
+f(130023424, 31);
+f(134217728, 32);
+f(138412032, 33);
+f(264241152, 63);
+f(268435456, 64);
+f(272629760, 65);
+f(532676608, 127);
+f(536870912, 128);
+f(541065216, 129);
+f(1069547520, 255);
+f(1073741824, 256);
+f(1077936128, 257);
+f(2143289344, 511);
+f(2147483648, 512);
+f(2151677952, 513);
+f(4290772992, 1023);
+f(4294967296, 1024);
+f(4299161600, 1025);
+f(8585740288, 2047);
+f(8589934592, 2048);
+f(8594128896, 2049);
+f(17175674880, 4095);
+f(17179869184, 4096);
+f(17184063488, 4097);
+f(34355544064, 8191);
+f(34359738368, 8192);
+f(34363932672, 8193);
+f(68715282432, 16383);
+f(68719476736, 16384);
+f(68723671040, 16385);
+f(137434759168, 32767);
+f(137438953472, 32768);
+f(137443147776, 32769);
+f(274873712640, 65535);
+f(274877906944, 65536);
+f(274882101248, 65537);
+f(549751619584, 131071);
+f(549755813888, 131072);
+f(549760008192, 131073);
+f(1099507433472, 262143);
+f(1099511627776, 262144);
+f(1099515822080, 262145);
+f(2199019061248, 524287);
+f(2199023255552, 524288);
+f(2199027449856, 524289);
+f(4398042316800, 1048575);
+f(4398046511104, 1048576);
+f(4398050705408, 1048577);
+f(8796088827904, 2097151);
+f(8796093022208, 2097152);
+f(8796097216512, 2097153);
+f(17592181850112, 4194303);
+f(17592186044416, 4194304);
+x = 4194305;
+f(0, 0);
+f(4194305, 1);
+f(8388610, 2);
+f(12582915, 3);
+f(16777220, 4);
+f(20971525, 5);
+f(29360135, 7);
+f(33554440, 8);
+f(37748745, 9);
+f(62914575, 15);
+f(67108880, 16);
+f(71303185, 17);
+f(130023455, 31);
+f(134217760, 32);
+f(138412065, 33);
+f(264241215, 63);
+f(268435520, 64);
+f(272629825, 65);
+f(532676735, 127);
+f(536871040, 128);
+f(541065345, 129);
+f(1069547775, 255);
+f(1073742080, 256);
+f(1077936385, 257);
+f(2143289855, 511);
+f(2147484160, 512);
+f(2151678465, 513);
+f(4290774015, 1023);
+f(4294968320, 1024);
+f(4299162625, 1025);
+f(8585742335, 2047);
+f(8589936640, 2048);
+f(8594130945, 2049);
+f(17175678975, 4095);
+f(17179873280, 4096);
+f(17184067585, 4097);
+f(34355552255, 8191);
+f(34359746560, 8192);
+f(34363940865, 8193);
+f(68715298815, 16383);
+f(68719493120, 16384);
+f(68723687425, 16385);
+f(137434791935, 32767);
+f(137438986240, 32768);
+f(137443180545, 32769);
+f(274873778175, 65535);
+f(274877972480, 65536);
+f(274882166785, 65537);
+f(549751750655, 131071);
+f(549755944960, 131072);
+f(549760139265, 131073);
+f(1099507695615, 262143);
+f(1099511889920, 262144);
+f(1099516084225, 262145);
+f(2199019585535, 524287);
+f(2199023779840, 524288);
+f(2199027974145, 524289);
+f(4398043365375, 1048575);
+f(4398047559680, 1048576);
+f(4398051753985, 1048577);
+f(8796090925055, 2097151);
+f(8796095119360, 2097152);
+f(8796099313665, 2097153);
+f(17592186044415, 4194303);
+f(17592190238720, 4194304);
+f(17592194433025, 4194305);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part6.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part6.js
new file mode 100644
index 0000000..91cb798
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part6.js
@@ -0,0 +1,554 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 8388607;
+f(0, 0);
+f(8388607, 1);
+f(16777214, 2);
+f(25165821, 3);
+f(33554428, 4);
+f(41943035, 5);
+f(58720249, 7);
+f(67108856, 8);
+f(75497463, 9);
+f(125829105, 15);
+f(134217712, 16);
+f(142606319, 17);
+f(260046817, 31);
+f(268435424, 32);
+f(276824031, 33);
+f(528482241, 63);
+f(536870848, 64);
+f(545259455, 65);
+f(1065353089, 127);
+f(1073741696, 128);
+f(1082130303, 129);
+f(2139094785, 255);
+f(2147483392, 256);
+f(2155871999, 257);
+f(4286578177, 511);
+f(4294966784, 512);
+f(4303355391, 513);
+f(8581544961, 1023);
+f(8589933568, 1024);
+f(8598322175, 1025);
+f(17171478529, 2047);
+f(17179867136, 2048);
+f(17188255743, 2049);
+f(34351345665, 4095);
+f(34359734272, 4096);
+f(34368122879, 4097);
+f(68711079937, 8191);
+f(68719468544, 8192);
+f(68727857151, 8193);
+f(137430548481, 16383);
+f(137438937088, 16384);
+f(137447325695, 16385);
+f(274869485569, 32767);
+f(274877874176, 32768);
+f(274886262783, 32769);
+f(549747359745, 65535);
+f(549755748352, 65536);
+f(549764136959, 65537);
+f(1099503108097, 131071);
+f(1099511496704, 131072);
+f(1099519885311, 131073);
+f(2199014604801, 262143);
+f(2199022993408, 262144);
+f(2199031382015, 262145);
+f(4398037598209, 524287);
+f(4398045986816, 524288);
+f(4398054375423, 524289);
+f(8796083585025, 1048575);
+f(8796091973632, 1048576);
+f(8796100362239, 1048577);
+f(17592175558657, 2097151);
+f(17592183947264, 2097152);
+f(17592192335871, 2097153);
+f(35184359505921, 4194303);
+f(35184367894528, 4194304);
+f(35184376283135, 4194305);
+f(70368727400449, 8388607);
+x = 8388608;
+f(0, 0);
+f(8388608, 1);
+f(16777216, 2);
+f(25165824, 3);
+f(33554432, 4);
+f(41943040, 5);
+f(58720256, 7);
+f(67108864, 8);
+f(75497472, 9);
+f(125829120, 15);
+f(134217728, 16);
+f(142606336, 17);
+f(260046848, 31);
+f(268435456, 32);
+f(276824064, 33);
+f(528482304, 63);
+f(536870912, 64);
+f(545259520, 65);
+f(1065353216, 127);
+f(1073741824, 128);
+f(1082130432, 129);
+f(2139095040, 255);
+f(2147483648, 256);
+f(2155872256, 257);
+f(4286578688, 511);
+f(4294967296, 512);
+f(4303355904, 513);
+f(8581545984, 1023);
+f(8589934592, 1024);
+f(8598323200, 1025);
+f(17171480576, 2047);
+f(17179869184, 2048);
+f(17188257792, 2049);
+f(34351349760, 4095);
+f(34359738368, 4096);
+f(34368126976, 4097);
+f(68711088128, 8191);
+f(68719476736, 8192);
+f(68727865344, 8193);
+f(137430564864, 16383);
+f(137438953472, 16384);
+f(137447342080, 16385);
+f(274869518336, 32767);
+f(274877906944, 32768);
+f(274886295552, 32769);
+f(549747425280, 65535);
+f(549755813888, 65536);
+f(549764202496, 65537);
+f(1099503239168, 131071);
+f(1099511627776, 131072);
+f(1099520016384, 131073);
+f(2199014866944, 262143);
+f(2199023255552, 262144);
+f(2199031644160, 262145);
+f(4398038122496, 524287);
+f(4398046511104, 524288);
+f(4398054899712, 524289);
+f(8796084633600, 1048575);
+f(8796093022208, 1048576);
+f(8796101410816, 1048577);
+f(17592177655808, 2097151);
+f(17592186044416, 2097152);
+f(17592194433024, 2097153);
+f(35184363700224, 4194303);
+f(35184372088832, 4194304);
+f(35184380477440, 4194305);
+f(70368735789056, 8388607);
+f(70368744177664, 8388608);
+x = 8388609;
+f(0, 0);
+f(8388609, 1);
+f(16777218, 2);
+f(25165827, 3);
+f(33554436, 4);
+f(41943045, 5);
+f(58720263, 7);
+f(67108872, 8);
+f(75497481, 9);
+f(125829135, 15);
+f(134217744, 16);
+f(142606353, 17);
+f(260046879, 31);
+f(268435488, 32);
+f(276824097, 33);
+f(528482367, 63);
+f(536870976, 64);
+f(545259585, 65);
+f(1065353343, 127);
+f(1073741952, 128);
+f(1082130561, 129);
+f(2139095295, 255);
+f(2147483904, 256);
+f(2155872513, 257);
+f(4286579199, 511);
+f(4294967808, 512);
+f(4303356417, 513);
+f(8581547007, 1023);
+f(8589935616, 1024);
+f(8598324225, 1025);
+f(17171482623, 2047);
+f(17179871232, 2048);
+f(17188259841, 2049);
+f(34351353855, 4095);
+f(34359742464, 4096);
+f(34368131073, 4097);
+f(68711096319, 8191);
+f(68719484928, 8192);
+f(68727873537, 8193);
+f(137430581247, 16383);
+f(137438969856, 16384);
+f(137447358465, 16385);
+f(274869551103, 32767);
+f(274877939712, 32768);
+f(274886328321, 32769);
+f(549747490815, 65535);
+f(549755879424, 65536);
+f(549764268033, 65537);
+f(1099503370239, 131071);
+f(1099511758848, 131072);
+f(1099520147457, 131073);
+f(2199015129087, 262143);
+f(2199023517696, 262144);
+f(2199031906305, 262145);
+f(4398038646783, 524287);
+f(4398047035392, 524288);
+f(4398055424001, 524289);
+f(8796085682175, 1048575);
+f(8796094070784, 1048576);
+f(8796102459393, 1048577);
+f(17592179752959, 2097151);
+f(17592188141568, 2097152);
+f(17592196530177, 2097153);
+f(35184367894527, 4194303);
+f(35184376283136, 4194304);
+f(35184384671745, 4194305);
+f(70368744177663, 8388607);
+f(70368752566272, 8388608);
+f(70368760954881, 8388609);
+x = 16777215;
+f(0, 0);
+f(16777215, 1);
+f(33554430, 2);
+f(50331645, 3);
+f(67108860, 4);
+f(83886075, 5);
+f(117440505, 7);
+f(134217720, 8);
+f(150994935, 9);
+f(251658225, 15);
+f(268435440, 16);
+f(285212655, 17);
+f(520093665, 31);
+f(536870880, 32);
+f(553648095, 33);
+f(1056964545, 63);
+f(1073741760, 64);
+f(1090518975, 65);
+f(2130706305, 127);
+f(2147483520, 128);
+f(2164260735, 129);
+f(4278189825, 255);
+f(4294967040, 256);
+f(4311744255, 257);
+f(8573156865, 511);
+f(8589934080, 512);
+f(8606711295, 513);
+f(17163090945, 1023);
+f(17179868160, 1024);
+f(17196645375, 1025);
+f(34342959105, 2047);
+f(34359736320, 2048);
+f(34376513535, 2049);
+f(68702695425, 4095);
+f(68719472640, 4096);
+f(68736249855, 4097);
+f(137422168065, 8191);
+f(137438945280, 8192);
+f(137455722495, 8193);
+f(274861113345, 16383);
+f(274877890560, 16384);
+f(274894667775, 16385);
+f(549739003905, 32767);
+f(549755781120, 32768);
+f(549772558335, 32769);
+f(1099494785025, 65535);
+f(1099511562240, 65536);
+f(1099528339455, 65537);
+f(2199006347265, 131071);
+f(2199023124480, 131072);
+f(2199039901695, 131073);
+f(4398029471745, 262143);
+f(4398046248960, 262144);
+f(4398063026175, 262145);
+f(8796075720705, 524287);
+f(8796092497920, 524288);
+f(8796109275135, 524289);
+f(17592168218625, 1048575);
+f(17592184995840, 1048576);
+f(17592201773055, 1048577);
+f(35184353214465, 2097151);
+f(35184369991680, 2097152);
+f(35184386768895, 2097153);
+f(70368723206145, 4194303);
+f(70368739983360, 4194304);
+f(70368756760575, 4194305);
+f(140737463189505, 8388607);
+f(140737479966720, 8388608);
+f(140737496743935, 8388609);
+f(281474943156225, 16777215);
+x = 16777216;
+f(0, 0);
+f(16777216, 1);
+f(33554432, 2);
+f(50331648, 3);
+f(67108864, 4);
+f(83886080, 5);
+f(117440512, 7);
+f(134217728, 8);
+f(150994944, 9);
+f(251658240, 15);
+f(268435456, 16);
+f(285212672, 17);
+f(520093696, 31);
+f(536870912, 32);
+f(553648128, 33);
+f(1056964608, 63);
+f(1073741824, 64);
+f(1090519040, 65);
+f(2130706432, 127);
+f(2147483648, 128);
+f(2164260864, 129);
+f(4278190080, 255);
+f(4294967296, 256);
+f(4311744512, 257);
+f(8573157376, 511);
+f(8589934592, 512);
+f(8606711808, 513);
+f(17163091968, 1023);
+f(17179869184, 1024);
+f(17196646400, 1025);
+f(34342961152, 2047);
+f(34359738368, 2048);
+f(34376515584, 2049);
+f(68702699520, 4095);
+f(68719476736, 4096);
+f(68736253952, 4097);
+f(137422176256, 8191);
+f(137438953472, 8192);
+f(137455730688, 8193);
+f(274861129728, 16383);
+f(274877906944, 16384);
+f(274894684160, 16385);
+f(549739036672, 32767);
+f(549755813888, 32768);
+f(549772591104, 32769);
+f(1099494850560, 65535);
+f(1099511627776, 65536);
+f(1099528404992, 65537);
+f(2199006478336, 131071);
+f(2199023255552, 131072);
+f(2199040032768, 131073);
+f(4398029733888, 262143);
+f(4398046511104, 262144);
+f(4398063288320, 262145);
+f(8796076244992, 524287);
+f(8796093022208, 524288);
+f(8796109799424, 524289);
+f(17592169267200, 1048575);
+f(17592186044416, 1048576);
+f(17592202821632, 1048577);
+f(35184355311616, 2097151);
+f(35184372088832, 2097152);
+f(35184388866048, 2097153);
+f(70368727400448, 4194303);
+f(70368744177664, 4194304);
+f(70368760954880, 4194305);
+f(140737471578112, 8388607);
+f(140737488355328, 8388608);
+f(140737505132544, 8388609);
+f(281474959933440, 16777215);
+f(281474976710656, 16777216);
+x = 16777217;
+f(0, 0);
+f(16777217, 1);
+f(33554434, 2);
+f(50331651, 3);
+f(67108868, 4);
+f(83886085, 5);
+f(117440519, 7);
+f(134217736, 8);
+f(150994953, 9);
+f(251658255, 15);
+f(268435472, 16);
+f(285212689, 17);
+f(520093727, 31);
+f(536870944, 32);
+f(553648161, 33);
+f(1056964671, 63);
+f(1073741888, 64);
+f(1090519105, 65);
+f(2130706559, 127);
+f(2147483776, 128);
+f(2164260993, 129);
+f(4278190335, 255);
+f(4294967552, 256);
+f(4311744769, 257);
+f(8573157887, 511);
+f(8589935104, 512);
+f(8606712321, 513);
+f(17163092991, 1023);
+f(17179870208, 1024);
+f(17196647425, 1025);
+f(34342963199, 2047);
+f(34359740416, 2048);
+f(34376517633, 2049);
+f(68702703615, 4095);
+f(68719480832, 4096);
+f(68736258049, 4097);
+f(137422184447, 8191);
+f(137438961664, 8192);
+f(137455738881, 8193);
+f(274861146111, 16383);
+f(274877923328, 16384);
+f(274894700545, 16385);
+f(549739069439, 32767);
+f(549755846656, 32768);
+f(549772623873, 32769);
+f(1099494916095, 65535);
+f(1099511693312, 65536);
+f(1099528470529, 65537);
+f(2199006609407, 131071);
+f(2199023386624, 131072);
+f(2199040163841, 131073);
+f(4398029996031, 262143);
+f(4398046773248, 262144);
+f(4398063550465, 262145);
+f(8796076769279, 524287);
+f(8796093546496, 524288);
+f(8796110323713, 524289);
+f(17592170315775, 1048575);
+f(17592187092992, 1048576);
+f(17592203870209, 1048577);
+f(35184357408767, 2097151);
+f(35184374185984, 2097152);
+f(35184390963201, 2097153);
+f(70368731594751, 4194303);
+f(70368748371968, 4194304);
+f(70368765149185, 4194305);
+f(140737479966719, 8388607);
+f(140737496743936, 8388608);
+f(140737513521153, 8388609);
+f(281474976710655, 16777215);
+f(281474993487872, 16777216);
+f(281475010265089, 16777217);
+x = 33554431;
+f(0, 0);
+f(33554431, 1);
+f(67108862, 2);
+f(100663293, 3);
+f(134217724, 4);
+f(167772155, 5);
+f(234881017, 7);
+f(268435448, 8);
+f(301989879, 9);
+f(503316465, 15);
+f(536870896, 16);
+f(570425327, 17);
+f(1040187361, 31);
+f(1073741792, 32);
+f(1107296223, 33);
+f(2113929153, 63);
+f(2147483584, 64);
+f(2181038015, 65);
+f(4261412737, 127);
+f(4294967168, 128);
+f(4328521599, 129);
+f(8556379905, 255);
+f(8589934336, 256);
+f(8623488767, 257);
+f(17146314241, 511);
+f(17179868672, 512);
+f(17213423103, 513);
+f(34326182913, 1023);
+f(34359737344, 1024);
+f(34393291775, 1025);
+f(68685920257, 2047);
+f(68719474688, 2048);
+f(68753029119, 2049);
+f(137405394945, 4095);
+f(137438949376, 4096);
+f(137472503807, 4097);
+f(274844344321, 8191);
+f(274877898752, 8192);
+f(274911453183, 8193);
+f(549722243073, 16383);
+f(549755797504, 16384);
+f(549789351935, 16385);
+f(1099478040577, 32767);
+f(1099511595008, 32768);
+f(1099545149439, 32769);
+f(2198989635585, 65535);
+f(2199023190016, 65536);
+f(2199056744447, 65537);
+f(4398012825601, 131071);
+f(4398046380032, 131072);
+f(4398079934463, 131073);
+f(8796059205633, 262143);
+f(8796092760064, 262144);
+f(8796126314495, 262145);
+f(17592151965697, 524287);
+f(17592185520128, 524288);
+f(17592219074559, 524289);
+f(35184337485825, 1048575);
+f(35184371040256, 1048576);
+f(35184404594687, 1048577);
+f(70368708526081, 2097151);
+f(70368742080512, 2097152);
+f(70368775634943, 2097153);
+f(140737450606593, 4194303);
+f(140737484161024, 4194304);
+f(140737517715455, 4194305);
+f(281474934767617, 8388607);
+f(281474968322048, 8388608);
+f(281475001876479, 8388609);
+f(562949903089665, 16777215);
+f(562949936644096, 16777216);
+f(562949970198527, 16777217);
+f(1125899839733761, 33554431); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part7.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part7.js
new file mode 100644
index 0000000..d517225
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part7.js
@@ -0,0 +1,497 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 33554432;
+f(0, 0);
+f(33554432, 1);
+f(67108864, 2);
+f(100663296, 3);
+f(134217728, 4);
+f(167772160, 5);
+f(234881024, 7);
+f(268435456, 8);
+f(301989888, 9);
+f(503316480, 15);
+f(536870912, 16);
+f(570425344, 17);
+f(1040187392, 31);
+f(1073741824, 32);
+f(1107296256, 33);
+f(2113929216, 63);
+f(2147483648, 64);
+f(2181038080, 65);
+f(4261412864, 127);
+f(4294967296, 128);
+f(4328521728, 129);
+f(8556380160, 255);
+f(8589934592, 256);
+f(8623489024, 257);
+f(17146314752, 511);
+f(17179869184, 512);
+f(17213423616, 513);
+f(34326183936, 1023);
+f(34359738368, 1024);
+f(34393292800, 1025);
+f(68685922304, 2047);
+f(68719476736, 2048);
+f(68753031168, 2049);
+f(137405399040, 4095);
+f(137438953472, 4096);
+f(137472507904, 4097);
+f(274844352512, 8191);
+f(274877906944, 8192);
+f(274911461376, 8193);
+f(549722259456, 16383);
+f(549755813888, 16384);
+f(549789368320, 16385);
+f(1099478073344, 32767);
+f(1099511627776, 32768);
+f(1099545182208, 32769);
+f(2198989701120, 65535);
+f(2199023255552, 65536);
+f(2199056809984, 65537);
+f(4398012956672, 131071);
+f(4398046511104, 131072);
+f(4398080065536, 131073);
+f(8796059467776, 262143);
+f(8796093022208, 262144);
+f(8796126576640, 262145);
+f(17592152489984, 524287);
+f(17592186044416, 524288);
+f(17592219598848, 524289);
+f(35184338534400, 1048575);
+f(35184372088832, 1048576);
+f(35184405643264, 1048577);
+f(70368710623232, 2097151);
+f(70368744177664, 2097152);
+f(70368777732096, 2097153);
+f(140737454800896, 4194303);
+f(140737488355328, 4194304);
+f(140737521909760, 4194305);
+f(281474943156224, 8388607);
+f(281474976710656, 8388608);
+f(281475010265088, 8388609);
+f(562949919866880, 16777215);
+f(562949953421312, 16777216);
+f(562949986975744, 16777217);
+f(1125899873288192, 33554431);
+f(1125899906842624, 33554432);
+x = 33554433;
+f(0, 0);
+f(33554433, 1);
+f(67108866, 2);
+f(100663299, 3);
+f(134217732, 4);
+f(167772165, 5);
+f(234881031, 7);
+f(268435464, 8);
+f(301989897, 9);
+f(503316495, 15);
+f(536870928, 16);
+f(570425361, 17);
+f(1040187423, 31);
+f(1073741856, 32);
+f(1107296289, 33);
+f(2113929279, 63);
+f(2147483712, 64);
+f(2181038145, 65);
+f(4261412991, 127);
+f(4294967424, 128);
+f(4328521857, 129);
+f(8556380415, 255);
+f(8589934848, 256);
+f(8623489281, 257);
+f(17146315263, 511);
+f(17179869696, 512);
+f(17213424129, 513);
+f(34326184959, 1023);
+f(34359739392, 1024);
+f(34393293825, 1025);
+f(68685924351, 2047);
+f(68719478784, 2048);
+f(68753033217, 2049);
+f(137405403135, 4095);
+f(137438957568, 4096);
+f(137472512001, 4097);
+f(274844360703, 8191);
+f(274877915136, 8192);
+f(274911469569, 8193);
+f(549722275839, 16383);
+f(549755830272, 16384);
+f(549789384705, 16385);
+f(1099478106111, 32767);
+f(1099511660544, 32768);
+f(1099545214977, 32769);
+f(2198989766655, 65535);
+f(2199023321088, 65536);
+f(2199056875521, 65537);
+f(4398013087743, 131071);
+f(4398046642176, 131072);
+f(4398080196609, 131073);
+f(8796059729919, 262143);
+f(8796093284352, 262144);
+f(8796126838785, 262145);
+f(17592153014271, 524287);
+f(17592186568704, 524288);
+f(17592220123137, 524289);
+f(35184339582975, 1048575);
+f(35184373137408, 1048576);
+f(35184406691841, 1048577);
+f(70368712720383, 2097151);
+f(70368746274816, 2097152);
+f(70368779829249, 2097153);
+f(140737458995199, 4194303);
+f(140737492549632, 4194304);
+f(140737526104065, 4194305);
+f(281474951544831, 8388607);
+f(281474985099264, 8388608);
+f(281475018653697, 8388609);
+f(562949936644095, 16777215);
+f(562949970198528, 16777216);
+f(562950003752961, 16777217);
+f(1125899906842623, 33554431);
+f(1125899940397056, 33554432);
+f(1125899973951489, 33554433);
+x = 67108863;
+f(0, 0);
+f(67108863, 1);
+f(134217726, 2);
+f(201326589, 3);
+f(268435452, 4);
+f(335544315, 5);
+f(469762041, 7);
+f(536870904, 8);
+f(603979767, 9);
+f(1006632945, 15);
+f(1073741808, 16);
+f(1140850671, 17);
+f(2080374753, 31);
+f(2147483616, 32);
+f(2214592479, 33);
+f(4227858369, 63);
+f(4294967232, 64);
+f(4362076095, 65);
+f(8522825601, 127);
+f(8589934464, 128);
+f(8657043327, 129);
+f(17112760065, 255);
+f(17179868928, 256);
+f(17246977791, 257);
+f(34292628993, 511);
+f(34359737856, 512);
+f(34426846719, 513);
+f(68652366849, 1023);
+f(68719475712, 1024);
+f(68786584575, 1025);
+f(137371842561, 2047);
+f(137438951424, 2048);
+f(137506060287, 2049);
+f(274810793985, 4095);
+f(274877902848, 4096);
+f(274945011711, 4097);
+f(549688696833, 8191);
+f(549755805696, 8192);
+f(549822914559, 8193);
+f(1099444502529, 16383);
+f(1099511611392, 16384);
+f(1099578720255, 16385);
+f(2198956113921, 32767);
+f(2199023222784, 32768);
+f(2199090331647, 32769);
+f(4397979336705, 65535);
+f(4398046445568, 65536);
+f(4398113554431, 65537);
+f(8796025782273, 131071);
+f(8796092891136, 131072);
+f(8796159999999, 131073);
+f(17592118673409, 262143);
+f(17592185782272, 262144);
+f(17592252891135, 262145);
+f(35184304455681, 524287);
+f(35184371564544, 524288);
+f(35184438673407, 524289);
+f(70368676020225, 1048575);
+f(70368743129088, 1048576);
+f(70368810237951, 1048577);
+f(140737419149313, 2097151);
+f(140737486258176, 2097152);
+f(140737553367039, 2097153);
+f(281474905407489, 4194303);
+f(281474972516352, 4194304);
+f(281475039625215, 4194305);
+f(562949877923841, 8388607);
+f(562949945032704, 8388608);
+f(562950012141567, 8388609);
+f(1125899822956545, 16777215);
+f(1125899890065408, 16777216);
+f(1125899957174271, 16777217);
+x = 67108864;
+f(0, 0);
+f(67108864, 1);
+f(134217728, 2);
+f(201326592, 3);
+f(268435456, 4);
+f(335544320, 5);
+f(469762048, 7);
+f(536870912, 8);
+f(603979776, 9);
+f(1006632960, 15);
+f(1073741824, 16);
+f(1140850688, 17);
+f(2080374784, 31);
+f(2147483648, 32);
+f(2214592512, 33);
+f(4227858432, 63);
+f(4294967296, 64);
+f(4362076160, 65);
+f(8522825728, 127);
+f(8589934592, 128);
+f(8657043456, 129);
+f(17112760320, 255);
+f(17179869184, 256);
+f(17246978048, 257);
+f(34292629504, 511);
+f(34359738368, 512);
+f(34426847232, 513);
+f(68652367872, 1023);
+f(68719476736, 1024);
+f(68786585600, 1025);
+f(137371844608, 2047);
+f(137438953472, 2048);
+f(137506062336, 2049);
+f(274810798080, 4095);
+f(274877906944, 4096);
+f(274945015808, 4097);
+f(549688705024, 8191);
+f(549755813888, 8192);
+f(549822922752, 8193);
+f(1099444518912, 16383);
+f(1099511627776, 16384);
+f(1099578736640, 16385);
+f(2198956146688, 32767);
+f(2199023255552, 32768);
+f(2199090364416, 32769);
+f(4397979402240, 65535);
+f(4398046511104, 65536);
+f(4398113619968, 65537);
+f(8796025913344, 131071);
+f(8796093022208, 131072);
+f(8796160131072, 131073);
+f(17592118935552, 262143);
+f(17592186044416, 262144);
+f(17592253153280, 262145);
+f(35184304979968, 524287);
+f(35184372088832, 524288);
+f(35184439197696, 524289);
+f(70368677068800, 1048575);
+f(70368744177664, 1048576);
+f(70368811286528, 1048577);
+f(140737421246464, 2097151);
+f(140737488355328, 2097152);
+f(140737555464192, 2097153);
+f(281474909601792, 4194303);
+f(281474976710656, 4194304);
+f(281475043819520, 4194305);
+f(562949886312448, 8388607);
+f(562949953421312, 8388608);
+f(562950020530176, 8388609);
+f(1125899839733760, 16777215);
+f(1125899906842624, 16777216);
+f(1125899973951488, 16777217);
+x = 67108865;
+f(0, 0);
+f(67108865, 1);
+f(134217730, 2);
+f(201326595, 3);
+f(268435460, 4);
+f(335544325, 5);
+f(469762055, 7);
+f(536870920, 8);
+f(603979785, 9);
+f(1006632975, 15);
+f(1073741840, 16);
+f(1140850705, 17);
+f(2080374815, 31);
+f(2147483680, 32);
+f(2214592545, 33);
+f(4227858495, 63);
+f(4294967360, 64);
+f(4362076225, 65);
+f(8522825855, 127);
+f(8589934720, 128);
+f(8657043585, 129);
+f(17112760575, 255);
+f(17179869440, 256);
+f(17246978305, 257);
+f(34292630015, 511);
+f(34359738880, 512);
+f(34426847745, 513);
+f(68652368895, 1023);
+f(68719477760, 1024);
+f(68786586625, 1025);
+f(137371846655, 2047);
+f(137438955520, 2048);
+f(137506064385, 2049);
+f(274810802175, 4095);
+f(274877911040, 4096);
+f(274945019905, 4097);
+f(549688713215, 8191);
+f(549755822080, 8192);
+f(549822930945, 8193);
+f(1099444535295, 16383);
+f(1099511644160, 16384);
+f(1099578753025, 16385);
+f(2198956179455, 32767);
+f(2199023288320, 32768);
+f(2199090397185, 32769);
+f(4397979467775, 65535);
+f(4398046576640, 65536);
+f(4398113685505, 65537);
+f(8796026044415, 131071);
+f(8796093153280, 131072);
+f(8796160262145, 131073);
+f(17592119197695, 262143);
+f(17592186306560, 262144);
+f(17592253415425, 262145);
+f(35184305504255, 524287);
+f(35184372613120, 524288);
+f(35184439721985, 524289);
+f(70368678117375, 1048575);
+f(70368745226240, 1048576);
+f(70368812335105, 1048577);
+f(140737423343615, 2097151);
+f(140737490452480, 2097152);
+f(140737557561345, 2097153);
+f(281474913796095, 4194303);
+f(281474980904960, 4194304);
+f(281475048013825, 4194305);
+f(562949894701055, 8388607);
+f(562949961809920, 8388608);
+f(562950028918785, 8388609);
+f(1125899856510975, 16777215);
+f(1125899923619840, 16777216);
+f(1125899990728705, 16777217);
+x = 134217727;
+f(0, 0);
+f(134217727, 1);
+f(268435454, 2);
+f(402653181, 3);
+f(536870908, 4);
+f(671088635, 5);
+f(939524089, 7);
+f(1073741816, 8);
+f(1207959543, 9);
+f(2013265905, 15);
+f(2147483632, 16);
+f(2281701359, 17);
+f(4160749537, 31);
+f(4294967264, 32);
+f(4429184991, 33);
+f(8455716801, 63);
+f(8589934528, 64);
+f(8724152255, 65);
+f(17045651329, 127);
+f(17179869056, 128);
+f(17314086783, 129);
+f(34225520385, 255);
+f(34359738112, 256);
+f(34493955839, 257);
+f(68585258497, 511);
+f(68719476224, 512);
+f(68853693951, 513);
+f(137304734721, 1023);
+f(137438952448, 1024);
+f(137573170175, 1025);
+f(274743687169, 2047);
+f(274877904896, 2048);
+f(275012122623, 2049);
+f(549621592065, 4095);
+f(549755809792, 4096);
+f(549890027519, 4097);
+f(1099377401857, 8191);
+f(1099511619584, 8192);
+f(1099645837311, 8193);
+f(2198889021441, 16383);
+f(2199023239168, 16384);
+f(2199157456895, 16385);
+f(4397912260609, 32767);
+f(4398046478336, 32768);
+f(4398180696063, 32769);
+f(8795958738945, 65535);
+f(8796092956672, 65536);
+f(8796227174399, 65537);
+f(17592051695617, 131071);
+f(17592185913344, 131072);
+f(17592320131071, 131073);
+f(35184237608961, 262143);
+f(35184371826688, 262144);
+f(35184506044415, 262145);
+f(70368609435649, 524287);
+f(70368743653376, 524288);
+f(70368877871103, 524289);
+f(140737353089025, 1048575);
+f(140737487306752, 1048576);
+f(140737621524479, 1048577);
+f(281474840395777, 2097151);
+f(281474974613504, 2097152);
+f(281475108831231, 2097153);
+f(562949815009281, 4194303);
+f(562949949227008, 4194304);
+f(562950083444735, 4194305);
+f(1125899764236289, 8388607);
+f(1125899898454016, 8388608);
+f(1125900032671743, 8388609);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part8.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part8.js
new file mode 100644
index 0000000..7e5f285
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part8.js
@@ -0,0 +1,526 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 134217728;
+f(0, 0);
+f(134217728, 1);
+f(268435456, 2);
+f(402653184, 3);
+f(536870912, 4);
+f(671088640, 5);
+f(939524096, 7);
+f(1073741824, 8);
+f(1207959552, 9);
+f(2013265920, 15);
+f(2147483648, 16);
+f(2281701376, 17);
+f(4160749568, 31);
+f(4294967296, 32);
+f(4429185024, 33);
+f(8455716864, 63);
+f(8589934592, 64);
+f(8724152320, 65);
+f(17045651456, 127);
+f(17179869184, 128);
+f(17314086912, 129);
+f(34225520640, 255);
+f(34359738368, 256);
+f(34493956096, 257);
+f(68585259008, 511);
+f(68719476736, 512);
+f(68853694464, 513);
+f(137304735744, 1023);
+f(137438953472, 1024);
+f(137573171200, 1025);
+f(274743689216, 2047);
+f(274877906944, 2048);
+f(275012124672, 2049);
+f(549621596160, 4095);
+f(549755813888, 4096);
+f(549890031616, 4097);
+f(1099377410048, 8191);
+f(1099511627776, 8192);
+f(1099645845504, 8193);
+f(2198889037824, 16383);
+f(2199023255552, 16384);
+f(2199157473280, 16385);
+f(4397912293376, 32767);
+f(4398046511104, 32768);
+f(4398180728832, 32769);
+f(8795958804480, 65535);
+f(8796093022208, 65536);
+f(8796227239936, 65537);
+f(17592051826688, 131071);
+f(17592186044416, 131072);
+f(17592320262144, 131073);
+f(35184237871104, 262143);
+f(35184372088832, 262144);
+f(35184506306560, 262145);
+f(70368609959936, 524287);
+f(70368744177664, 524288);
+f(70368878395392, 524289);
+f(140737354137600, 1048575);
+f(140737488355328, 1048576);
+f(140737622573056, 1048577);
+f(281474842492928, 2097151);
+f(281474976710656, 2097152);
+f(281475110928384, 2097153);
+f(562949819203584, 4194303);
+f(562949953421312, 4194304);
+f(562950087639040, 4194305);
+f(1125899772624896, 8388607);
+f(1125899906842624, 8388608);
+f(1125900041060352, 8388609);
+x = 134217729;
+f(0, 0);
+f(134217729, 1);
+f(268435458, 2);
+f(402653187, 3);
+f(536870916, 4);
+f(671088645, 5);
+f(939524103, 7);
+f(1073741832, 8);
+f(1207959561, 9);
+f(2013265935, 15);
+f(2147483664, 16);
+f(2281701393, 17);
+f(4160749599, 31);
+f(4294967328, 32);
+f(4429185057, 33);
+f(8455716927, 63);
+f(8589934656, 64);
+f(8724152385, 65);
+f(17045651583, 127);
+f(17179869312, 128);
+f(17314087041, 129);
+f(34225520895, 255);
+f(34359738624, 256);
+f(34493956353, 257);
+f(68585259519, 511);
+f(68719477248, 512);
+f(68853694977, 513);
+f(137304736767, 1023);
+f(137438954496, 1024);
+f(137573172225, 1025);
+f(274743691263, 2047);
+f(274877908992, 2048);
+f(275012126721, 2049);
+f(549621600255, 4095);
+f(549755817984, 4096);
+f(549890035713, 4097);
+f(1099377418239, 8191);
+f(1099511635968, 8192);
+f(1099645853697, 8193);
+f(2198889054207, 16383);
+f(2199023271936, 16384);
+f(2199157489665, 16385);
+f(4397912326143, 32767);
+f(4398046543872, 32768);
+f(4398180761601, 32769);
+f(8795958870015, 65535);
+f(8796093087744, 65536);
+f(8796227305473, 65537);
+f(17592051957759, 131071);
+f(17592186175488, 131072);
+f(17592320393217, 131073);
+f(35184238133247, 262143);
+f(35184372350976, 262144);
+f(35184506568705, 262145);
+f(70368610484223, 524287);
+f(70368744701952, 524288);
+f(70368878919681, 524289);
+f(140737355186175, 1048575);
+f(140737489403904, 1048576);
+f(140737623621633, 1048577);
+f(281474844590079, 2097151);
+f(281474978807808, 2097152);
+f(281475113025537, 2097153);
+f(562949823397887, 4194303);
+f(562949957615616, 4194304);
+f(562950091833345, 4194305);
+f(1125899781013503, 8388607);
+f(1125899915231232, 8388608);
+f(1125900049448961, 8388609);
+x = 268435455;
+f(0, 0);
+f(268435455, 1);
+f(536870910, 2);
+f(805306365, 3);
+f(1073741820, 4);
+f(1342177275, 5);
+f(1879048185, 7);
+f(2147483640, 8);
+f(2415919095, 9);
+f(4026531825, 15);
+f(4294967280, 16);
+f(4563402735, 17);
+f(8321499105, 31);
+f(8589934560, 32);
+f(8858370015, 33);
+f(16911433665, 63);
+f(17179869120, 64);
+f(17448304575, 65);
+f(34091302785, 127);
+f(34359738240, 128);
+f(34628173695, 129);
+f(68451041025, 255);
+f(68719476480, 256);
+f(68987911935, 257);
+f(137170517505, 511);
+f(137438952960, 512);
+f(137707388415, 513);
+f(274609470465, 1023);
+f(274877905920, 1024);
+f(275146341375, 1025);
+f(549487376385, 2047);
+f(549755811840, 2048);
+f(550024247295, 2049);
+f(1099243188225, 4095);
+f(1099511623680, 4096);
+f(1099780059135, 4097);
+f(2198754811905, 8191);
+f(2199023247360, 8192);
+f(2199291682815, 8193);
+f(4397778059265, 16383);
+f(4398046494720, 16384);
+f(4398314930175, 16385);
+f(8795824553985, 32767);
+f(8796092989440, 32768);
+f(8796361424895, 32769);
+f(17591917543425, 65535);
+f(17592185978880, 65536);
+f(17592454414335, 65537);
+f(35184103522305, 131071);
+f(35184371957760, 131072);
+f(35184640393215, 131073);
+f(70368475480065, 262143);
+f(70368743915520, 262144);
+f(70369012350975, 262145);
+f(140737219395585, 524287);
+f(140737487831040, 524288);
+f(140737756266495, 524289);
+f(281474707226625, 1048575);
+f(281474975662080, 1048576);
+f(281475244097535, 1048577);
+f(562949682888705, 2097151);
+f(562949951324160, 2097152);
+f(562950219759615, 2097153);
+f(1125899634212865, 4194303);
+f(1125899902648320, 4194304);
+f(1125900171083775, 4194305);
+x = 268435456;
+f(0, 0);
+f(268435456, 1);
+f(536870912, 2);
+f(805306368, 3);
+f(1073741824, 4);
+f(1342177280, 5);
+f(1879048192, 7);
+f(2147483648, 8);
+f(2415919104, 9);
+f(4026531840, 15);
+f(4294967296, 16);
+f(4563402752, 17);
+f(8321499136, 31);
+f(8589934592, 32);
+f(8858370048, 33);
+f(16911433728, 63);
+f(17179869184, 64);
+f(17448304640, 65);
+f(34091302912, 127);
+f(34359738368, 128);
+f(34628173824, 129);
+f(68451041280, 255);
+f(68719476736, 256);
+f(68987912192, 257);
+f(137170518016, 511);
+f(137438953472, 512);
+f(137707388928, 513);
+f(274609471488, 1023);
+f(274877906944, 1024);
+f(275146342400, 1025);
+f(549487378432, 2047);
+f(549755813888, 2048);
+f(550024249344, 2049);
+f(1099243192320, 4095);
+f(1099511627776, 4096);
+f(1099780063232, 4097);
+f(2198754820096, 8191);
+f(2199023255552, 8192);
+f(2199291691008, 8193);
+f(4397778075648, 16383);
+f(4398046511104, 16384);
+f(4398314946560, 16385);
+f(8795824586752, 32767);
+f(8796093022208, 32768);
+f(8796361457664, 32769);
+f(17591917608960, 65535);
+f(17592186044416, 65536);
+f(17592454479872, 65537);
+f(35184103653376, 131071);
+f(35184372088832, 131072);
+f(35184640524288, 131073);
+f(70368475742208, 262143);
+f(70368744177664, 262144);
+f(70369012613120, 262145);
+f(140737219919872, 524287);
+f(140737488355328, 524288);
+f(140737756790784, 524289);
+f(281474708275200, 1048575);
+f(281474976710656, 1048576);
+f(281475245146112, 1048577);
+f(562949684985856, 2097151);
+f(562949953421312, 2097152);
+f(562950221856768, 2097153);
+f(1125899638407168, 4194303);
+f(1125899906842624, 4194304);
+f(1125900175278080, 4194305);
+x = 268435457;
+f(0, 0);
+f(268435457, 1);
+f(536870914, 2);
+f(805306371, 3);
+f(1073741828, 4);
+f(1342177285, 5);
+f(1879048199, 7);
+f(2147483656, 8);
+f(2415919113, 9);
+f(4026531855, 15);
+f(4294967312, 16);
+f(4563402769, 17);
+f(8321499167, 31);
+f(8589934624, 32);
+f(8858370081, 33);
+f(16911433791, 63);
+f(17179869248, 64);
+f(17448304705, 65);
+f(34091303039, 127);
+f(34359738496, 128);
+f(34628173953, 129);
+f(68451041535, 255);
+f(68719476992, 256);
+f(68987912449, 257);
+f(137170518527, 511);
+f(137438953984, 512);
+f(137707389441, 513);
+f(274609472511, 1023);
+f(274877907968, 1024);
+f(275146343425, 1025);
+f(549487380479, 2047);
+f(549755815936, 2048);
+f(550024251393, 2049);
+f(1099243196415, 4095);
+f(1099511631872, 4096);
+f(1099780067329, 4097);
+f(2198754828287, 8191);
+f(2199023263744, 8192);
+f(2199291699201, 8193);
+f(4397778092031, 16383);
+f(4398046527488, 16384);
+f(4398314962945, 16385);
+f(8795824619519, 32767);
+f(8796093054976, 32768);
+f(8796361490433, 32769);
+f(17591917674495, 65535);
+f(17592186109952, 65536);
+f(17592454545409, 65537);
+f(35184103784447, 131071);
+f(35184372219904, 131072);
+f(35184640655361, 131073);
+f(70368476004351, 262143);
+f(70368744439808, 262144);
+f(70369012875265, 262145);
+f(140737220444159, 524287);
+f(140737488879616, 524288);
+f(140737757315073, 524289);
+f(281474709323775, 1048575);
+f(281474977759232, 1048576);
+f(281475246194689, 1048577);
+f(562949687083007, 2097151);
+f(562949955518464, 2097152);
+f(562950223953921, 2097153);
+f(1125899642601471, 4194303);
+f(1125899911036928, 4194304);
+f(1125900179472385, 4194305);
+x = 536870911;
+f(0, 0);
+f(536870911, 1);
+f(1073741822, 2);
+f(1610612733, 3);
+f(2147483644, 4);
+f(2684354555, 5);
+f(3758096377, 7);
+f(4294967288, 8);
+f(4831838199, 9);
+f(8053063665, 15);
+f(8589934576, 16);
+f(9126805487, 17);
+f(16642998241, 31);
+f(17179869152, 32);
+f(17716740063, 33);
+f(33822867393, 63);
+f(34359738304, 64);
+f(34896609215, 65);
+f(68182605697, 127);
+f(68719476608, 128);
+f(69256347519, 129);
+f(136902082305, 255);
+f(137438953216, 256);
+f(137975824127, 257);
+f(274341035521, 511);
+f(274877906432, 512);
+f(275414777343, 513);
+f(549218941953, 1023);
+f(549755812864, 1024);
+f(550292683775, 1025);
+f(1098974754817, 2047);
+f(1099511625728, 2048);
+f(1100048496639, 2049);
+f(2198486380545, 4095);
+f(2199023251456, 4096);
+f(2199560122367, 4097);
+f(4397509632001, 8191);
+f(4398046502912, 8192);
+f(4398583373823, 8193);
+f(8795556134913, 16383);
+f(8796093005824, 16384);
+f(8796629876735, 16385);
+f(17591649140737, 32767);
+f(17592186011648, 32768);
+f(17592722882559, 32769);
+f(35183835152385, 65535);
+f(35184372023296, 65536);
+f(35184908894207, 65537);
+f(70368207175681, 131071);
+f(70368744046592, 131072);
+f(70369280917503, 131073);
+f(140736951222273, 262143);
+f(140737488093184, 262144);
+f(140738024964095, 262145);
+f(281474439315457, 524287);
+f(281474976186368, 524288);
+f(281475513057279, 524289);
+f(562949415501825, 1048575);
+f(562949952372736, 1048576);
+f(562950489243647, 1048577);
+f(1125899367874561, 2097151);
+f(1125899904745472, 2097152);
+f(1125900441616383, 2097153);
+x = 536870912;
+f(0, 0);
+f(536870912, 1);
+f(1073741824, 2);
+f(1610612736, 3);
+f(2147483648, 4);
+f(2684354560, 5);
+f(3758096384, 7);
+f(4294967296, 8);
+f(4831838208, 9);
+f(8053063680, 15);
+f(8589934592, 16);
+f(9126805504, 17);
+f(16642998272, 31);
+f(17179869184, 32);
+f(17716740096, 33);
+f(33822867456, 63);
+f(34359738368, 64);
+f(34896609280, 65);
+f(68182605824, 127);
+f(68719476736, 128);
+f(69256347648, 129);
+f(136902082560, 255);
+f(137438953472, 256);
+f(137975824384, 257);
+f(274341036032, 511);
+f(274877906944, 512);
+f(275414777856, 513);
+f(549218942976, 1023);
+f(549755813888, 1024);
+f(550292684800, 1025);
+f(1098974756864, 2047);
+f(1099511627776, 2048);
+f(1100048498688, 2049);
+f(2198486384640, 4095);
+f(2199023255552, 4096);
+f(2199560126464, 4097);
+f(4397509640192, 8191);
+f(4398046511104, 8192);
+f(4398583382016, 8193);
+f(8795556151296, 16383);
+f(8796093022208, 16384);
+f(8796629893120, 16385);
+f(17591649173504, 32767);
+f(17592186044416, 32768);
+f(17592722915328, 32769);
+f(35183835217920, 65535);
+f(35184372088832, 65536);
+f(35184908959744, 65537);
+f(70368207306752, 131071);
+f(70368744177664, 131072);
+f(70369281048576, 131073);
+f(140736951484416, 262143);
+f(140737488355328, 262144);
+f(140738025226240, 262145);
+f(281474439839744, 524287);
+f(281474976710656, 524288);
+f(281475513581568, 524289);
+f(562949416550400, 1048575);
+f(562949953421312, 1048576);
+f(562950490292224, 1048577);
+f(1125899369971712, 2097151);
+f(1125899906842624, 2097152);
+f(1125900443713536, 2097153);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part9.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part9.js
new file mode 100644
index 0000000..f329a5a
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/mul-exhaustive-part9.js
@@ -0,0 +1,533 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+
+// Converts a number to string respecting -0.
+function stringify(n) {
+ if ((1 / n) === -Infinity) return "-0";
+ return String(n);
+}
+
+function f(expected, y) {
+ function testEval(string, x, y) {
+ var mulFunction = Function("x, y", "return " + string);
+ return mulFunction(x, y);
+ }
+ function mulTest(expected, x, y) {
+ assertEquals(expected, x * y);
+ assertEquals(expected, testEval(stringify(x) + " * y", x, y));
+ assertEquals(expected, testEval("x * " + stringify(y), x, y));
+ assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
+ }
+ mulTest(expected, x, y);
+ mulTest(-expected, -x, y);
+ mulTest(-expected, x, -y);
+ mulTest(expected, -x, -y);
+ if (x === y) return; // Symmetric cases not necessary.
+ mulTest(expected, y, x);
+ mulTest(-expected, -y, x);
+ mulTest(-expected, y, -x);
+ mulTest(expected, -y, -x);
+}
+
+x = 536870913;
+f(0, 0);
+f(536870913, 1);
+f(1073741826, 2);
+f(1610612739, 3);
+f(2147483652, 4);
+f(2684354565, 5);
+f(3758096391, 7);
+f(4294967304, 8);
+f(4831838217, 9);
+f(8053063695, 15);
+f(8589934608, 16);
+f(9126805521, 17);
+f(16642998303, 31);
+f(17179869216, 32);
+f(17716740129, 33);
+f(33822867519, 63);
+f(34359738432, 64);
+f(34896609345, 65);
+f(68182605951, 127);
+f(68719476864, 128);
+f(69256347777, 129);
+f(136902082815, 255);
+f(137438953728, 256);
+f(137975824641, 257);
+f(274341036543, 511);
+f(274877907456, 512);
+f(275414778369, 513);
+f(549218943999, 1023);
+f(549755814912, 1024);
+f(550292685825, 1025);
+f(1098974758911, 2047);
+f(1099511629824, 2048);
+f(1100048500737, 2049);
+f(2198486388735, 4095);
+f(2199023259648, 4096);
+f(2199560130561, 4097);
+f(4397509648383, 8191);
+f(4398046519296, 8192);
+f(4398583390209, 8193);
+f(8795556167679, 16383);
+f(8796093038592, 16384);
+f(8796629909505, 16385);
+f(17591649206271, 32767);
+f(17592186077184, 32768);
+f(17592722948097, 32769);
+f(35183835283455, 65535);
+f(35184372154368, 65536);
+f(35184909025281, 65537);
+f(70368207437823, 131071);
+f(70368744308736, 131072);
+f(70369281179649, 131073);
+f(140736951746559, 262143);
+f(140737488617472, 262144);
+f(140738025488385, 262145);
+f(281474440364031, 524287);
+f(281474977234944, 524288);
+f(281475514105857, 524289);
+f(562949417598975, 1048575);
+f(562949954469888, 1048576);
+f(562950491340801, 1048577);
+f(1125899372068863, 2097151);
+f(1125899908939776, 2097152);
+f(1125900445810689, 2097153);
+x = 1073741823;
+f(0, 0);
+f(1073741823, 1);
+f(2147483646, 2);
+f(3221225469, 3);
+f(4294967292, 4);
+f(5368709115, 5);
+f(7516192761, 7);
+f(8589934584, 8);
+f(9663676407, 9);
+f(16106127345, 15);
+f(17179869168, 16);
+f(18253610991, 17);
+f(33285996513, 31);
+f(34359738336, 32);
+f(35433480159, 33);
+f(67645734849, 63);
+f(68719476672, 64);
+f(69793218495, 65);
+f(136365211521, 127);
+f(137438953344, 128);
+f(138512695167, 129);
+f(273804164865, 255);
+f(274877906688, 256);
+f(275951648511, 257);
+f(548682071553, 511);
+f(549755813376, 512);
+f(550829555199, 513);
+f(1098437884929, 1023);
+f(1099511626752, 1024);
+f(1100585368575, 1025);
+f(2197949511681, 2047);
+f(2199023253504, 2048);
+f(2200096995327, 2049);
+f(4396972765185, 4095);
+f(4398046507008, 4096);
+f(4399120248831, 4097);
+f(8795019272193, 8191);
+f(8796093014016, 8192);
+f(8797166755839, 8193);
+f(17591112286209, 16383);
+f(17592186028032, 16384);
+f(17593259769855, 16385);
+f(35183298314241, 32767);
+f(35184372056064, 32768);
+f(35185445797887, 32769);
+f(70367670370305, 65535);
+f(70368744112128, 65536);
+f(70369817853951, 65537);
+f(140736414482433, 131071);
+f(140737488224256, 131072);
+f(140738561966079, 131073);
+f(281473902706689, 262143);
+f(281474976448512, 262144);
+f(281476050190335, 262145);
+f(562948879155201, 524287);
+f(562949952897024, 524288);
+f(562951026638847, 524289);
+f(1125898832052225, 1048575);
+f(1125899905794048, 1048576);
+f(1125900979535871, 1048577);
+x = 1073741824;
+f(0, 0);
+f(1073741824, 1);
+f(2147483648, 2);
+f(3221225472, 3);
+f(4294967296, 4);
+f(5368709120, 5);
+f(7516192768, 7);
+f(8589934592, 8);
+f(9663676416, 9);
+f(16106127360, 15);
+f(17179869184, 16);
+f(18253611008, 17);
+f(33285996544, 31);
+f(34359738368, 32);
+f(35433480192, 33);
+f(67645734912, 63);
+f(68719476736, 64);
+f(69793218560, 65);
+f(136365211648, 127);
+f(137438953472, 128);
+f(138512695296, 129);
+f(273804165120, 255);
+f(274877906944, 256);
+f(275951648768, 257);
+f(548682072064, 511);
+f(549755813888, 512);
+f(550829555712, 513);
+f(1098437885952, 1023);
+f(1099511627776, 1024);
+f(1100585369600, 1025);
+f(2197949513728, 2047);
+f(2199023255552, 2048);
+f(2200096997376, 2049);
+f(4396972769280, 4095);
+f(4398046511104, 4096);
+f(4399120252928, 4097);
+f(8795019280384, 8191);
+f(8796093022208, 8192);
+f(8797166764032, 8193);
+f(17591112302592, 16383);
+f(17592186044416, 16384);
+f(17593259786240, 16385);
+f(35183298347008, 32767);
+f(35184372088832, 32768);
+f(35185445830656, 32769);
+f(70367670435840, 65535);
+f(70368744177664, 65536);
+f(70369817919488, 65537);
+f(140736414613504, 131071);
+f(140737488355328, 131072);
+f(140738562097152, 131073);
+f(281473902968832, 262143);
+f(281474976710656, 262144);
+f(281476050452480, 262145);
+f(562948879679488, 524287);
+f(562949953421312, 524288);
+f(562951027163136, 524289);
+f(1125898833100800, 1048575);
+f(1125899906842624, 1048576);
+f(1125900980584448, 1048577);
+x = 1073741825;
+f(0, 0);
+f(1073741825, 1);
+f(2147483650, 2);
+f(3221225475, 3);
+f(4294967300, 4);
+f(5368709125, 5);
+f(7516192775, 7);
+f(8589934600, 8);
+f(9663676425, 9);
+f(16106127375, 15);
+f(17179869200, 16);
+f(18253611025, 17);
+f(33285996575, 31);
+f(34359738400, 32);
+f(35433480225, 33);
+f(67645734975, 63);
+f(68719476800, 64);
+f(69793218625, 65);
+f(136365211775, 127);
+f(137438953600, 128);
+f(138512695425, 129);
+f(273804165375, 255);
+f(274877907200, 256);
+f(275951649025, 257);
+f(548682072575, 511);
+f(549755814400, 512);
+f(550829556225, 513);
+f(1098437886975, 1023);
+f(1099511628800, 1024);
+f(1100585370625, 1025);
+f(2197949515775, 2047);
+f(2199023257600, 2048);
+f(2200096999425, 2049);
+f(4396972773375, 4095);
+f(4398046515200, 4096);
+f(4399120257025, 4097);
+f(8795019288575, 8191);
+f(8796093030400, 8192);
+f(8797166772225, 8193);
+f(17591112318975, 16383);
+f(17592186060800, 16384);
+f(17593259802625, 16385);
+f(35183298379775, 32767);
+f(35184372121600, 32768);
+f(35185445863425, 32769);
+f(70367670501375, 65535);
+f(70368744243200, 65536);
+f(70369817985025, 65537);
+f(140736414744575, 131071);
+f(140737488486400, 131072);
+f(140738562228225, 131073);
+f(281473903230975, 262143);
+f(281474976972800, 262144);
+f(281476050714625, 262145);
+f(562948880203775, 524287);
+f(562949953945600, 524288);
+f(562951027687425, 524289);
+f(1125898834149375, 1048575);
+f(1125899907891200, 1048576);
+f(1125900981633025, 1048577);
+x = 2147483647;
+f(0, 0);
+f(2147483647, 1);
+f(4294967294, 2);
+f(6442450941, 3);
+f(8589934588, 4);
+f(10737418235, 5);
+f(15032385529, 7);
+f(17179869176, 8);
+f(19327352823, 9);
+f(32212254705, 15);
+f(34359738352, 16);
+f(36507221999, 17);
+f(66571993057, 31);
+f(68719476704, 32);
+f(70866960351, 33);
+f(135291469761, 63);
+f(137438953408, 64);
+f(139586437055, 65);
+f(272730423169, 127);
+f(274877906816, 128);
+f(277025390463, 129);
+f(547608329985, 255);
+f(549755813632, 256);
+f(551903297279, 257);
+f(1097364143617, 511);
+f(1099511627264, 512);
+f(1101659110911, 513);
+f(2196875770881, 1023);
+f(2199023254528, 1024);
+f(2201170738175, 1025);
+f(4395899025409, 2047);
+f(4398046509056, 2048);
+f(4400193992703, 2049);
+f(8793945534465, 4095);
+f(8796093018112, 4096);
+f(8798240501759, 4097);
+f(17590038552577, 8191);
+f(17592186036224, 8192);
+f(17594333519871, 8193);
+f(35182224588801, 16383);
+f(35184372072448, 16384);
+f(35186519556095, 16385);
+f(70366596661249, 32767);
+f(70368744144896, 32768);
+f(70370891628543, 32769);
+f(140735340806145, 65535);
+f(140737488289792, 65536);
+f(140739635773439, 65537);
+f(281472829095937, 131071);
+f(281474976579584, 131072);
+f(281477124063231, 131073);
+f(562947805675521, 262143);
+f(562949953159168, 262144);
+f(562952100642815, 262145);
+f(1125897758834689, 524287);
+f(1125899906318336, 524288);
+f(1125902053801983, 524289);
+x = 2147483648;
+f(0, 0);
+f(2147483648, 1);
+f(4294967296, 2);
+f(6442450944, 3);
+f(8589934592, 4);
+f(10737418240, 5);
+f(15032385536, 7);
+f(17179869184, 8);
+f(19327352832, 9);
+f(32212254720, 15);
+f(34359738368, 16);
+f(36507222016, 17);
+f(66571993088, 31);
+f(68719476736, 32);
+f(70866960384, 33);
+f(135291469824, 63);
+f(137438953472, 64);
+f(139586437120, 65);
+f(272730423296, 127);
+f(274877906944, 128);
+f(277025390592, 129);
+f(547608330240, 255);
+f(549755813888, 256);
+f(551903297536, 257);
+f(1097364144128, 511);
+f(1099511627776, 512);
+f(1101659111424, 513);
+f(2196875771904, 1023);
+f(2199023255552, 1024);
+f(2201170739200, 1025);
+f(4395899027456, 2047);
+f(4398046511104, 2048);
+f(4400193994752, 2049);
+f(8793945538560, 4095);
+f(8796093022208, 4096);
+f(8798240505856, 4097);
+f(17590038560768, 8191);
+f(17592186044416, 8192);
+f(17594333528064, 8193);
+f(35182224605184, 16383);
+f(35184372088832, 16384);
+f(35186519572480, 16385);
+f(70366596694016, 32767);
+f(70368744177664, 32768);
+f(70370891661312, 32769);
+f(140735340871680, 65535);
+f(140737488355328, 65536);
+f(140739635838976, 65537);
+f(281472829227008, 131071);
+f(281474976710656, 131072);
+f(281477124194304, 131073);
+f(562947805937664, 262143);
+f(562949953421312, 262144);
+f(562952100904960, 262145);
+f(1125897759358976, 524287);
+f(1125899906842624, 524288);
+f(1125902054326272, 524289);
+x = 2147483649;
+f(0, 0);
+f(2147483649, 1);
+f(4294967298, 2);
+f(6442450947, 3);
+f(8589934596, 4);
+f(10737418245, 5);
+f(15032385543, 7);
+f(17179869192, 8);
+f(19327352841, 9);
+f(32212254735, 15);
+f(34359738384, 16);
+f(36507222033, 17);
+f(66571993119, 31);
+f(68719476768, 32);
+f(70866960417, 33);
+f(135291469887, 63);
+f(137438953536, 64);
+f(139586437185, 65);
+f(272730423423, 127);
+f(274877907072, 128);
+f(277025390721, 129);
+f(547608330495, 255);
+f(549755814144, 256);
+f(551903297793, 257);
+f(1097364144639, 511);
+f(1099511628288, 512);
+f(1101659111937, 513);
+f(2196875772927, 1023);
+f(2199023256576, 1024);
+f(2201170740225, 1025);
+f(4395899029503, 2047);
+f(4398046513152, 2048);
+f(4400193996801, 2049);
+f(8793945542655, 4095);
+f(8796093026304, 4096);
+f(8798240509953, 4097);
+f(17590038568959, 8191);
+f(17592186052608, 8192);
+f(17594333536257, 8193);
+f(35182224621567, 16383);
+f(35184372105216, 16384);
+f(35186519588865, 16385);
+f(70366596726783, 32767);
+f(70368744210432, 32768);
+f(70370891694081, 32769);
+f(140735340937215, 65535);
+f(140737488420864, 65536);
+f(140739635904513, 65537);
+f(281472829358079, 131071);
+f(281474976841728, 131072);
+f(281477124325377, 131073);
+f(562947806199807, 262143);
+f(562949953683456, 262144);
+f(562952101167105, 262145);
+f(1125897759883263, 524287);
+f(1125899907366912, 524288);
+f(1125902054850561, 524289);
+x = 4294967295;
+f(0, 0);
+f(4294967295, 1);
+f(8589934590, 2);
+f(12884901885, 3);
+f(17179869180, 4);
+f(21474836475, 5);
+f(30064771065, 7);
+f(34359738360, 8);
+f(38654705655, 9);
+f(64424509425, 15);
+f(68719476720, 16);
+f(73014444015, 17);
+f(133143986145, 31);
+f(137438953440, 32);
+f(141733920735, 33);
+f(270582939585, 63);
+f(274877906880, 64);
+f(279172874175, 65);
+f(545460846465, 127);
+f(549755813760, 128);
+f(554050781055, 129);
+f(1095216660225, 255);
+f(1099511627520, 256);
+f(1103806594815, 257);
+f(2194728287745, 511);
+f(2199023255040, 512);
+f(2203318222335, 513);
+f(4393751542785, 1023);
+f(4398046510080, 1024);
+f(4402341477375, 1025);
+f(8791798052865, 2047);
+f(8796093020160, 2048);
+f(8800387987455, 2049);
+f(17587891073025, 4095);
+f(17592186040320, 4096);
+f(17596481007615, 4097);
+f(35180077113345, 8191);
+f(35184372080640, 8192);
+f(35188667047935, 8193);
+f(70364449193985, 16383);
+f(70368744161280, 16384);
+f(70373039128575, 16385);
+f(140733193355265, 32767);
+f(140737488322560, 32768);
+f(140741783289855, 32769);
+f(281470681677825, 65535);
+f(281474976645120, 65536);
+f(281479271612415, 65537);
+f(562945658322945, 131071);
+f(562949953290240, 131072);
+f(562954248257535, 131073);
+f(1125895611613185, 262143);
+f(1125899906580480, 262144);
+f(1125904201547775, 262145);
diff --git a/src/3rdparty/v8/test/mjsunit/mul-exhaustive.js b/src/3rdparty/v8/test/mjsunit/mul-exhaustive.js
deleted file mode 100644
index 12689db..0000000
--- a/src/3rdparty/v8/test/mjsunit/mul-exhaustive.js
+++ /dev/null
@@ -1,4629 +0,0 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-var x;
-
-// Converts a number to string respecting -0.
-function stringify(n) {
- if ((1 / n) === -Infinity) return "-0";
- return String(n);
-}
-
-function f(expected, y) {
- function testEval(string, x, y) {
- var mulFunction = Function("x, y", "return " + string);
- return mulFunction(x, y);
- }
- function mulTest(expected, x, y) {
- assertEquals(expected, x * y);
- assertEquals(expected, testEval(stringify(x) + " * y", x, y));
- assertEquals(expected, testEval("x * " + stringify(y), x, y));
- assertEquals(expected, testEval(stringify(x) + " * " + stringify(y), x, y));
- }
- mulTest(expected, x, y);
- mulTest(-expected, -x, y);
- mulTest(-expected, x, -y);
- mulTest(expected, -x, -y);
- if (x === y) return; // Symmetric cases not necessary.
- mulTest(expected, y, x);
- mulTest(-expected, -y, x);
- mulTest(-expected, y, -x);
- mulTest(expected, -y, -x);
-}
-
-x = 0;
-f(0, 0);
-x = 1;
-f(0, 0);
-f(1, 1);
-x = 2;
-f(0, 0);
-f(2, 1);
-f(4, 2);
-x = 3;
-f(0, 0);
-f(3, 1);
-f(6, 2);
-f(9, 3);
-x = 4;
-f(0, 0);
-f(4, 1);
-f(8, 2);
-f(12, 3);
-f(16, 4);
-x = 5;
-f(0, 0);
-f(5, 1);
-f(10, 2);
-f(15, 3);
-f(20, 4);
-f(25, 5);
-x = 7;
-f(0, 0);
-f(7, 1);
-f(14, 2);
-f(21, 3);
-f(28, 4);
-f(35, 5);
-f(49, 7);
-x = 8;
-f(0, 0);
-f(8, 1);
-f(16, 2);
-f(24, 3);
-f(32, 4);
-f(40, 5);
-f(56, 7);
-f(64, 8);
-x = 9;
-f(0, 0);
-f(9, 1);
-f(18, 2);
-f(27, 3);
-f(36, 4);
-f(45, 5);
-f(63, 7);
-f(72, 8);
-f(81, 9);
-x = 15;
-f(0, 0);
-f(15, 1);
-f(30, 2);
-f(45, 3);
-f(60, 4);
-f(75, 5);
-f(105, 7);
-f(120, 8);
-f(135, 9);
-f(225, 15);
-x = 16;
-f(0, 0);
-f(16, 1);
-f(32, 2);
-f(48, 3);
-f(64, 4);
-f(80, 5);
-f(112, 7);
-f(128, 8);
-f(144, 9);
-f(240, 15);
-f(256, 16);
-x = 17;
-f(0, 0);
-f(17, 1);
-f(34, 2);
-f(51, 3);
-f(68, 4);
-f(85, 5);
-f(119, 7);
-f(136, 8);
-f(153, 9);
-f(255, 15);
-f(272, 16);
-f(289, 17);
-x = 31;
-f(0, 0);
-f(31, 1);
-f(62, 2);
-f(93, 3);
-f(124, 4);
-f(155, 5);
-f(217, 7);
-f(248, 8);
-f(279, 9);
-f(465, 15);
-f(496, 16);
-f(527, 17);
-f(961, 31);
-x = 32;
-f(0, 0);
-f(32, 1);
-f(64, 2);
-f(96, 3);
-f(128, 4);
-f(160, 5);
-f(224, 7);
-f(256, 8);
-f(288, 9);
-f(480, 15);
-f(512, 16);
-f(544, 17);
-f(992, 31);
-f(1024, 32);
-x = 33;
-f(0, 0);
-f(33, 1);
-f(66, 2);
-f(99, 3);
-f(132, 4);
-f(165, 5);
-f(231, 7);
-f(264, 8);
-f(297, 9);
-f(495, 15);
-f(528, 16);
-f(561, 17);
-f(1023, 31);
-f(1056, 32);
-f(1089, 33);
-x = 63;
-f(0, 0);
-f(63, 1);
-f(126, 2);
-f(189, 3);
-f(252, 4);
-f(315, 5);
-f(441, 7);
-f(504, 8);
-f(567, 9);
-f(945, 15);
-f(1008, 16);
-f(1071, 17);
-f(1953, 31);
-f(2016, 32);
-f(2079, 33);
-f(3969, 63);
-x = 64;
-f(0, 0);
-f(64, 1);
-f(128, 2);
-f(192, 3);
-f(256, 4);
-f(320, 5);
-f(448, 7);
-f(512, 8);
-f(576, 9);
-f(960, 15);
-f(1024, 16);
-f(1088, 17);
-f(1984, 31);
-f(2048, 32);
-f(2112, 33);
-f(4032, 63);
-f(4096, 64);
-x = 65;
-f(0, 0);
-f(65, 1);
-f(130, 2);
-f(195, 3);
-f(260, 4);
-f(325, 5);
-f(455, 7);
-f(520, 8);
-f(585, 9);
-f(975, 15);
-f(1040, 16);
-f(1105, 17);
-f(2015, 31);
-f(2080, 32);
-f(2145, 33);
-f(4095, 63);
-f(4160, 64);
-f(4225, 65);
-x = 127;
-f(0, 0);
-f(127, 1);
-f(254, 2);
-f(381, 3);
-f(508, 4);
-f(635, 5);
-f(889, 7);
-f(1016, 8);
-f(1143, 9);
-f(1905, 15);
-f(2032, 16);
-f(2159, 17);
-f(3937, 31);
-f(4064, 32);
-f(4191, 33);
-f(8001, 63);
-f(8128, 64);
-f(8255, 65);
-f(16129, 127);
-x = 128;
-f(0, 0);
-f(128, 1);
-f(256, 2);
-f(384, 3);
-f(512, 4);
-f(640, 5);
-f(896, 7);
-f(1024, 8);
-f(1152, 9);
-f(1920, 15);
-f(2048, 16);
-f(2176, 17);
-f(3968, 31);
-f(4096, 32);
-f(4224, 33);
-f(8064, 63);
-f(8192, 64);
-f(8320, 65);
-f(16256, 127);
-f(16384, 128);
-x = 129;
-f(0, 0);
-f(129, 1);
-f(258, 2);
-f(387, 3);
-f(516, 4);
-f(645, 5);
-f(903, 7);
-f(1032, 8);
-f(1161, 9);
-f(1935, 15);
-f(2064, 16);
-f(2193, 17);
-f(3999, 31);
-f(4128, 32);
-f(4257, 33);
-f(8127, 63);
-f(8256, 64);
-f(8385, 65);
-f(16383, 127);
-f(16512, 128);
-f(16641, 129);
-x = 255;
-f(0, 0);
-f(255, 1);
-f(510, 2);
-f(765, 3);
-f(1020, 4);
-f(1275, 5);
-f(1785, 7);
-f(2040, 8);
-f(2295, 9);
-f(3825, 15);
-f(4080, 16);
-f(4335, 17);
-f(7905, 31);
-f(8160, 32);
-f(8415, 33);
-f(16065, 63);
-f(16320, 64);
-f(16575, 65);
-f(32385, 127);
-f(32640, 128);
-f(32895, 129);
-f(65025, 255);
-x = 256;
-f(0, 0);
-f(256, 1);
-f(512, 2);
-f(768, 3);
-f(1024, 4);
-f(1280, 5);
-f(1792, 7);
-f(2048, 8);
-f(2304, 9);
-f(3840, 15);
-f(4096, 16);
-f(4352, 17);
-f(7936, 31);
-f(8192, 32);
-f(8448, 33);
-f(16128, 63);
-f(16384, 64);
-f(16640, 65);
-f(32512, 127);
-f(32768, 128);
-f(33024, 129);
-f(65280, 255);
-f(65536, 256);
-x = 257;
-f(0, 0);
-f(257, 1);
-f(514, 2);
-f(771, 3);
-f(1028, 4);
-f(1285, 5);
-f(1799, 7);
-f(2056, 8);
-f(2313, 9);
-f(3855, 15);
-f(4112, 16);
-f(4369, 17);
-f(7967, 31);
-f(8224, 32);
-f(8481, 33);
-f(16191, 63);
-f(16448, 64);
-f(16705, 65);
-f(32639, 127);
-f(32896, 128);
-f(33153, 129);
-f(65535, 255);
-f(65792, 256);
-f(66049, 257);
-x = 511;
-f(0, 0);
-f(511, 1);
-f(1022, 2);
-f(1533, 3);
-f(2044, 4);
-f(2555, 5);
-f(3577, 7);
-f(4088, 8);
-f(4599, 9);
-f(7665, 15);
-f(8176, 16);
-f(8687, 17);
-f(15841, 31);
-f(16352, 32);
-f(16863, 33);
-f(32193, 63);
-f(32704, 64);
-f(33215, 65);
-f(64897, 127);
-f(65408, 128);
-f(65919, 129);
-f(130305, 255);
-f(130816, 256);
-f(131327, 257);
-f(261121, 511);
-x = 512;
-f(0, 0);
-f(512, 1);
-f(1024, 2);
-f(1536, 3);
-f(2048, 4);
-f(2560, 5);
-f(3584, 7);
-f(4096, 8);
-f(4608, 9);
-f(7680, 15);
-f(8192, 16);
-f(8704, 17);
-f(15872, 31);
-f(16384, 32);
-f(16896, 33);
-f(32256, 63);
-f(32768, 64);
-f(33280, 65);
-f(65024, 127);
-f(65536, 128);
-f(66048, 129);
-f(130560, 255);
-f(131072, 256);
-f(131584, 257);
-f(261632, 511);
-f(262144, 512);
-x = 513;
-f(0, 0);
-f(513, 1);
-f(1026, 2);
-f(1539, 3);
-f(2052, 4);
-f(2565, 5);
-f(3591, 7);
-f(4104, 8);
-f(4617, 9);
-f(7695, 15);
-f(8208, 16);
-f(8721, 17);
-f(15903, 31);
-f(16416, 32);
-f(16929, 33);
-f(32319, 63);
-f(32832, 64);
-f(33345, 65);
-f(65151, 127);
-f(65664, 128);
-f(66177, 129);
-f(130815, 255);
-f(131328, 256);
-f(131841, 257);
-f(262143, 511);
-f(262656, 512);
-f(263169, 513);
-x = 1023;
-f(0, 0);
-f(1023, 1);
-f(2046, 2);
-f(3069, 3);
-f(4092, 4);
-f(5115, 5);
-f(7161, 7);
-f(8184, 8);
-f(9207, 9);
-f(15345, 15);
-f(16368, 16);
-f(17391, 17);
-f(31713, 31);
-f(32736, 32);
-f(33759, 33);
-f(64449, 63);
-f(65472, 64);
-f(66495, 65);
-f(129921, 127);
-f(130944, 128);
-f(131967, 129);
-f(260865, 255);
-f(261888, 256);
-f(262911, 257);
-f(522753, 511);
-f(523776, 512);
-f(524799, 513);
-f(1046529, 1023);
-x = 1024;
-f(0, 0);
-f(1024, 1);
-f(2048, 2);
-f(3072, 3);
-f(4096, 4);
-f(5120, 5);
-f(7168, 7);
-f(8192, 8);
-f(9216, 9);
-f(15360, 15);
-f(16384, 16);
-f(17408, 17);
-f(31744, 31);
-f(32768, 32);
-f(33792, 33);
-f(64512, 63);
-f(65536, 64);
-f(66560, 65);
-f(130048, 127);
-f(131072, 128);
-f(132096, 129);
-f(261120, 255);
-f(262144, 256);
-f(263168, 257);
-f(523264, 511);
-f(524288, 512);
-f(525312, 513);
-f(1047552, 1023);
-f(1048576, 1024);
-x = 1025;
-f(0, 0);
-f(1025, 1);
-f(2050, 2);
-f(3075, 3);
-f(4100, 4);
-f(5125, 5);
-f(7175, 7);
-f(8200, 8);
-f(9225, 9);
-f(15375, 15);
-f(16400, 16);
-f(17425, 17);
-f(31775, 31);
-f(32800, 32);
-f(33825, 33);
-f(64575, 63);
-f(65600, 64);
-f(66625, 65);
-f(130175, 127);
-f(131200, 128);
-f(132225, 129);
-f(261375, 255);
-f(262400, 256);
-f(263425, 257);
-f(523775, 511);
-f(524800, 512);
-f(525825, 513);
-f(1048575, 1023);
-f(1049600, 1024);
-f(1050625, 1025);
-x = 2047;
-f(0, 0);
-f(2047, 1);
-f(4094, 2);
-f(6141, 3);
-f(8188, 4);
-f(10235, 5);
-f(14329, 7);
-f(16376, 8);
-f(18423, 9);
-f(30705, 15);
-f(32752, 16);
-f(34799, 17);
-f(63457, 31);
-f(65504, 32);
-f(67551, 33);
-f(128961, 63);
-f(131008, 64);
-f(133055, 65);
-f(259969, 127);
-f(262016, 128);
-f(264063, 129);
-f(521985, 255);
-f(524032, 256);
-f(526079, 257);
-f(1046017, 511);
-f(1048064, 512);
-f(1050111, 513);
-f(2094081, 1023);
-f(2096128, 1024);
-f(2098175, 1025);
-f(4190209, 2047);
-x = 2048;
-f(0, 0);
-f(2048, 1);
-f(4096, 2);
-f(6144, 3);
-f(8192, 4);
-f(10240, 5);
-f(14336, 7);
-f(16384, 8);
-f(18432, 9);
-f(30720, 15);
-f(32768, 16);
-f(34816, 17);
-f(63488, 31);
-f(65536, 32);
-f(67584, 33);
-f(129024, 63);
-f(131072, 64);
-f(133120, 65);
-f(260096, 127);
-f(262144, 128);
-f(264192, 129);
-f(522240, 255);
-f(524288, 256);
-f(526336, 257);
-f(1046528, 511);
-f(1048576, 512);
-f(1050624, 513);
-f(2095104, 1023);
-f(2097152, 1024);
-f(2099200, 1025);
-f(4192256, 2047);
-f(4194304, 2048);
-x = 2049;
-f(0, 0);
-f(2049, 1);
-f(4098, 2);
-f(6147, 3);
-f(8196, 4);
-f(10245, 5);
-f(14343, 7);
-f(16392, 8);
-f(18441, 9);
-f(30735, 15);
-f(32784, 16);
-f(34833, 17);
-f(63519, 31);
-f(65568, 32);
-f(67617, 33);
-f(129087, 63);
-f(131136, 64);
-f(133185, 65);
-f(260223, 127);
-f(262272, 128);
-f(264321, 129);
-f(522495, 255);
-f(524544, 256);
-f(526593, 257);
-f(1047039, 511);
-f(1049088, 512);
-f(1051137, 513);
-f(2096127, 1023);
-f(2098176, 1024);
-f(2100225, 1025);
-f(4194303, 2047);
-f(4196352, 2048);
-f(4198401, 2049);
-x = 4095;
-f(0, 0);
-f(4095, 1);
-f(8190, 2);
-f(12285, 3);
-f(16380, 4);
-f(20475, 5);
-f(28665, 7);
-f(32760, 8);
-f(36855, 9);
-f(61425, 15);
-f(65520, 16);
-f(69615, 17);
-f(126945, 31);
-f(131040, 32);
-f(135135, 33);
-f(257985, 63);
-f(262080, 64);
-f(266175, 65);
-f(520065, 127);
-f(524160, 128);
-f(528255, 129);
-f(1044225, 255);
-f(1048320, 256);
-f(1052415, 257);
-f(2092545, 511);
-f(2096640, 512);
-f(2100735, 513);
-f(4189185, 1023);
-f(4193280, 1024);
-f(4197375, 1025);
-f(8382465, 2047);
-f(8386560, 2048);
-f(8390655, 2049);
-f(16769025, 4095);
-x = 4096;
-f(0, 0);
-f(4096, 1);
-f(8192, 2);
-f(12288, 3);
-f(16384, 4);
-f(20480, 5);
-f(28672, 7);
-f(32768, 8);
-f(36864, 9);
-f(61440, 15);
-f(65536, 16);
-f(69632, 17);
-f(126976, 31);
-f(131072, 32);
-f(135168, 33);
-f(258048, 63);
-f(262144, 64);
-f(266240, 65);
-f(520192, 127);
-f(524288, 128);
-f(528384, 129);
-f(1044480, 255);
-f(1048576, 256);
-f(1052672, 257);
-f(2093056, 511);
-f(2097152, 512);
-f(2101248, 513);
-f(4190208, 1023);
-f(4194304, 1024);
-f(4198400, 1025);
-f(8384512, 2047);
-f(8388608, 2048);
-f(8392704, 2049);
-f(16773120, 4095);
-f(16777216, 4096);
-x = 4097;
-f(0, 0);
-f(4097, 1);
-f(8194, 2);
-f(12291, 3);
-f(16388, 4);
-f(20485, 5);
-f(28679, 7);
-f(32776, 8);
-f(36873, 9);
-f(61455, 15);
-f(65552, 16);
-f(69649, 17);
-f(127007, 31);
-f(131104, 32);
-f(135201, 33);
-f(258111, 63);
-f(262208, 64);
-f(266305, 65);
-f(520319, 127);
-f(524416, 128);
-f(528513, 129);
-f(1044735, 255);
-f(1048832, 256);
-f(1052929, 257);
-f(2093567, 511);
-f(2097664, 512);
-f(2101761, 513);
-f(4191231, 1023);
-f(4195328, 1024);
-f(4199425, 1025);
-f(8386559, 2047);
-f(8390656, 2048);
-f(8394753, 2049);
-f(16777215, 4095);
-f(16781312, 4096);
-f(16785409, 4097);
-x = 8191;
-f(0, 0);
-f(8191, 1);
-f(16382, 2);
-f(24573, 3);
-f(32764, 4);
-f(40955, 5);
-f(57337, 7);
-f(65528, 8);
-f(73719, 9);
-f(122865, 15);
-f(131056, 16);
-f(139247, 17);
-f(253921, 31);
-f(262112, 32);
-f(270303, 33);
-f(516033, 63);
-f(524224, 64);
-f(532415, 65);
-f(1040257, 127);
-f(1048448, 128);
-f(1056639, 129);
-f(2088705, 255);
-f(2096896, 256);
-f(2105087, 257);
-f(4185601, 511);
-f(4193792, 512);
-f(4201983, 513);
-f(8379393, 1023);
-f(8387584, 1024);
-f(8395775, 1025);
-f(16766977, 2047);
-f(16775168, 2048);
-f(16783359, 2049);
-f(33542145, 4095);
-f(33550336, 4096);
-f(33558527, 4097);
-f(67092481, 8191);
-x = 8192;
-f(0, 0);
-f(8192, 1);
-f(16384, 2);
-f(24576, 3);
-f(32768, 4);
-f(40960, 5);
-f(57344, 7);
-f(65536, 8);
-f(73728, 9);
-f(122880, 15);
-f(131072, 16);
-f(139264, 17);
-f(253952, 31);
-f(262144, 32);
-f(270336, 33);
-f(516096, 63);
-f(524288, 64);
-f(532480, 65);
-f(1040384, 127);
-f(1048576, 128);
-f(1056768, 129);
-f(2088960, 255);
-f(2097152, 256);
-f(2105344, 257);
-f(4186112, 511);
-f(4194304, 512);
-f(4202496, 513);
-f(8380416, 1023);
-f(8388608, 1024);
-f(8396800, 1025);
-f(16769024, 2047);
-f(16777216, 2048);
-f(16785408, 2049);
-f(33546240, 4095);
-f(33554432, 4096);
-f(33562624, 4097);
-f(67100672, 8191);
-f(67108864, 8192);
-x = 8193;
-f(0, 0);
-f(8193, 1);
-f(16386, 2);
-f(24579, 3);
-f(32772, 4);
-f(40965, 5);
-f(57351, 7);
-f(65544, 8);
-f(73737, 9);
-f(122895, 15);
-f(131088, 16);
-f(139281, 17);
-f(253983, 31);
-f(262176, 32);
-f(270369, 33);
-f(516159, 63);
-f(524352, 64);
-f(532545, 65);
-f(1040511, 127);
-f(1048704, 128);
-f(1056897, 129);
-f(2089215, 255);
-f(2097408, 256);
-f(2105601, 257);
-f(4186623, 511);
-f(4194816, 512);
-f(4203009, 513);
-f(8381439, 1023);
-f(8389632, 1024);
-f(8397825, 1025);
-f(16771071, 2047);
-f(16779264, 2048);
-f(16787457, 2049);
-f(33550335, 4095);
-f(33558528, 4096);
-f(33566721, 4097);
-f(67108863, 8191);
-f(67117056, 8192);
-f(67125249, 8193);
-x = 16383;
-f(0, 0);
-f(16383, 1);
-f(32766, 2);
-f(49149, 3);
-f(65532, 4);
-f(81915, 5);
-f(114681, 7);
-f(131064, 8);
-f(147447, 9);
-f(245745, 15);
-f(262128, 16);
-f(278511, 17);
-f(507873, 31);
-f(524256, 32);
-f(540639, 33);
-f(1032129, 63);
-f(1048512, 64);
-f(1064895, 65);
-f(2080641, 127);
-f(2097024, 128);
-f(2113407, 129);
-f(4177665, 255);
-f(4194048, 256);
-f(4210431, 257);
-f(8371713, 511);
-f(8388096, 512);
-f(8404479, 513);
-f(16759809, 1023);
-f(16776192, 1024);
-f(16792575, 1025);
-f(33536001, 2047);
-f(33552384, 2048);
-f(33568767, 2049);
-f(67088385, 4095);
-f(67104768, 4096);
-f(67121151, 4097);
-f(134193153, 8191);
-f(134209536, 8192);
-f(134225919, 8193);
-f(268402689, 16383);
-x = 16384;
-f(0, 0);
-f(16384, 1);
-f(32768, 2);
-f(49152, 3);
-f(65536, 4);
-f(81920, 5);
-f(114688, 7);
-f(131072, 8);
-f(147456, 9);
-f(245760, 15);
-f(262144, 16);
-f(278528, 17);
-f(507904, 31);
-f(524288, 32);
-f(540672, 33);
-f(1032192, 63);
-f(1048576, 64);
-f(1064960, 65);
-f(2080768, 127);
-f(2097152, 128);
-f(2113536, 129);
-f(4177920, 255);
-f(4194304, 256);
-f(4210688, 257);
-f(8372224, 511);
-f(8388608, 512);
-f(8404992, 513);
-f(16760832, 1023);
-f(16777216, 1024);
-f(16793600, 1025);
-f(33538048, 2047);
-f(33554432, 2048);
-f(33570816, 2049);
-f(67092480, 4095);
-f(67108864, 4096);
-f(67125248, 4097);
-f(134201344, 8191);
-f(134217728, 8192);
-f(134234112, 8193);
-f(268419072, 16383);
-f(268435456, 16384);
-x = 16385;
-f(0, 0);
-f(16385, 1);
-f(32770, 2);
-f(49155, 3);
-f(65540, 4);
-f(81925, 5);
-f(114695, 7);
-f(131080, 8);
-f(147465, 9);
-f(245775, 15);
-f(262160, 16);
-f(278545, 17);
-f(507935, 31);
-f(524320, 32);
-f(540705, 33);
-f(1032255, 63);
-f(1048640, 64);
-f(1065025, 65);
-f(2080895, 127);
-f(2097280, 128);
-f(2113665, 129);
-f(4178175, 255);
-f(4194560, 256);
-f(4210945, 257);
-f(8372735, 511);
-f(8389120, 512);
-f(8405505, 513);
-f(16761855, 1023);
-f(16778240, 1024);
-f(16794625, 1025);
-f(33540095, 2047);
-f(33556480, 2048);
-f(33572865, 2049);
-f(67096575, 4095);
-f(67112960, 4096);
-f(67129345, 4097);
-f(134209535, 8191);
-f(134225920, 8192);
-f(134242305, 8193);
-f(268435455, 16383);
-f(268451840, 16384);
-f(268468225, 16385);
-x = 32767;
-f(0, 0);
-f(32767, 1);
-f(65534, 2);
-f(98301, 3);
-f(131068, 4);
-f(163835, 5);
-f(229369, 7);
-f(262136, 8);
-f(294903, 9);
-f(491505, 15);
-f(524272, 16);
-f(557039, 17);
-f(1015777, 31);
-f(1048544, 32);
-f(1081311, 33);
-f(2064321, 63);
-f(2097088, 64);
-f(2129855, 65);
-f(4161409, 127);
-f(4194176, 128);
-f(4226943, 129);
-f(8355585, 255);
-f(8388352, 256);
-f(8421119, 257);
-f(16743937, 511);
-f(16776704, 512);
-f(16809471, 513);
-f(33520641, 1023);
-f(33553408, 1024);
-f(33586175, 1025);
-f(67074049, 2047);
-f(67106816, 2048);
-f(67139583, 2049);
-f(134180865, 4095);
-f(134213632, 4096);
-f(134246399, 4097);
-f(268394497, 8191);
-f(268427264, 8192);
-f(268460031, 8193);
-f(536821761, 16383);
-f(536854528, 16384);
-f(536887295, 16385);
-f(1073676289, 32767);
-x = 32768;
-f(0, 0);
-f(32768, 1);
-f(65536, 2);
-f(98304, 3);
-f(131072, 4);
-f(163840, 5);
-f(229376, 7);
-f(262144, 8);
-f(294912, 9);
-f(491520, 15);
-f(524288, 16);
-f(557056, 17);
-f(1015808, 31);
-f(1048576, 32);
-f(1081344, 33);
-f(2064384, 63);
-f(2097152, 64);
-f(2129920, 65);
-f(4161536, 127);
-f(4194304, 128);
-f(4227072, 129);
-f(8355840, 255);
-f(8388608, 256);
-f(8421376, 257);
-f(16744448, 511);
-f(16777216, 512);
-f(16809984, 513);
-f(33521664, 1023);
-f(33554432, 1024);
-f(33587200, 1025);
-f(67076096, 2047);
-f(67108864, 2048);
-f(67141632, 2049);
-f(134184960, 4095);
-f(134217728, 4096);
-f(134250496, 4097);
-f(268402688, 8191);
-f(268435456, 8192);
-f(268468224, 8193);
-f(536838144, 16383);
-f(536870912, 16384);
-f(536903680, 16385);
-f(1073709056, 32767);
-f(1073741824, 32768);
-x = 32769;
-f(0, 0);
-f(32769, 1);
-f(65538, 2);
-f(98307, 3);
-f(131076, 4);
-f(163845, 5);
-f(229383, 7);
-f(262152, 8);
-f(294921, 9);
-f(491535, 15);
-f(524304, 16);
-f(557073, 17);
-f(1015839, 31);
-f(1048608, 32);
-f(1081377, 33);
-f(2064447, 63);
-f(2097216, 64);
-f(2129985, 65);
-f(4161663, 127);
-f(4194432, 128);
-f(4227201, 129);
-f(8356095, 255);
-f(8388864, 256);
-f(8421633, 257);
-f(16744959, 511);
-f(16777728, 512);
-f(16810497, 513);
-f(33522687, 1023);
-f(33555456, 1024);
-f(33588225, 1025);
-f(67078143, 2047);
-f(67110912, 2048);
-f(67143681, 2049);
-f(134189055, 4095);
-f(134221824, 4096);
-f(134254593, 4097);
-f(268410879, 8191);
-f(268443648, 8192);
-f(268476417, 8193);
-f(536854527, 16383);
-f(536887296, 16384);
-f(536920065, 16385);
-f(1073741823, 32767);
-f(1073774592, 32768);
-f(1073807361, 32769);
-x = 65535;
-f(0, 0);
-f(65535, 1);
-f(131070, 2);
-f(196605, 3);
-f(262140, 4);
-f(327675, 5);
-f(458745, 7);
-f(524280, 8);
-f(589815, 9);
-f(983025, 15);
-f(1048560, 16);
-f(1114095, 17);
-f(2031585, 31);
-f(2097120, 32);
-f(2162655, 33);
-f(4128705, 63);
-f(4194240, 64);
-f(4259775, 65);
-f(8322945, 127);
-f(8388480, 128);
-f(8454015, 129);
-f(16711425, 255);
-f(16776960, 256);
-f(16842495, 257);
-f(33488385, 511);
-f(33553920, 512);
-f(33619455, 513);
-f(67042305, 1023);
-f(67107840, 1024);
-f(67173375, 1025);
-f(134150145, 2047);
-f(134215680, 2048);
-f(134281215, 2049);
-f(268365825, 4095);
-f(268431360, 4096);
-f(268496895, 4097);
-f(536797185, 8191);
-f(536862720, 8192);
-f(536928255, 8193);
-f(1073659905, 16383);
-f(1073725440, 16384);
-f(1073790975, 16385);
-f(2147385345, 32767);
-f(2147450880, 32768);
-f(2147516415, 32769);
-f(4294836225, 65535);
-x = 65536;
-f(0, 0);
-f(65536, 1);
-f(131072, 2);
-f(196608, 3);
-f(262144, 4);
-f(327680, 5);
-f(458752, 7);
-f(524288, 8);
-f(589824, 9);
-f(983040, 15);
-f(1048576, 16);
-f(1114112, 17);
-f(2031616, 31);
-f(2097152, 32);
-f(2162688, 33);
-f(4128768, 63);
-f(4194304, 64);
-f(4259840, 65);
-f(8323072, 127);
-f(8388608, 128);
-f(8454144, 129);
-f(16711680, 255);
-f(16777216, 256);
-f(16842752, 257);
-f(33488896, 511);
-f(33554432, 512);
-f(33619968, 513);
-f(67043328, 1023);
-f(67108864, 1024);
-f(67174400, 1025);
-f(134152192, 2047);
-f(134217728, 2048);
-f(134283264, 2049);
-f(268369920, 4095);
-f(268435456, 4096);
-f(268500992, 4097);
-f(536805376, 8191);
-f(536870912, 8192);
-f(536936448, 8193);
-f(1073676288, 16383);
-f(1073741824, 16384);
-f(1073807360, 16385);
-f(2147418112, 32767);
-f(2147483648, 32768);
-f(2147549184, 32769);
-f(4294901760, 65535);
-f(4294967296, 65536);
-x = 65537;
-f(0, 0);
-f(65537, 1);
-f(131074, 2);
-f(196611, 3);
-f(262148, 4);
-f(327685, 5);
-f(458759, 7);
-f(524296, 8);
-f(589833, 9);
-f(983055, 15);
-f(1048592, 16);
-f(1114129, 17);
-f(2031647, 31);
-f(2097184, 32);
-f(2162721, 33);
-f(4128831, 63);
-f(4194368, 64);
-f(4259905, 65);
-f(8323199, 127);
-f(8388736, 128);
-f(8454273, 129);
-f(16711935, 255);
-f(16777472, 256);
-f(16843009, 257);
-f(33489407, 511);
-f(33554944, 512);
-f(33620481, 513);
-f(67044351, 1023);
-f(67109888, 1024);
-f(67175425, 1025);
-f(134154239, 2047);
-f(134219776, 2048);
-f(134285313, 2049);
-f(268374015, 4095);
-f(268439552, 4096);
-f(268505089, 4097);
-f(536813567, 8191);
-f(536879104, 8192);
-f(536944641, 8193);
-f(1073692671, 16383);
-f(1073758208, 16384);
-f(1073823745, 16385);
-f(2147450879, 32767);
-f(2147516416, 32768);
-f(2147581953, 32769);
-f(4294967295, 65535);
-f(4295032832, 65536);
-f(4295098369, 65537);
-x = 131071;
-f(0, 0);
-f(131071, 1);
-f(262142, 2);
-f(393213, 3);
-f(524284, 4);
-f(655355, 5);
-f(917497, 7);
-f(1048568, 8);
-f(1179639, 9);
-f(1966065, 15);
-f(2097136, 16);
-f(2228207, 17);
-f(4063201, 31);
-f(4194272, 32);
-f(4325343, 33);
-f(8257473, 63);
-f(8388544, 64);
-f(8519615, 65);
-f(16646017, 127);
-f(16777088, 128);
-f(16908159, 129);
-f(33423105, 255);
-f(33554176, 256);
-f(33685247, 257);
-f(66977281, 511);
-f(67108352, 512);
-f(67239423, 513);
-f(134085633, 1023);
-f(134216704, 1024);
-f(134347775, 1025);
-f(268302337, 2047);
-f(268433408, 2048);
-f(268564479, 2049);
-f(536735745, 4095);
-f(536866816, 4096);
-f(536997887, 4097);
-f(1073602561, 8191);
-f(1073733632, 8192);
-f(1073864703, 8193);
-f(2147336193, 16383);
-f(2147467264, 16384);
-f(2147598335, 16385);
-f(4294803457, 32767);
-f(4294934528, 32768);
-f(4295065599, 32769);
-f(8589737985, 65535);
-f(8589869056, 65536);
-f(8590000127, 65537);
-f(17179607041, 131071);
-x = 131072;
-f(0, 0);
-f(131072, 1);
-f(262144, 2);
-f(393216, 3);
-f(524288, 4);
-f(655360, 5);
-f(917504, 7);
-f(1048576, 8);
-f(1179648, 9);
-f(1966080, 15);
-f(2097152, 16);
-f(2228224, 17);
-f(4063232, 31);
-f(4194304, 32);
-f(4325376, 33);
-f(8257536, 63);
-f(8388608, 64);
-f(8519680, 65);
-f(16646144, 127);
-f(16777216, 128);
-f(16908288, 129);
-f(33423360, 255);
-f(33554432, 256);
-f(33685504, 257);
-f(66977792, 511);
-f(67108864, 512);
-f(67239936, 513);
-f(134086656, 1023);
-f(134217728, 1024);
-f(134348800, 1025);
-f(268304384, 2047);
-f(268435456, 2048);
-f(268566528, 2049);
-f(536739840, 4095);
-f(536870912, 4096);
-f(537001984, 4097);
-f(1073610752, 8191);
-f(1073741824, 8192);
-f(1073872896, 8193);
-f(2147352576, 16383);
-f(2147483648, 16384);
-f(2147614720, 16385);
-f(4294836224, 32767);
-f(4294967296, 32768);
-f(4295098368, 32769);
-f(8589803520, 65535);
-f(8589934592, 65536);
-f(8590065664, 65537);
-f(17179738112, 131071);
-f(17179869184, 131072);
-x = 131073;
-f(0, 0);
-f(131073, 1);
-f(262146, 2);
-f(393219, 3);
-f(524292, 4);
-f(655365, 5);
-f(917511, 7);
-f(1048584, 8);
-f(1179657, 9);
-f(1966095, 15);
-f(2097168, 16);
-f(2228241, 17);
-f(4063263, 31);
-f(4194336, 32);
-f(4325409, 33);
-f(8257599, 63);
-f(8388672, 64);
-f(8519745, 65);
-f(16646271, 127);
-f(16777344, 128);
-f(16908417, 129);
-f(33423615, 255);
-f(33554688, 256);
-f(33685761, 257);
-f(66978303, 511);
-f(67109376, 512);
-f(67240449, 513);
-f(134087679, 1023);
-f(134218752, 1024);
-f(134349825, 1025);
-f(268306431, 2047);
-f(268437504, 2048);
-f(268568577, 2049);
-f(536743935, 4095);
-f(536875008, 4096);
-f(537006081, 4097);
-f(1073618943, 8191);
-f(1073750016, 8192);
-f(1073881089, 8193);
-f(2147368959, 16383);
-f(2147500032, 16384);
-f(2147631105, 16385);
-f(4294868991, 32767);
-f(4295000064, 32768);
-f(4295131137, 32769);
-f(8589869055, 65535);
-f(8590000128, 65536);
-f(8590131201, 65537);
-f(17179869183, 131071);
-f(17180000256, 131072);
-f(17180131329, 131073);
-x = 262143;
-f(0, 0);
-f(262143, 1);
-f(524286, 2);
-f(786429, 3);
-f(1048572, 4);
-f(1310715, 5);
-f(1835001, 7);
-f(2097144, 8);
-f(2359287, 9);
-f(3932145, 15);
-f(4194288, 16);
-f(4456431, 17);
-f(8126433, 31);
-f(8388576, 32);
-f(8650719, 33);
-f(16515009, 63);
-f(16777152, 64);
-f(17039295, 65);
-f(33292161, 127);
-f(33554304, 128);
-f(33816447, 129);
-f(66846465, 255);
-f(67108608, 256);
-f(67370751, 257);
-f(133955073, 511);
-f(134217216, 512);
-f(134479359, 513);
-f(268172289, 1023);
-f(268434432, 1024);
-f(268696575, 1025);
-f(536606721, 2047);
-f(536868864, 2048);
-f(537131007, 2049);
-f(1073475585, 4095);
-f(1073737728, 4096);
-f(1073999871, 4097);
-f(2147213313, 8191);
-f(2147475456, 8192);
-f(2147737599, 8193);
-f(4294688769, 16383);
-f(4294950912, 16384);
-f(4295213055, 16385);
-f(8589639681, 32767);
-f(8589901824, 32768);
-f(8590163967, 32769);
-f(17179541505, 65535);
-f(17179803648, 65536);
-f(17180065791, 65537);
-f(34359345153, 131071);
-f(34359607296, 131072);
-f(34359869439, 131073);
-f(68718952449, 262143);
-x = 262144;
-f(0, 0);
-f(262144, 1);
-f(524288, 2);
-f(786432, 3);
-f(1048576, 4);
-f(1310720, 5);
-f(1835008, 7);
-f(2097152, 8);
-f(2359296, 9);
-f(3932160, 15);
-f(4194304, 16);
-f(4456448, 17);
-f(8126464, 31);
-f(8388608, 32);
-f(8650752, 33);
-f(16515072, 63);
-f(16777216, 64);
-f(17039360, 65);
-f(33292288, 127);
-f(33554432, 128);
-f(33816576, 129);
-f(66846720, 255);
-f(67108864, 256);
-f(67371008, 257);
-f(133955584, 511);
-f(134217728, 512);
-f(134479872, 513);
-f(268173312, 1023);
-f(268435456, 1024);
-f(268697600, 1025);
-f(536608768, 2047);
-f(536870912, 2048);
-f(537133056, 2049);
-f(1073479680, 4095);
-f(1073741824, 4096);
-f(1074003968, 4097);
-f(2147221504, 8191);
-f(2147483648, 8192);
-f(2147745792, 8193);
-f(4294705152, 16383);
-f(4294967296, 16384);
-f(4295229440, 16385);
-f(8589672448, 32767);
-f(8589934592, 32768);
-f(8590196736, 32769);
-f(17179607040, 65535);
-f(17179869184, 65536);
-f(17180131328, 65537);
-f(34359476224, 131071);
-f(34359738368, 131072);
-f(34360000512, 131073);
-f(68719214592, 262143);
-f(68719476736, 262144);
-x = 262145;
-f(0, 0);
-f(262145, 1);
-f(524290, 2);
-f(786435, 3);
-f(1048580, 4);
-f(1310725, 5);
-f(1835015, 7);
-f(2097160, 8);
-f(2359305, 9);
-f(3932175, 15);
-f(4194320, 16);
-f(4456465, 17);
-f(8126495, 31);
-f(8388640, 32);
-f(8650785, 33);
-f(16515135, 63);
-f(16777280, 64);
-f(17039425, 65);
-f(33292415, 127);
-f(33554560, 128);
-f(33816705, 129);
-f(66846975, 255);
-f(67109120, 256);
-f(67371265, 257);
-f(133956095, 511);
-f(134218240, 512);
-f(134480385, 513);
-f(268174335, 1023);
-f(268436480, 1024);
-f(268698625, 1025);
-f(536610815, 2047);
-f(536872960, 2048);
-f(537135105, 2049);
-f(1073483775, 4095);
-f(1073745920, 4096);
-f(1074008065, 4097);
-f(2147229695, 8191);
-f(2147491840, 8192);
-f(2147753985, 8193);
-f(4294721535, 16383);
-f(4294983680, 16384);
-f(4295245825, 16385);
-f(8589705215, 32767);
-f(8589967360, 32768);
-f(8590229505, 32769);
-f(17179672575, 65535);
-f(17179934720, 65536);
-f(17180196865, 65537);
-f(34359607295, 131071);
-f(34359869440, 131072);
-f(34360131585, 131073);
-f(68719476735, 262143);
-f(68719738880, 262144);
-f(68720001025, 262145);
-x = 524287;
-f(0, 0);
-f(524287, 1);
-f(1048574, 2);
-f(1572861, 3);
-f(2097148, 4);
-f(2621435, 5);
-f(3670009, 7);
-f(4194296, 8);
-f(4718583, 9);
-f(7864305, 15);
-f(8388592, 16);
-f(8912879, 17);
-f(16252897, 31);
-f(16777184, 32);
-f(17301471, 33);
-f(33030081, 63);
-f(33554368, 64);
-f(34078655, 65);
-f(66584449, 127);
-f(67108736, 128);
-f(67633023, 129);
-f(133693185, 255);
-f(134217472, 256);
-f(134741759, 257);
-f(267910657, 511);
-f(268434944, 512);
-f(268959231, 513);
-f(536345601, 1023);
-f(536869888, 1024);
-f(537394175, 1025);
-f(1073215489, 2047);
-f(1073739776, 2048);
-f(1074264063, 2049);
-f(2146955265, 4095);
-f(2147479552, 4096);
-f(2148003839, 4097);
-f(4294434817, 8191);
-f(4294959104, 8192);
-f(4295483391, 8193);
-f(8589393921, 16383);
-f(8589918208, 16384);
-f(8590442495, 16385);
-f(17179312129, 32767);
-f(17179836416, 32768);
-f(17180360703, 32769);
-f(34359148545, 65535);
-f(34359672832, 65536);
-f(34360197119, 65537);
-f(68718821377, 131071);
-f(68719345664, 131072);
-f(68719869951, 131073);
-f(137438167041, 262143);
-f(137438691328, 262144);
-f(137439215615, 262145);
-f(274876858369, 524287);
-x = 524288;
-f(0, 0);
-f(524288, 1);
-f(1048576, 2);
-f(1572864, 3);
-f(2097152, 4);
-f(2621440, 5);
-f(3670016, 7);
-f(4194304, 8);
-f(4718592, 9);
-f(7864320, 15);
-f(8388608, 16);
-f(8912896, 17);
-f(16252928, 31);
-f(16777216, 32);
-f(17301504, 33);
-f(33030144, 63);
-f(33554432, 64);
-f(34078720, 65);
-f(66584576, 127);
-f(67108864, 128);
-f(67633152, 129);
-f(133693440, 255);
-f(134217728, 256);
-f(134742016, 257);
-f(267911168, 511);
-f(268435456, 512);
-f(268959744, 513);
-f(536346624, 1023);
-f(536870912, 1024);
-f(537395200, 1025);
-f(1073217536, 2047);
-f(1073741824, 2048);
-f(1074266112, 2049);
-f(2146959360, 4095);
-f(2147483648, 4096);
-f(2148007936, 4097);
-f(4294443008, 8191);
-f(4294967296, 8192);
-f(4295491584, 8193);
-f(8589410304, 16383);
-f(8589934592, 16384);
-f(8590458880, 16385);
-f(17179344896, 32767);
-f(17179869184, 32768);
-f(17180393472, 32769);
-f(34359214080, 65535);
-f(34359738368, 65536);
-f(34360262656, 65537);
-f(68718952448, 131071);
-f(68719476736, 131072);
-f(68720001024, 131073);
-f(137438429184, 262143);
-f(137438953472, 262144);
-f(137439477760, 262145);
-f(274877382656, 524287);
-f(274877906944, 524288);
-x = 524289;
-f(0, 0);
-f(524289, 1);
-f(1048578, 2);
-f(1572867, 3);
-f(2097156, 4);
-f(2621445, 5);
-f(3670023, 7);
-f(4194312, 8);
-f(4718601, 9);
-f(7864335, 15);
-f(8388624, 16);
-f(8912913, 17);
-f(16252959, 31);
-f(16777248, 32);
-f(17301537, 33);
-f(33030207, 63);
-f(33554496, 64);
-f(34078785, 65);
-f(66584703, 127);
-f(67108992, 128);
-f(67633281, 129);
-f(133693695, 255);
-f(134217984, 256);
-f(134742273, 257);
-f(267911679, 511);
-f(268435968, 512);
-f(268960257, 513);
-f(536347647, 1023);
-f(536871936, 1024);
-f(537396225, 1025);
-f(1073219583, 2047);
-f(1073743872, 2048);
-f(1074268161, 2049);
-f(2146963455, 4095);
-f(2147487744, 4096);
-f(2148012033, 4097);
-f(4294451199, 8191);
-f(4294975488, 8192);
-f(4295499777, 8193);
-f(8589426687, 16383);
-f(8589950976, 16384);
-f(8590475265, 16385);
-f(17179377663, 32767);
-f(17179901952, 32768);
-f(17180426241, 32769);
-f(34359279615, 65535);
-f(34359803904, 65536);
-f(34360328193, 65537);
-f(68719083519, 131071);
-f(68719607808, 131072);
-f(68720132097, 131073);
-f(137438691327, 262143);
-f(137439215616, 262144);
-f(137439739905, 262145);
-f(274877906943, 524287);
-f(274878431232, 524288);
-f(274878955521, 524289);
-x = 1048575;
-f(0, 0);
-f(1048575, 1);
-f(2097150, 2);
-f(3145725, 3);
-f(4194300, 4);
-f(5242875, 5);
-f(7340025, 7);
-f(8388600, 8);
-f(9437175, 9);
-f(15728625, 15);
-f(16777200, 16);
-f(17825775, 17);
-f(32505825, 31);
-f(33554400, 32);
-f(34602975, 33);
-f(66060225, 63);
-f(67108800, 64);
-f(68157375, 65);
-f(133169025, 127);
-f(134217600, 128);
-f(135266175, 129);
-f(267386625, 255);
-f(268435200, 256);
-f(269483775, 257);
-f(535821825, 511);
-f(536870400, 512);
-f(537918975, 513);
-f(1072692225, 1023);
-f(1073740800, 1024);
-f(1074789375, 1025);
-f(2146433025, 2047);
-f(2147481600, 2048);
-f(2148530175, 2049);
-f(4293914625, 4095);
-f(4294963200, 4096);
-f(4296011775, 4097);
-f(8588877825, 8191);
-f(8589926400, 8192);
-f(8590974975, 8193);
-f(17178804225, 16383);
-f(17179852800, 16384);
-f(17180901375, 16385);
-f(34358657025, 32767);
-f(34359705600, 32768);
-f(34360754175, 32769);
-f(68718362625, 65535);
-f(68719411200, 65536);
-f(68720459775, 65537);
-f(137437773825, 131071);
-f(137438822400, 131072);
-f(137439870975, 131073);
-f(274876596225, 262143);
-f(274877644800, 262144);
-f(274878693375, 262145);
-f(549754241025, 524287);
-f(549755289600, 524288);
-f(549756338175, 524289);
-f(1099509530625, 1048575);
-x = 1048576;
-f(0, 0);
-f(1048576, 1);
-f(2097152, 2);
-f(3145728, 3);
-f(4194304, 4);
-f(5242880, 5);
-f(7340032, 7);
-f(8388608, 8);
-f(9437184, 9);
-f(15728640, 15);
-f(16777216, 16);
-f(17825792, 17);
-f(32505856, 31);
-f(33554432, 32);
-f(34603008, 33);
-f(66060288, 63);
-f(67108864, 64);
-f(68157440, 65);
-f(133169152, 127);
-f(134217728, 128);
-f(135266304, 129);
-f(267386880, 255);
-f(268435456, 256);
-f(269484032, 257);
-f(535822336, 511);
-f(536870912, 512);
-f(537919488, 513);
-f(1072693248, 1023);
-f(1073741824, 1024);
-f(1074790400, 1025);
-f(2146435072, 2047);
-f(2147483648, 2048);
-f(2148532224, 2049);
-f(4293918720, 4095);
-f(4294967296, 4096);
-f(4296015872, 4097);
-f(8588886016, 8191);
-f(8589934592, 8192);
-f(8590983168, 8193);
-f(17178820608, 16383);
-f(17179869184, 16384);
-f(17180917760, 16385);
-f(34358689792, 32767);
-f(34359738368, 32768);
-f(34360786944, 32769);
-f(68718428160, 65535);
-f(68719476736, 65536);
-f(68720525312, 65537);
-f(137437904896, 131071);
-f(137438953472, 131072);
-f(137440002048, 131073);
-f(274876858368, 262143);
-f(274877906944, 262144);
-f(274878955520, 262145);
-f(549754765312, 524287);
-f(549755813888, 524288);
-f(549756862464, 524289);
-f(1099510579200, 1048575);
-f(1099511627776, 1048576);
-x = 1048577;
-f(0, 0);
-f(1048577, 1);
-f(2097154, 2);
-f(3145731, 3);
-f(4194308, 4);
-f(5242885, 5);
-f(7340039, 7);
-f(8388616, 8);
-f(9437193, 9);
-f(15728655, 15);
-f(16777232, 16);
-f(17825809, 17);
-f(32505887, 31);
-f(33554464, 32);
-f(34603041, 33);
-f(66060351, 63);
-f(67108928, 64);
-f(68157505, 65);
-f(133169279, 127);
-f(134217856, 128);
-f(135266433, 129);
-f(267387135, 255);
-f(268435712, 256);
-f(269484289, 257);
-f(535822847, 511);
-f(536871424, 512);
-f(537920001, 513);
-f(1072694271, 1023);
-f(1073742848, 1024);
-f(1074791425, 1025);
-f(2146437119, 2047);
-f(2147485696, 2048);
-f(2148534273, 2049);
-f(4293922815, 4095);
-f(4294971392, 4096);
-f(4296019969, 4097);
-f(8588894207, 8191);
-f(8589942784, 8192);
-f(8590991361, 8193);
-f(17178836991, 16383);
-f(17179885568, 16384);
-f(17180934145, 16385);
-f(34358722559, 32767);
-f(34359771136, 32768);
-f(34360819713, 32769);
-f(68718493695, 65535);
-f(68719542272, 65536);
-f(68720590849, 65537);
-f(137438035967, 131071);
-f(137439084544, 131072);
-f(137440133121, 131073);
-f(274877120511, 262143);
-f(274878169088, 262144);
-f(274879217665, 262145);
-f(549755289599, 524287);
-f(549756338176, 524288);
-f(549757386753, 524289);
-f(1099511627775, 1048575);
-f(1099512676352, 1048576);
-f(1099513724929, 1048577);
-x = 2097151;
-f(0, 0);
-f(2097151, 1);
-f(4194302, 2);
-f(6291453, 3);
-f(8388604, 4);
-f(10485755, 5);
-f(14680057, 7);
-f(16777208, 8);
-f(18874359, 9);
-f(31457265, 15);
-f(33554416, 16);
-f(35651567, 17);
-f(65011681, 31);
-f(67108832, 32);
-f(69205983, 33);
-f(132120513, 63);
-f(134217664, 64);
-f(136314815, 65);
-f(266338177, 127);
-f(268435328, 128);
-f(270532479, 129);
-f(534773505, 255);
-f(536870656, 256);
-f(538967807, 257);
-f(1071644161, 511);
-f(1073741312, 512);
-f(1075838463, 513);
-f(2145385473, 1023);
-f(2147482624, 1024);
-f(2149579775, 1025);
-f(4292868097, 2047);
-f(4294965248, 2048);
-f(4297062399, 2049);
-f(8587833345, 4095);
-f(8589930496, 4096);
-f(8592027647, 4097);
-f(17177763841, 8191);
-f(17179860992, 8192);
-f(17181958143, 8193);
-f(34357624833, 16383);
-f(34359721984, 16384);
-f(34361819135, 16385);
-f(68717346817, 32767);
-f(68719443968, 32768);
-f(68721541119, 32769);
-f(137436790785, 65535);
-f(137438887936, 65536);
-f(137440985087, 65537);
-f(274875678721, 131071);
-f(274877775872, 131072);
-f(274879873023, 131073);
-f(549753454593, 262143);
-f(549755551744, 262144);
-f(549757648895, 262145);
-f(1099509006337, 524287);
-f(1099511103488, 524288);
-f(1099513200639, 524289);
-f(2199020109825, 1048575);
-f(2199022206976, 1048576);
-f(2199024304127, 1048577);
-f(4398042316801, 2097151);
-x = 2097152;
-f(0, 0);
-f(2097152, 1);
-f(4194304, 2);
-f(6291456, 3);
-f(8388608, 4);
-f(10485760, 5);
-f(14680064, 7);
-f(16777216, 8);
-f(18874368, 9);
-f(31457280, 15);
-f(33554432, 16);
-f(35651584, 17);
-f(65011712, 31);
-f(67108864, 32);
-f(69206016, 33);
-f(132120576, 63);
-f(134217728, 64);
-f(136314880, 65);
-f(266338304, 127);
-f(268435456, 128);
-f(270532608, 129);
-f(534773760, 255);
-f(536870912, 256);
-f(538968064, 257);
-f(1071644672, 511);
-f(1073741824, 512);
-f(1075838976, 513);
-f(2145386496, 1023);
-f(2147483648, 1024);
-f(2149580800, 1025);
-f(4292870144, 2047);
-f(4294967296, 2048);
-f(4297064448, 2049);
-f(8587837440, 4095);
-f(8589934592, 4096);
-f(8592031744, 4097);
-f(17177772032, 8191);
-f(17179869184, 8192);
-f(17181966336, 8193);
-f(34357641216, 16383);
-f(34359738368, 16384);
-f(34361835520, 16385);
-f(68717379584, 32767);
-f(68719476736, 32768);
-f(68721573888, 32769);
-f(137436856320, 65535);
-f(137438953472, 65536);
-f(137441050624, 65537);
-f(274875809792, 131071);
-f(274877906944, 131072);
-f(274880004096, 131073);
-f(549753716736, 262143);
-f(549755813888, 262144);
-f(549757911040, 262145);
-f(1099509530624, 524287);
-f(1099511627776, 524288);
-f(1099513724928, 524289);
-f(2199021158400, 1048575);
-f(2199023255552, 1048576);
-f(2199025352704, 1048577);
-f(4398044413952, 2097151);
-f(4398046511104, 2097152);
-x = 2097153;
-f(0, 0);
-f(2097153, 1);
-f(4194306, 2);
-f(6291459, 3);
-f(8388612, 4);
-f(10485765, 5);
-f(14680071, 7);
-f(16777224, 8);
-f(18874377, 9);
-f(31457295, 15);
-f(33554448, 16);
-f(35651601, 17);
-f(65011743, 31);
-f(67108896, 32);
-f(69206049, 33);
-f(132120639, 63);
-f(134217792, 64);
-f(136314945, 65);
-f(266338431, 127);
-f(268435584, 128);
-f(270532737, 129);
-f(534774015, 255);
-f(536871168, 256);
-f(538968321, 257);
-f(1071645183, 511);
-f(1073742336, 512);
-f(1075839489, 513);
-f(2145387519, 1023);
-f(2147484672, 1024);
-f(2149581825, 1025);
-f(4292872191, 2047);
-f(4294969344, 2048);
-f(4297066497, 2049);
-f(8587841535, 4095);
-f(8589938688, 4096);
-f(8592035841, 4097);
-f(17177780223, 8191);
-f(17179877376, 8192);
-f(17181974529, 8193);
-f(34357657599, 16383);
-f(34359754752, 16384);
-f(34361851905, 16385);
-f(68717412351, 32767);
-f(68719509504, 32768);
-f(68721606657, 32769);
-f(137436921855, 65535);
-f(137439019008, 65536);
-f(137441116161, 65537);
-f(274875940863, 131071);
-f(274878038016, 131072);
-f(274880135169, 131073);
-f(549753978879, 262143);
-f(549756076032, 262144);
-f(549758173185, 262145);
-f(1099510054911, 524287);
-f(1099512152064, 524288);
-f(1099514249217, 524289);
-f(2199022206975, 1048575);
-f(2199024304128, 1048576);
-f(2199026401281, 1048577);
-f(4398046511103, 2097151);
-f(4398048608256, 2097152);
-f(4398050705409, 2097153);
-x = 4194303;
-f(0, 0);
-f(4194303, 1);
-f(8388606, 2);
-f(12582909, 3);
-f(16777212, 4);
-f(20971515, 5);
-f(29360121, 7);
-f(33554424, 8);
-f(37748727, 9);
-f(62914545, 15);
-f(67108848, 16);
-f(71303151, 17);
-f(130023393, 31);
-f(134217696, 32);
-f(138411999, 33);
-f(264241089, 63);
-f(268435392, 64);
-f(272629695, 65);
-f(532676481, 127);
-f(536870784, 128);
-f(541065087, 129);
-f(1069547265, 255);
-f(1073741568, 256);
-f(1077935871, 257);
-f(2143288833, 511);
-f(2147483136, 512);
-f(2151677439, 513);
-f(4290771969, 1023);
-f(4294966272, 1024);
-f(4299160575, 1025);
-f(8585738241, 2047);
-f(8589932544, 2048);
-f(8594126847, 2049);
-f(17175670785, 4095);
-f(17179865088, 4096);
-f(17184059391, 4097);
-f(34355535873, 8191);
-f(34359730176, 8192);
-f(34363924479, 8193);
-f(68715266049, 16383);
-f(68719460352, 16384);
-f(68723654655, 16385);
-f(137434726401, 32767);
-f(137438920704, 32768);
-f(137443115007, 32769);
-f(274873647105, 65535);
-f(274877841408, 65536);
-f(274882035711, 65537);
-f(549751488513, 131071);
-f(549755682816, 131072);
-f(549759877119, 131073);
-f(1099507171329, 262143);
-f(1099511365632, 262144);
-f(1099515559935, 262145);
-f(2199018536961, 524287);
-f(2199022731264, 524288);
-f(2199026925567, 524289);
-f(4398041268225, 1048575);
-f(4398045462528, 1048576);
-f(4398049656831, 1048577);
-f(8796086730753, 2097151);
-f(8796090925056, 2097152);
-f(8796095119359, 2097153);
-f(17592177655809, 4194303);
-x = 4194304;
-f(0, 0);
-f(4194304, 1);
-f(8388608, 2);
-f(12582912, 3);
-f(16777216, 4);
-f(20971520, 5);
-f(29360128, 7);
-f(33554432, 8);
-f(37748736, 9);
-f(62914560, 15);
-f(67108864, 16);
-f(71303168, 17);
-f(130023424, 31);
-f(134217728, 32);
-f(138412032, 33);
-f(264241152, 63);
-f(268435456, 64);
-f(272629760, 65);
-f(532676608, 127);
-f(536870912, 128);
-f(541065216, 129);
-f(1069547520, 255);
-f(1073741824, 256);
-f(1077936128, 257);
-f(2143289344, 511);
-f(2147483648, 512);
-f(2151677952, 513);
-f(4290772992, 1023);
-f(4294967296, 1024);
-f(4299161600, 1025);
-f(8585740288, 2047);
-f(8589934592, 2048);
-f(8594128896, 2049);
-f(17175674880, 4095);
-f(17179869184, 4096);
-f(17184063488, 4097);
-f(34355544064, 8191);
-f(34359738368, 8192);
-f(34363932672, 8193);
-f(68715282432, 16383);
-f(68719476736, 16384);
-f(68723671040, 16385);
-f(137434759168, 32767);
-f(137438953472, 32768);
-f(137443147776, 32769);
-f(274873712640, 65535);
-f(274877906944, 65536);
-f(274882101248, 65537);
-f(549751619584, 131071);
-f(549755813888, 131072);
-f(549760008192, 131073);
-f(1099507433472, 262143);
-f(1099511627776, 262144);
-f(1099515822080, 262145);
-f(2199019061248, 524287);
-f(2199023255552, 524288);
-f(2199027449856, 524289);
-f(4398042316800, 1048575);
-f(4398046511104, 1048576);
-f(4398050705408, 1048577);
-f(8796088827904, 2097151);
-f(8796093022208, 2097152);
-f(8796097216512, 2097153);
-f(17592181850112, 4194303);
-f(17592186044416, 4194304);
-x = 4194305;
-f(0, 0);
-f(4194305, 1);
-f(8388610, 2);
-f(12582915, 3);
-f(16777220, 4);
-f(20971525, 5);
-f(29360135, 7);
-f(33554440, 8);
-f(37748745, 9);
-f(62914575, 15);
-f(67108880, 16);
-f(71303185, 17);
-f(130023455, 31);
-f(134217760, 32);
-f(138412065, 33);
-f(264241215, 63);
-f(268435520, 64);
-f(272629825, 65);
-f(532676735, 127);
-f(536871040, 128);
-f(541065345, 129);
-f(1069547775, 255);
-f(1073742080, 256);
-f(1077936385, 257);
-f(2143289855, 511);
-f(2147484160, 512);
-f(2151678465, 513);
-f(4290774015, 1023);
-f(4294968320, 1024);
-f(4299162625, 1025);
-f(8585742335, 2047);
-f(8589936640, 2048);
-f(8594130945, 2049);
-f(17175678975, 4095);
-f(17179873280, 4096);
-f(17184067585, 4097);
-f(34355552255, 8191);
-f(34359746560, 8192);
-f(34363940865, 8193);
-f(68715298815, 16383);
-f(68719493120, 16384);
-f(68723687425, 16385);
-f(137434791935, 32767);
-f(137438986240, 32768);
-f(137443180545, 32769);
-f(274873778175, 65535);
-f(274877972480, 65536);
-f(274882166785, 65537);
-f(549751750655, 131071);
-f(549755944960, 131072);
-f(549760139265, 131073);
-f(1099507695615, 262143);
-f(1099511889920, 262144);
-f(1099516084225, 262145);
-f(2199019585535, 524287);
-f(2199023779840, 524288);
-f(2199027974145, 524289);
-f(4398043365375, 1048575);
-f(4398047559680, 1048576);
-f(4398051753985, 1048577);
-f(8796090925055, 2097151);
-f(8796095119360, 2097152);
-f(8796099313665, 2097153);
-f(17592186044415, 4194303);
-f(17592190238720, 4194304);
-f(17592194433025, 4194305);
-x = 8388607;
-f(0, 0);
-f(8388607, 1);
-f(16777214, 2);
-f(25165821, 3);
-f(33554428, 4);
-f(41943035, 5);
-f(58720249, 7);
-f(67108856, 8);
-f(75497463, 9);
-f(125829105, 15);
-f(134217712, 16);
-f(142606319, 17);
-f(260046817, 31);
-f(268435424, 32);
-f(276824031, 33);
-f(528482241, 63);
-f(536870848, 64);
-f(545259455, 65);
-f(1065353089, 127);
-f(1073741696, 128);
-f(1082130303, 129);
-f(2139094785, 255);
-f(2147483392, 256);
-f(2155871999, 257);
-f(4286578177, 511);
-f(4294966784, 512);
-f(4303355391, 513);
-f(8581544961, 1023);
-f(8589933568, 1024);
-f(8598322175, 1025);
-f(17171478529, 2047);
-f(17179867136, 2048);
-f(17188255743, 2049);
-f(34351345665, 4095);
-f(34359734272, 4096);
-f(34368122879, 4097);
-f(68711079937, 8191);
-f(68719468544, 8192);
-f(68727857151, 8193);
-f(137430548481, 16383);
-f(137438937088, 16384);
-f(137447325695, 16385);
-f(274869485569, 32767);
-f(274877874176, 32768);
-f(274886262783, 32769);
-f(549747359745, 65535);
-f(549755748352, 65536);
-f(549764136959, 65537);
-f(1099503108097, 131071);
-f(1099511496704, 131072);
-f(1099519885311, 131073);
-f(2199014604801, 262143);
-f(2199022993408, 262144);
-f(2199031382015, 262145);
-f(4398037598209, 524287);
-f(4398045986816, 524288);
-f(4398054375423, 524289);
-f(8796083585025, 1048575);
-f(8796091973632, 1048576);
-f(8796100362239, 1048577);
-f(17592175558657, 2097151);
-f(17592183947264, 2097152);
-f(17592192335871, 2097153);
-f(35184359505921, 4194303);
-f(35184367894528, 4194304);
-f(35184376283135, 4194305);
-f(70368727400449, 8388607);
-x = 8388608;
-f(0, 0);
-f(8388608, 1);
-f(16777216, 2);
-f(25165824, 3);
-f(33554432, 4);
-f(41943040, 5);
-f(58720256, 7);
-f(67108864, 8);
-f(75497472, 9);
-f(125829120, 15);
-f(134217728, 16);
-f(142606336, 17);
-f(260046848, 31);
-f(268435456, 32);
-f(276824064, 33);
-f(528482304, 63);
-f(536870912, 64);
-f(545259520, 65);
-f(1065353216, 127);
-f(1073741824, 128);
-f(1082130432, 129);
-f(2139095040, 255);
-f(2147483648, 256);
-f(2155872256, 257);
-f(4286578688, 511);
-f(4294967296, 512);
-f(4303355904, 513);
-f(8581545984, 1023);
-f(8589934592, 1024);
-f(8598323200, 1025);
-f(17171480576, 2047);
-f(17179869184, 2048);
-f(17188257792, 2049);
-f(34351349760, 4095);
-f(34359738368, 4096);
-f(34368126976, 4097);
-f(68711088128, 8191);
-f(68719476736, 8192);
-f(68727865344, 8193);
-f(137430564864, 16383);
-f(137438953472, 16384);
-f(137447342080, 16385);
-f(274869518336, 32767);
-f(274877906944, 32768);
-f(274886295552, 32769);
-f(549747425280, 65535);
-f(549755813888, 65536);
-f(549764202496, 65537);
-f(1099503239168, 131071);
-f(1099511627776, 131072);
-f(1099520016384, 131073);
-f(2199014866944, 262143);
-f(2199023255552, 262144);
-f(2199031644160, 262145);
-f(4398038122496, 524287);
-f(4398046511104, 524288);
-f(4398054899712, 524289);
-f(8796084633600, 1048575);
-f(8796093022208, 1048576);
-f(8796101410816, 1048577);
-f(17592177655808, 2097151);
-f(17592186044416, 2097152);
-f(17592194433024, 2097153);
-f(35184363700224, 4194303);
-f(35184372088832, 4194304);
-f(35184380477440, 4194305);
-f(70368735789056, 8388607);
-f(70368744177664, 8388608);
-x = 8388609;
-f(0, 0);
-f(8388609, 1);
-f(16777218, 2);
-f(25165827, 3);
-f(33554436, 4);
-f(41943045, 5);
-f(58720263, 7);
-f(67108872, 8);
-f(75497481, 9);
-f(125829135, 15);
-f(134217744, 16);
-f(142606353, 17);
-f(260046879, 31);
-f(268435488, 32);
-f(276824097, 33);
-f(528482367, 63);
-f(536870976, 64);
-f(545259585, 65);
-f(1065353343, 127);
-f(1073741952, 128);
-f(1082130561, 129);
-f(2139095295, 255);
-f(2147483904, 256);
-f(2155872513, 257);
-f(4286579199, 511);
-f(4294967808, 512);
-f(4303356417, 513);
-f(8581547007, 1023);
-f(8589935616, 1024);
-f(8598324225, 1025);
-f(17171482623, 2047);
-f(17179871232, 2048);
-f(17188259841, 2049);
-f(34351353855, 4095);
-f(34359742464, 4096);
-f(34368131073, 4097);
-f(68711096319, 8191);
-f(68719484928, 8192);
-f(68727873537, 8193);
-f(137430581247, 16383);
-f(137438969856, 16384);
-f(137447358465, 16385);
-f(274869551103, 32767);
-f(274877939712, 32768);
-f(274886328321, 32769);
-f(549747490815, 65535);
-f(549755879424, 65536);
-f(549764268033, 65537);
-f(1099503370239, 131071);
-f(1099511758848, 131072);
-f(1099520147457, 131073);
-f(2199015129087, 262143);
-f(2199023517696, 262144);
-f(2199031906305, 262145);
-f(4398038646783, 524287);
-f(4398047035392, 524288);
-f(4398055424001, 524289);
-f(8796085682175, 1048575);
-f(8796094070784, 1048576);
-f(8796102459393, 1048577);
-f(17592179752959, 2097151);
-f(17592188141568, 2097152);
-f(17592196530177, 2097153);
-f(35184367894527, 4194303);
-f(35184376283136, 4194304);
-f(35184384671745, 4194305);
-f(70368744177663, 8388607);
-f(70368752566272, 8388608);
-f(70368760954881, 8388609);
-x = 16777215;
-f(0, 0);
-f(16777215, 1);
-f(33554430, 2);
-f(50331645, 3);
-f(67108860, 4);
-f(83886075, 5);
-f(117440505, 7);
-f(134217720, 8);
-f(150994935, 9);
-f(251658225, 15);
-f(268435440, 16);
-f(285212655, 17);
-f(520093665, 31);
-f(536870880, 32);
-f(553648095, 33);
-f(1056964545, 63);
-f(1073741760, 64);
-f(1090518975, 65);
-f(2130706305, 127);
-f(2147483520, 128);
-f(2164260735, 129);
-f(4278189825, 255);
-f(4294967040, 256);
-f(4311744255, 257);
-f(8573156865, 511);
-f(8589934080, 512);
-f(8606711295, 513);
-f(17163090945, 1023);
-f(17179868160, 1024);
-f(17196645375, 1025);
-f(34342959105, 2047);
-f(34359736320, 2048);
-f(34376513535, 2049);
-f(68702695425, 4095);
-f(68719472640, 4096);
-f(68736249855, 4097);
-f(137422168065, 8191);
-f(137438945280, 8192);
-f(137455722495, 8193);
-f(274861113345, 16383);
-f(274877890560, 16384);
-f(274894667775, 16385);
-f(549739003905, 32767);
-f(549755781120, 32768);
-f(549772558335, 32769);
-f(1099494785025, 65535);
-f(1099511562240, 65536);
-f(1099528339455, 65537);
-f(2199006347265, 131071);
-f(2199023124480, 131072);
-f(2199039901695, 131073);
-f(4398029471745, 262143);
-f(4398046248960, 262144);
-f(4398063026175, 262145);
-f(8796075720705, 524287);
-f(8796092497920, 524288);
-f(8796109275135, 524289);
-f(17592168218625, 1048575);
-f(17592184995840, 1048576);
-f(17592201773055, 1048577);
-f(35184353214465, 2097151);
-f(35184369991680, 2097152);
-f(35184386768895, 2097153);
-f(70368723206145, 4194303);
-f(70368739983360, 4194304);
-f(70368756760575, 4194305);
-f(140737463189505, 8388607);
-f(140737479966720, 8388608);
-f(140737496743935, 8388609);
-f(281474943156225, 16777215);
-x = 16777216;
-f(0, 0);
-f(16777216, 1);
-f(33554432, 2);
-f(50331648, 3);
-f(67108864, 4);
-f(83886080, 5);
-f(117440512, 7);
-f(134217728, 8);
-f(150994944, 9);
-f(251658240, 15);
-f(268435456, 16);
-f(285212672, 17);
-f(520093696, 31);
-f(536870912, 32);
-f(553648128, 33);
-f(1056964608, 63);
-f(1073741824, 64);
-f(1090519040, 65);
-f(2130706432, 127);
-f(2147483648, 128);
-f(2164260864, 129);
-f(4278190080, 255);
-f(4294967296, 256);
-f(4311744512, 257);
-f(8573157376, 511);
-f(8589934592, 512);
-f(8606711808, 513);
-f(17163091968, 1023);
-f(17179869184, 1024);
-f(17196646400, 1025);
-f(34342961152, 2047);
-f(34359738368, 2048);
-f(34376515584, 2049);
-f(68702699520, 4095);
-f(68719476736, 4096);
-f(68736253952, 4097);
-f(137422176256, 8191);
-f(137438953472, 8192);
-f(137455730688, 8193);
-f(274861129728, 16383);
-f(274877906944, 16384);
-f(274894684160, 16385);
-f(549739036672, 32767);
-f(549755813888, 32768);
-f(549772591104, 32769);
-f(1099494850560, 65535);
-f(1099511627776, 65536);
-f(1099528404992, 65537);
-f(2199006478336, 131071);
-f(2199023255552, 131072);
-f(2199040032768, 131073);
-f(4398029733888, 262143);
-f(4398046511104, 262144);
-f(4398063288320, 262145);
-f(8796076244992, 524287);
-f(8796093022208, 524288);
-f(8796109799424, 524289);
-f(17592169267200, 1048575);
-f(17592186044416, 1048576);
-f(17592202821632, 1048577);
-f(35184355311616, 2097151);
-f(35184372088832, 2097152);
-f(35184388866048, 2097153);
-f(70368727400448, 4194303);
-f(70368744177664, 4194304);
-f(70368760954880, 4194305);
-f(140737471578112, 8388607);
-f(140737488355328, 8388608);
-f(140737505132544, 8388609);
-f(281474959933440, 16777215);
-f(281474976710656, 16777216);
-x = 16777217;
-f(0, 0);
-f(16777217, 1);
-f(33554434, 2);
-f(50331651, 3);
-f(67108868, 4);
-f(83886085, 5);
-f(117440519, 7);
-f(134217736, 8);
-f(150994953, 9);
-f(251658255, 15);
-f(268435472, 16);
-f(285212689, 17);
-f(520093727, 31);
-f(536870944, 32);
-f(553648161, 33);
-f(1056964671, 63);
-f(1073741888, 64);
-f(1090519105, 65);
-f(2130706559, 127);
-f(2147483776, 128);
-f(2164260993, 129);
-f(4278190335, 255);
-f(4294967552, 256);
-f(4311744769, 257);
-f(8573157887, 511);
-f(8589935104, 512);
-f(8606712321, 513);
-f(17163092991, 1023);
-f(17179870208, 1024);
-f(17196647425, 1025);
-f(34342963199, 2047);
-f(34359740416, 2048);
-f(34376517633, 2049);
-f(68702703615, 4095);
-f(68719480832, 4096);
-f(68736258049, 4097);
-f(137422184447, 8191);
-f(137438961664, 8192);
-f(137455738881, 8193);
-f(274861146111, 16383);
-f(274877923328, 16384);
-f(274894700545, 16385);
-f(549739069439, 32767);
-f(549755846656, 32768);
-f(549772623873, 32769);
-f(1099494916095, 65535);
-f(1099511693312, 65536);
-f(1099528470529, 65537);
-f(2199006609407, 131071);
-f(2199023386624, 131072);
-f(2199040163841, 131073);
-f(4398029996031, 262143);
-f(4398046773248, 262144);
-f(4398063550465, 262145);
-f(8796076769279, 524287);
-f(8796093546496, 524288);
-f(8796110323713, 524289);
-f(17592170315775, 1048575);
-f(17592187092992, 1048576);
-f(17592203870209, 1048577);
-f(35184357408767, 2097151);
-f(35184374185984, 2097152);
-f(35184390963201, 2097153);
-f(70368731594751, 4194303);
-f(70368748371968, 4194304);
-f(70368765149185, 4194305);
-f(140737479966719, 8388607);
-f(140737496743936, 8388608);
-f(140737513521153, 8388609);
-f(281474976710655, 16777215);
-f(281474993487872, 16777216);
-f(281475010265089, 16777217);
-x = 33554431;
-f(0, 0);
-f(33554431, 1);
-f(67108862, 2);
-f(100663293, 3);
-f(134217724, 4);
-f(167772155, 5);
-f(234881017, 7);
-f(268435448, 8);
-f(301989879, 9);
-f(503316465, 15);
-f(536870896, 16);
-f(570425327, 17);
-f(1040187361, 31);
-f(1073741792, 32);
-f(1107296223, 33);
-f(2113929153, 63);
-f(2147483584, 64);
-f(2181038015, 65);
-f(4261412737, 127);
-f(4294967168, 128);
-f(4328521599, 129);
-f(8556379905, 255);
-f(8589934336, 256);
-f(8623488767, 257);
-f(17146314241, 511);
-f(17179868672, 512);
-f(17213423103, 513);
-f(34326182913, 1023);
-f(34359737344, 1024);
-f(34393291775, 1025);
-f(68685920257, 2047);
-f(68719474688, 2048);
-f(68753029119, 2049);
-f(137405394945, 4095);
-f(137438949376, 4096);
-f(137472503807, 4097);
-f(274844344321, 8191);
-f(274877898752, 8192);
-f(274911453183, 8193);
-f(549722243073, 16383);
-f(549755797504, 16384);
-f(549789351935, 16385);
-f(1099478040577, 32767);
-f(1099511595008, 32768);
-f(1099545149439, 32769);
-f(2198989635585, 65535);
-f(2199023190016, 65536);
-f(2199056744447, 65537);
-f(4398012825601, 131071);
-f(4398046380032, 131072);
-f(4398079934463, 131073);
-f(8796059205633, 262143);
-f(8796092760064, 262144);
-f(8796126314495, 262145);
-f(17592151965697, 524287);
-f(17592185520128, 524288);
-f(17592219074559, 524289);
-f(35184337485825, 1048575);
-f(35184371040256, 1048576);
-f(35184404594687, 1048577);
-f(70368708526081, 2097151);
-f(70368742080512, 2097152);
-f(70368775634943, 2097153);
-f(140737450606593, 4194303);
-f(140737484161024, 4194304);
-f(140737517715455, 4194305);
-f(281474934767617, 8388607);
-f(281474968322048, 8388608);
-f(281475001876479, 8388609);
-f(562949903089665, 16777215);
-f(562949936644096, 16777216);
-f(562949970198527, 16777217);
-f(1125899839733761, 33554431);
-x = 33554432;
-f(0, 0);
-f(33554432, 1);
-f(67108864, 2);
-f(100663296, 3);
-f(134217728, 4);
-f(167772160, 5);
-f(234881024, 7);
-f(268435456, 8);
-f(301989888, 9);
-f(503316480, 15);
-f(536870912, 16);
-f(570425344, 17);
-f(1040187392, 31);
-f(1073741824, 32);
-f(1107296256, 33);
-f(2113929216, 63);
-f(2147483648, 64);
-f(2181038080, 65);
-f(4261412864, 127);
-f(4294967296, 128);
-f(4328521728, 129);
-f(8556380160, 255);
-f(8589934592, 256);
-f(8623489024, 257);
-f(17146314752, 511);
-f(17179869184, 512);
-f(17213423616, 513);
-f(34326183936, 1023);
-f(34359738368, 1024);
-f(34393292800, 1025);
-f(68685922304, 2047);
-f(68719476736, 2048);
-f(68753031168, 2049);
-f(137405399040, 4095);
-f(137438953472, 4096);
-f(137472507904, 4097);
-f(274844352512, 8191);
-f(274877906944, 8192);
-f(274911461376, 8193);
-f(549722259456, 16383);
-f(549755813888, 16384);
-f(549789368320, 16385);
-f(1099478073344, 32767);
-f(1099511627776, 32768);
-f(1099545182208, 32769);
-f(2198989701120, 65535);
-f(2199023255552, 65536);
-f(2199056809984, 65537);
-f(4398012956672, 131071);
-f(4398046511104, 131072);
-f(4398080065536, 131073);
-f(8796059467776, 262143);
-f(8796093022208, 262144);
-f(8796126576640, 262145);
-f(17592152489984, 524287);
-f(17592186044416, 524288);
-f(17592219598848, 524289);
-f(35184338534400, 1048575);
-f(35184372088832, 1048576);
-f(35184405643264, 1048577);
-f(70368710623232, 2097151);
-f(70368744177664, 2097152);
-f(70368777732096, 2097153);
-f(140737454800896, 4194303);
-f(140737488355328, 4194304);
-f(140737521909760, 4194305);
-f(281474943156224, 8388607);
-f(281474976710656, 8388608);
-f(281475010265088, 8388609);
-f(562949919866880, 16777215);
-f(562949953421312, 16777216);
-f(562949986975744, 16777217);
-f(1125899873288192, 33554431);
-f(1125899906842624, 33554432);
-x = 33554433;
-f(0, 0);
-f(33554433, 1);
-f(67108866, 2);
-f(100663299, 3);
-f(134217732, 4);
-f(167772165, 5);
-f(234881031, 7);
-f(268435464, 8);
-f(301989897, 9);
-f(503316495, 15);
-f(536870928, 16);
-f(570425361, 17);
-f(1040187423, 31);
-f(1073741856, 32);
-f(1107296289, 33);
-f(2113929279, 63);
-f(2147483712, 64);
-f(2181038145, 65);
-f(4261412991, 127);
-f(4294967424, 128);
-f(4328521857, 129);
-f(8556380415, 255);
-f(8589934848, 256);
-f(8623489281, 257);
-f(17146315263, 511);
-f(17179869696, 512);
-f(17213424129, 513);
-f(34326184959, 1023);
-f(34359739392, 1024);
-f(34393293825, 1025);
-f(68685924351, 2047);
-f(68719478784, 2048);
-f(68753033217, 2049);
-f(137405403135, 4095);
-f(137438957568, 4096);
-f(137472512001, 4097);
-f(274844360703, 8191);
-f(274877915136, 8192);
-f(274911469569, 8193);
-f(549722275839, 16383);
-f(549755830272, 16384);
-f(549789384705, 16385);
-f(1099478106111, 32767);
-f(1099511660544, 32768);
-f(1099545214977, 32769);
-f(2198989766655, 65535);
-f(2199023321088, 65536);
-f(2199056875521, 65537);
-f(4398013087743, 131071);
-f(4398046642176, 131072);
-f(4398080196609, 131073);
-f(8796059729919, 262143);
-f(8796093284352, 262144);
-f(8796126838785, 262145);
-f(17592153014271, 524287);
-f(17592186568704, 524288);
-f(17592220123137, 524289);
-f(35184339582975, 1048575);
-f(35184373137408, 1048576);
-f(35184406691841, 1048577);
-f(70368712720383, 2097151);
-f(70368746274816, 2097152);
-f(70368779829249, 2097153);
-f(140737458995199, 4194303);
-f(140737492549632, 4194304);
-f(140737526104065, 4194305);
-f(281474951544831, 8388607);
-f(281474985099264, 8388608);
-f(281475018653697, 8388609);
-f(562949936644095, 16777215);
-f(562949970198528, 16777216);
-f(562950003752961, 16777217);
-f(1125899906842623, 33554431);
-f(1125899940397056, 33554432);
-f(1125899973951489, 33554433);
-x = 67108863;
-f(0, 0);
-f(67108863, 1);
-f(134217726, 2);
-f(201326589, 3);
-f(268435452, 4);
-f(335544315, 5);
-f(469762041, 7);
-f(536870904, 8);
-f(603979767, 9);
-f(1006632945, 15);
-f(1073741808, 16);
-f(1140850671, 17);
-f(2080374753, 31);
-f(2147483616, 32);
-f(2214592479, 33);
-f(4227858369, 63);
-f(4294967232, 64);
-f(4362076095, 65);
-f(8522825601, 127);
-f(8589934464, 128);
-f(8657043327, 129);
-f(17112760065, 255);
-f(17179868928, 256);
-f(17246977791, 257);
-f(34292628993, 511);
-f(34359737856, 512);
-f(34426846719, 513);
-f(68652366849, 1023);
-f(68719475712, 1024);
-f(68786584575, 1025);
-f(137371842561, 2047);
-f(137438951424, 2048);
-f(137506060287, 2049);
-f(274810793985, 4095);
-f(274877902848, 4096);
-f(274945011711, 4097);
-f(549688696833, 8191);
-f(549755805696, 8192);
-f(549822914559, 8193);
-f(1099444502529, 16383);
-f(1099511611392, 16384);
-f(1099578720255, 16385);
-f(2198956113921, 32767);
-f(2199023222784, 32768);
-f(2199090331647, 32769);
-f(4397979336705, 65535);
-f(4398046445568, 65536);
-f(4398113554431, 65537);
-f(8796025782273, 131071);
-f(8796092891136, 131072);
-f(8796159999999, 131073);
-f(17592118673409, 262143);
-f(17592185782272, 262144);
-f(17592252891135, 262145);
-f(35184304455681, 524287);
-f(35184371564544, 524288);
-f(35184438673407, 524289);
-f(70368676020225, 1048575);
-f(70368743129088, 1048576);
-f(70368810237951, 1048577);
-f(140737419149313, 2097151);
-f(140737486258176, 2097152);
-f(140737553367039, 2097153);
-f(281474905407489, 4194303);
-f(281474972516352, 4194304);
-f(281475039625215, 4194305);
-f(562949877923841, 8388607);
-f(562949945032704, 8388608);
-f(562950012141567, 8388609);
-f(1125899822956545, 16777215);
-f(1125899890065408, 16777216);
-f(1125899957174271, 16777217);
-x = 67108864;
-f(0, 0);
-f(67108864, 1);
-f(134217728, 2);
-f(201326592, 3);
-f(268435456, 4);
-f(335544320, 5);
-f(469762048, 7);
-f(536870912, 8);
-f(603979776, 9);
-f(1006632960, 15);
-f(1073741824, 16);
-f(1140850688, 17);
-f(2080374784, 31);
-f(2147483648, 32);
-f(2214592512, 33);
-f(4227858432, 63);
-f(4294967296, 64);
-f(4362076160, 65);
-f(8522825728, 127);
-f(8589934592, 128);
-f(8657043456, 129);
-f(17112760320, 255);
-f(17179869184, 256);
-f(17246978048, 257);
-f(34292629504, 511);
-f(34359738368, 512);
-f(34426847232, 513);
-f(68652367872, 1023);
-f(68719476736, 1024);
-f(68786585600, 1025);
-f(137371844608, 2047);
-f(137438953472, 2048);
-f(137506062336, 2049);
-f(274810798080, 4095);
-f(274877906944, 4096);
-f(274945015808, 4097);
-f(549688705024, 8191);
-f(549755813888, 8192);
-f(549822922752, 8193);
-f(1099444518912, 16383);
-f(1099511627776, 16384);
-f(1099578736640, 16385);
-f(2198956146688, 32767);
-f(2199023255552, 32768);
-f(2199090364416, 32769);
-f(4397979402240, 65535);
-f(4398046511104, 65536);
-f(4398113619968, 65537);
-f(8796025913344, 131071);
-f(8796093022208, 131072);
-f(8796160131072, 131073);
-f(17592118935552, 262143);
-f(17592186044416, 262144);
-f(17592253153280, 262145);
-f(35184304979968, 524287);
-f(35184372088832, 524288);
-f(35184439197696, 524289);
-f(70368677068800, 1048575);
-f(70368744177664, 1048576);
-f(70368811286528, 1048577);
-f(140737421246464, 2097151);
-f(140737488355328, 2097152);
-f(140737555464192, 2097153);
-f(281474909601792, 4194303);
-f(281474976710656, 4194304);
-f(281475043819520, 4194305);
-f(562949886312448, 8388607);
-f(562949953421312, 8388608);
-f(562950020530176, 8388609);
-f(1125899839733760, 16777215);
-f(1125899906842624, 16777216);
-f(1125899973951488, 16777217);
-x = 67108865;
-f(0, 0);
-f(67108865, 1);
-f(134217730, 2);
-f(201326595, 3);
-f(268435460, 4);
-f(335544325, 5);
-f(469762055, 7);
-f(536870920, 8);
-f(603979785, 9);
-f(1006632975, 15);
-f(1073741840, 16);
-f(1140850705, 17);
-f(2080374815, 31);
-f(2147483680, 32);
-f(2214592545, 33);
-f(4227858495, 63);
-f(4294967360, 64);
-f(4362076225, 65);
-f(8522825855, 127);
-f(8589934720, 128);
-f(8657043585, 129);
-f(17112760575, 255);
-f(17179869440, 256);
-f(17246978305, 257);
-f(34292630015, 511);
-f(34359738880, 512);
-f(34426847745, 513);
-f(68652368895, 1023);
-f(68719477760, 1024);
-f(68786586625, 1025);
-f(137371846655, 2047);
-f(137438955520, 2048);
-f(137506064385, 2049);
-f(274810802175, 4095);
-f(274877911040, 4096);
-f(274945019905, 4097);
-f(549688713215, 8191);
-f(549755822080, 8192);
-f(549822930945, 8193);
-f(1099444535295, 16383);
-f(1099511644160, 16384);
-f(1099578753025, 16385);
-f(2198956179455, 32767);
-f(2199023288320, 32768);
-f(2199090397185, 32769);
-f(4397979467775, 65535);
-f(4398046576640, 65536);
-f(4398113685505, 65537);
-f(8796026044415, 131071);
-f(8796093153280, 131072);
-f(8796160262145, 131073);
-f(17592119197695, 262143);
-f(17592186306560, 262144);
-f(17592253415425, 262145);
-f(35184305504255, 524287);
-f(35184372613120, 524288);
-f(35184439721985, 524289);
-f(70368678117375, 1048575);
-f(70368745226240, 1048576);
-f(70368812335105, 1048577);
-f(140737423343615, 2097151);
-f(140737490452480, 2097152);
-f(140737557561345, 2097153);
-f(281474913796095, 4194303);
-f(281474980904960, 4194304);
-f(281475048013825, 4194305);
-f(562949894701055, 8388607);
-f(562949961809920, 8388608);
-f(562950028918785, 8388609);
-f(1125899856510975, 16777215);
-f(1125899923619840, 16777216);
-f(1125899990728705, 16777217);
-x = 134217727;
-f(0, 0);
-f(134217727, 1);
-f(268435454, 2);
-f(402653181, 3);
-f(536870908, 4);
-f(671088635, 5);
-f(939524089, 7);
-f(1073741816, 8);
-f(1207959543, 9);
-f(2013265905, 15);
-f(2147483632, 16);
-f(2281701359, 17);
-f(4160749537, 31);
-f(4294967264, 32);
-f(4429184991, 33);
-f(8455716801, 63);
-f(8589934528, 64);
-f(8724152255, 65);
-f(17045651329, 127);
-f(17179869056, 128);
-f(17314086783, 129);
-f(34225520385, 255);
-f(34359738112, 256);
-f(34493955839, 257);
-f(68585258497, 511);
-f(68719476224, 512);
-f(68853693951, 513);
-f(137304734721, 1023);
-f(137438952448, 1024);
-f(137573170175, 1025);
-f(274743687169, 2047);
-f(274877904896, 2048);
-f(275012122623, 2049);
-f(549621592065, 4095);
-f(549755809792, 4096);
-f(549890027519, 4097);
-f(1099377401857, 8191);
-f(1099511619584, 8192);
-f(1099645837311, 8193);
-f(2198889021441, 16383);
-f(2199023239168, 16384);
-f(2199157456895, 16385);
-f(4397912260609, 32767);
-f(4398046478336, 32768);
-f(4398180696063, 32769);
-f(8795958738945, 65535);
-f(8796092956672, 65536);
-f(8796227174399, 65537);
-f(17592051695617, 131071);
-f(17592185913344, 131072);
-f(17592320131071, 131073);
-f(35184237608961, 262143);
-f(35184371826688, 262144);
-f(35184506044415, 262145);
-f(70368609435649, 524287);
-f(70368743653376, 524288);
-f(70368877871103, 524289);
-f(140737353089025, 1048575);
-f(140737487306752, 1048576);
-f(140737621524479, 1048577);
-f(281474840395777, 2097151);
-f(281474974613504, 2097152);
-f(281475108831231, 2097153);
-f(562949815009281, 4194303);
-f(562949949227008, 4194304);
-f(562950083444735, 4194305);
-f(1125899764236289, 8388607);
-f(1125899898454016, 8388608);
-f(1125900032671743, 8388609);
-x = 134217728;
-f(0, 0);
-f(134217728, 1);
-f(268435456, 2);
-f(402653184, 3);
-f(536870912, 4);
-f(671088640, 5);
-f(939524096, 7);
-f(1073741824, 8);
-f(1207959552, 9);
-f(2013265920, 15);
-f(2147483648, 16);
-f(2281701376, 17);
-f(4160749568, 31);
-f(4294967296, 32);
-f(4429185024, 33);
-f(8455716864, 63);
-f(8589934592, 64);
-f(8724152320, 65);
-f(17045651456, 127);
-f(17179869184, 128);
-f(17314086912, 129);
-f(34225520640, 255);
-f(34359738368, 256);
-f(34493956096, 257);
-f(68585259008, 511);
-f(68719476736, 512);
-f(68853694464, 513);
-f(137304735744, 1023);
-f(137438953472, 1024);
-f(137573171200, 1025);
-f(274743689216, 2047);
-f(274877906944, 2048);
-f(275012124672, 2049);
-f(549621596160, 4095);
-f(549755813888, 4096);
-f(549890031616, 4097);
-f(1099377410048, 8191);
-f(1099511627776, 8192);
-f(1099645845504, 8193);
-f(2198889037824, 16383);
-f(2199023255552, 16384);
-f(2199157473280, 16385);
-f(4397912293376, 32767);
-f(4398046511104, 32768);
-f(4398180728832, 32769);
-f(8795958804480, 65535);
-f(8796093022208, 65536);
-f(8796227239936, 65537);
-f(17592051826688, 131071);
-f(17592186044416, 131072);
-f(17592320262144, 131073);
-f(35184237871104, 262143);
-f(35184372088832, 262144);
-f(35184506306560, 262145);
-f(70368609959936, 524287);
-f(70368744177664, 524288);
-f(70368878395392, 524289);
-f(140737354137600, 1048575);
-f(140737488355328, 1048576);
-f(140737622573056, 1048577);
-f(281474842492928, 2097151);
-f(281474976710656, 2097152);
-f(281475110928384, 2097153);
-f(562949819203584, 4194303);
-f(562949953421312, 4194304);
-f(562950087639040, 4194305);
-f(1125899772624896, 8388607);
-f(1125899906842624, 8388608);
-f(1125900041060352, 8388609);
-x = 134217729;
-f(0, 0);
-f(134217729, 1);
-f(268435458, 2);
-f(402653187, 3);
-f(536870916, 4);
-f(671088645, 5);
-f(939524103, 7);
-f(1073741832, 8);
-f(1207959561, 9);
-f(2013265935, 15);
-f(2147483664, 16);
-f(2281701393, 17);
-f(4160749599, 31);
-f(4294967328, 32);
-f(4429185057, 33);
-f(8455716927, 63);
-f(8589934656, 64);
-f(8724152385, 65);
-f(17045651583, 127);
-f(17179869312, 128);
-f(17314087041, 129);
-f(34225520895, 255);
-f(34359738624, 256);
-f(34493956353, 257);
-f(68585259519, 511);
-f(68719477248, 512);
-f(68853694977, 513);
-f(137304736767, 1023);
-f(137438954496, 1024);
-f(137573172225, 1025);
-f(274743691263, 2047);
-f(274877908992, 2048);
-f(275012126721, 2049);
-f(549621600255, 4095);
-f(549755817984, 4096);
-f(549890035713, 4097);
-f(1099377418239, 8191);
-f(1099511635968, 8192);
-f(1099645853697, 8193);
-f(2198889054207, 16383);
-f(2199023271936, 16384);
-f(2199157489665, 16385);
-f(4397912326143, 32767);
-f(4398046543872, 32768);
-f(4398180761601, 32769);
-f(8795958870015, 65535);
-f(8796093087744, 65536);
-f(8796227305473, 65537);
-f(17592051957759, 131071);
-f(17592186175488, 131072);
-f(17592320393217, 131073);
-f(35184238133247, 262143);
-f(35184372350976, 262144);
-f(35184506568705, 262145);
-f(70368610484223, 524287);
-f(70368744701952, 524288);
-f(70368878919681, 524289);
-f(140737355186175, 1048575);
-f(140737489403904, 1048576);
-f(140737623621633, 1048577);
-f(281474844590079, 2097151);
-f(281474978807808, 2097152);
-f(281475113025537, 2097153);
-f(562949823397887, 4194303);
-f(562949957615616, 4194304);
-f(562950091833345, 4194305);
-f(1125899781013503, 8388607);
-f(1125899915231232, 8388608);
-f(1125900049448961, 8388609);
-x = 268435455;
-f(0, 0);
-f(268435455, 1);
-f(536870910, 2);
-f(805306365, 3);
-f(1073741820, 4);
-f(1342177275, 5);
-f(1879048185, 7);
-f(2147483640, 8);
-f(2415919095, 9);
-f(4026531825, 15);
-f(4294967280, 16);
-f(4563402735, 17);
-f(8321499105, 31);
-f(8589934560, 32);
-f(8858370015, 33);
-f(16911433665, 63);
-f(17179869120, 64);
-f(17448304575, 65);
-f(34091302785, 127);
-f(34359738240, 128);
-f(34628173695, 129);
-f(68451041025, 255);
-f(68719476480, 256);
-f(68987911935, 257);
-f(137170517505, 511);
-f(137438952960, 512);
-f(137707388415, 513);
-f(274609470465, 1023);
-f(274877905920, 1024);
-f(275146341375, 1025);
-f(549487376385, 2047);
-f(549755811840, 2048);
-f(550024247295, 2049);
-f(1099243188225, 4095);
-f(1099511623680, 4096);
-f(1099780059135, 4097);
-f(2198754811905, 8191);
-f(2199023247360, 8192);
-f(2199291682815, 8193);
-f(4397778059265, 16383);
-f(4398046494720, 16384);
-f(4398314930175, 16385);
-f(8795824553985, 32767);
-f(8796092989440, 32768);
-f(8796361424895, 32769);
-f(17591917543425, 65535);
-f(17592185978880, 65536);
-f(17592454414335, 65537);
-f(35184103522305, 131071);
-f(35184371957760, 131072);
-f(35184640393215, 131073);
-f(70368475480065, 262143);
-f(70368743915520, 262144);
-f(70369012350975, 262145);
-f(140737219395585, 524287);
-f(140737487831040, 524288);
-f(140737756266495, 524289);
-f(281474707226625, 1048575);
-f(281474975662080, 1048576);
-f(281475244097535, 1048577);
-f(562949682888705, 2097151);
-f(562949951324160, 2097152);
-f(562950219759615, 2097153);
-f(1125899634212865, 4194303);
-f(1125899902648320, 4194304);
-f(1125900171083775, 4194305);
-x = 268435456;
-f(0, 0);
-f(268435456, 1);
-f(536870912, 2);
-f(805306368, 3);
-f(1073741824, 4);
-f(1342177280, 5);
-f(1879048192, 7);
-f(2147483648, 8);
-f(2415919104, 9);
-f(4026531840, 15);
-f(4294967296, 16);
-f(4563402752, 17);
-f(8321499136, 31);
-f(8589934592, 32);
-f(8858370048, 33);
-f(16911433728, 63);
-f(17179869184, 64);
-f(17448304640, 65);
-f(34091302912, 127);
-f(34359738368, 128);
-f(34628173824, 129);
-f(68451041280, 255);
-f(68719476736, 256);
-f(68987912192, 257);
-f(137170518016, 511);
-f(137438953472, 512);
-f(137707388928, 513);
-f(274609471488, 1023);
-f(274877906944, 1024);
-f(275146342400, 1025);
-f(549487378432, 2047);
-f(549755813888, 2048);
-f(550024249344, 2049);
-f(1099243192320, 4095);
-f(1099511627776, 4096);
-f(1099780063232, 4097);
-f(2198754820096, 8191);
-f(2199023255552, 8192);
-f(2199291691008, 8193);
-f(4397778075648, 16383);
-f(4398046511104, 16384);
-f(4398314946560, 16385);
-f(8795824586752, 32767);
-f(8796093022208, 32768);
-f(8796361457664, 32769);
-f(17591917608960, 65535);
-f(17592186044416, 65536);
-f(17592454479872, 65537);
-f(35184103653376, 131071);
-f(35184372088832, 131072);
-f(35184640524288, 131073);
-f(70368475742208, 262143);
-f(70368744177664, 262144);
-f(70369012613120, 262145);
-f(140737219919872, 524287);
-f(140737488355328, 524288);
-f(140737756790784, 524289);
-f(281474708275200, 1048575);
-f(281474976710656, 1048576);
-f(281475245146112, 1048577);
-f(562949684985856, 2097151);
-f(562949953421312, 2097152);
-f(562950221856768, 2097153);
-f(1125899638407168, 4194303);
-f(1125899906842624, 4194304);
-f(1125900175278080, 4194305);
-x = 268435457;
-f(0, 0);
-f(268435457, 1);
-f(536870914, 2);
-f(805306371, 3);
-f(1073741828, 4);
-f(1342177285, 5);
-f(1879048199, 7);
-f(2147483656, 8);
-f(2415919113, 9);
-f(4026531855, 15);
-f(4294967312, 16);
-f(4563402769, 17);
-f(8321499167, 31);
-f(8589934624, 32);
-f(8858370081, 33);
-f(16911433791, 63);
-f(17179869248, 64);
-f(17448304705, 65);
-f(34091303039, 127);
-f(34359738496, 128);
-f(34628173953, 129);
-f(68451041535, 255);
-f(68719476992, 256);
-f(68987912449, 257);
-f(137170518527, 511);
-f(137438953984, 512);
-f(137707389441, 513);
-f(274609472511, 1023);
-f(274877907968, 1024);
-f(275146343425, 1025);
-f(549487380479, 2047);
-f(549755815936, 2048);
-f(550024251393, 2049);
-f(1099243196415, 4095);
-f(1099511631872, 4096);
-f(1099780067329, 4097);
-f(2198754828287, 8191);
-f(2199023263744, 8192);
-f(2199291699201, 8193);
-f(4397778092031, 16383);
-f(4398046527488, 16384);
-f(4398314962945, 16385);
-f(8795824619519, 32767);
-f(8796093054976, 32768);
-f(8796361490433, 32769);
-f(17591917674495, 65535);
-f(17592186109952, 65536);
-f(17592454545409, 65537);
-f(35184103784447, 131071);
-f(35184372219904, 131072);
-f(35184640655361, 131073);
-f(70368476004351, 262143);
-f(70368744439808, 262144);
-f(70369012875265, 262145);
-f(140737220444159, 524287);
-f(140737488879616, 524288);
-f(140737757315073, 524289);
-f(281474709323775, 1048575);
-f(281474977759232, 1048576);
-f(281475246194689, 1048577);
-f(562949687083007, 2097151);
-f(562949955518464, 2097152);
-f(562950223953921, 2097153);
-f(1125899642601471, 4194303);
-f(1125899911036928, 4194304);
-f(1125900179472385, 4194305);
-x = 536870911;
-f(0, 0);
-f(536870911, 1);
-f(1073741822, 2);
-f(1610612733, 3);
-f(2147483644, 4);
-f(2684354555, 5);
-f(3758096377, 7);
-f(4294967288, 8);
-f(4831838199, 9);
-f(8053063665, 15);
-f(8589934576, 16);
-f(9126805487, 17);
-f(16642998241, 31);
-f(17179869152, 32);
-f(17716740063, 33);
-f(33822867393, 63);
-f(34359738304, 64);
-f(34896609215, 65);
-f(68182605697, 127);
-f(68719476608, 128);
-f(69256347519, 129);
-f(136902082305, 255);
-f(137438953216, 256);
-f(137975824127, 257);
-f(274341035521, 511);
-f(274877906432, 512);
-f(275414777343, 513);
-f(549218941953, 1023);
-f(549755812864, 1024);
-f(550292683775, 1025);
-f(1098974754817, 2047);
-f(1099511625728, 2048);
-f(1100048496639, 2049);
-f(2198486380545, 4095);
-f(2199023251456, 4096);
-f(2199560122367, 4097);
-f(4397509632001, 8191);
-f(4398046502912, 8192);
-f(4398583373823, 8193);
-f(8795556134913, 16383);
-f(8796093005824, 16384);
-f(8796629876735, 16385);
-f(17591649140737, 32767);
-f(17592186011648, 32768);
-f(17592722882559, 32769);
-f(35183835152385, 65535);
-f(35184372023296, 65536);
-f(35184908894207, 65537);
-f(70368207175681, 131071);
-f(70368744046592, 131072);
-f(70369280917503, 131073);
-f(140736951222273, 262143);
-f(140737488093184, 262144);
-f(140738024964095, 262145);
-f(281474439315457, 524287);
-f(281474976186368, 524288);
-f(281475513057279, 524289);
-f(562949415501825, 1048575);
-f(562949952372736, 1048576);
-f(562950489243647, 1048577);
-f(1125899367874561, 2097151);
-f(1125899904745472, 2097152);
-f(1125900441616383, 2097153);
-x = 536870912;
-f(0, 0);
-f(536870912, 1);
-f(1073741824, 2);
-f(1610612736, 3);
-f(2147483648, 4);
-f(2684354560, 5);
-f(3758096384, 7);
-f(4294967296, 8);
-f(4831838208, 9);
-f(8053063680, 15);
-f(8589934592, 16);
-f(9126805504, 17);
-f(16642998272, 31);
-f(17179869184, 32);
-f(17716740096, 33);
-f(33822867456, 63);
-f(34359738368, 64);
-f(34896609280, 65);
-f(68182605824, 127);
-f(68719476736, 128);
-f(69256347648, 129);
-f(136902082560, 255);
-f(137438953472, 256);
-f(137975824384, 257);
-f(274341036032, 511);
-f(274877906944, 512);
-f(275414777856, 513);
-f(549218942976, 1023);
-f(549755813888, 1024);
-f(550292684800, 1025);
-f(1098974756864, 2047);
-f(1099511627776, 2048);
-f(1100048498688, 2049);
-f(2198486384640, 4095);
-f(2199023255552, 4096);
-f(2199560126464, 4097);
-f(4397509640192, 8191);
-f(4398046511104, 8192);
-f(4398583382016, 8193);
-f(8795556151296, 16383);
-f(8796093022208, 16384);
-f(8796629893120, 16385);
-f(17591649173504, 32767);
-f(17592186044416, 32768);
-f(17592722915328, 32769);
-f(35183835217920, 65535);
-f(35184372088832, 65536);
-f(35184908959744, 65537);
-f(70368207306752, 131071);
-f(70368744177664, 131072);
-f(70369281048576, 131073);
-f(140736951484416, 262143);
-f(140737488355328, 262144);
-f(140738025226240, 262145);
-f(281474439839744, 524287);
-f(281474976710656, 524288);
-f(281475513581568, 524289);
-f(562949416550400, 1048575);
-f(562949953421312, 1048576);
-f(562950490292224, 1048577);
-f(1125899369971712, 2097151);
-f(1125899906842624, 2097152);
-f(1125900443713536, 2097153);
-x = 536870913;
-f(0, 0);
-f(536870913, 1);
-f(1073741826, 2);
-f(1610612739, 3);
-f(2147483652, 4);
-f(2684354565, 5);
-f(3758096391, 7);
-f(4294967304, 8);
-f(4831838217, 9);
-f(8053063695, 15);
-f(8589934608, 16);
-f(9126805521, 17);
-f(16642998303, 31);
-f(17179869216, 32);
-f(17716740129, 33);
-f(33822867519, 63);
-f(34359738432, 64);
-f(34896609345, 65);
-f(68182605951, 127);
-f(68719476864, 128);
-f(69256347777, 129);
-f(136902082815, 255);
-f(137438953728, 256);
-f(137975824641, 257);
-f(274341036543, 511);
-f(274877907456, 512);
-f(275414778369, 513);
-f(549218943999, 1023);
-f(549755814912, 1024);
-f(550292685825, 1025);
-f(1098974758911, 2047);
-f(1099511629824, 2048);
-f(1100048500737, 2049);
-f(2198486388735, 4095);
-f(2199023259648, 4096);
-f(2199560130561, 4097);
-f(4397509648383, 8191);
-f(4398046519296, 8192);
-f(4398583390209, 8193);
-f(8795556167679, 16383);
-f(8796093038592, 16384);
-f(8796629909505, 16385);
-f(17591649206271, 32767);
-f(17592186077184, 32768);
-f(17592722948097, 32769);
-f(35183835283455, 65535);
-f(35184372154368, 65536);
-f(35184909025281, 65537);
-f(70368207437823, 131071);
-f(70368744308736, 131072);
-f(70369281179649, 131073);
-f(140736951746559, 262143);
-f(140737488617472, 262144);
-f(140738025488385, 262145);
-f(281474440364031, 524287);
-f(281474977234944, 524288);
-f(281475514105857, 524289);
-f(562949417598975, 1048575);
-f(562949954469888, 1048576);
-f(562950491340801, 1048577);
-f(1125899372068863, 2097151);
-f(1125899908939776, 2097152);
-f(1125900445810689, 2097153);
-x = 1073741823;
-f(0, 0);
-f(1073741823, 1);
-f(2147483646, 2);
-f(3221225469, 3);
-f(4294967292, 4);
-f(5368709115, 5);
-f(7516192761, 7);
-f(8589934584, 8);
-f(9663676407, 9);
-f(16106127345, 15);
-f(17179869168, 16);
-f(18253610991, 17);
-f(33285996513, 31);
-f(34359738336, 32);
-f(35433480159, 33);
-f(67645734849, 63);
-f(68719476672, 64);
-f(69793218495, 65);
-f(136365211521, 127);
-f(137438953344, 128);
-f(138512695167, 129);
-f(273804164865, 255);
-f(274877906688, 256);
-f(275951648511, 257);
-f(548682071553, 511);
-f(549755813376, 512);
-f(550829555199, 513);
-f(1098437884929, 1023);
-f(1099511626752, 1024);
-f(1100585368575, 1025);
-f(2197949511681, 2047);
-f(2199023253504, 2048);
-f(2200096995327, 2049);
-f(4396972765185, 4095);
-f(4398046507008, 4096);
-f(4399120248831, 4097);
-f(8795019272193, 8191);
-f(8796093014016, 8192);
-f(8797166755839, 8193);
-f(17591112286209, 16383);
-f(17592186028032, 16384);
-f(17593259769855, 16385);
-f(35183298314241, 32767);
-f(35184372056064, 32768);
-f(35185445797887, 32769);
-f(70367670370305, 65535);
-f(70368744112128, 65536);
-f(70369817853951, 65537);
-f(140736414482433, 131071);
-f(140737488224256, 131072);
-f(140738561966079, 131073);
-f(281473902706689, 262143);
-f(281474976448512, 262144);
-f(281476050190335, 262145);
-f(562948879155201, 524287);
-f(562949952897024, 524288);
-f(562951026638847, 524289);
-f(1125898832052225, 1048575);
-f(1125899905794048, 1048576);
-f(1125900979535871, 1048577);
-x = 1073741824;
-f(0, 0);
-f(1073741824, 1);
-f(2147483648, 2);
-f(3221225472, 3);
-f(4294967296, 4);
-f(5368709120, 5);
-f(7516192768, 7);
-f(8589934592, 8);
-f(9663676416, 9);
-f(16106127360, 15);
-f(17179869184, 16);
-f(18253611008, 17);
-f(33285996544, 31);
-f(34359738368, 32);
-f(35433480192, 33);
-f(67645734912, 63);
-f(68719476736, 64);
-f(69793218560, 65);
-f(136365211648, 127);
-f(137438953472, 128);
-f(138512695296, 129);
-f(273804165120, 255);
-f(274877906944, 256);
-f(275951648768, 257);
-f(548682072064, 511);
-f(549755813888, 512);
-f(550829555712, 513);
-f(1098437885952, 1023);
-f(1099511627776, 1024);
-f(1100585369600, 1025);
-f(2197949513728, 2047);
-f(2199023255552, 2048);
-f(2200096997376, 2049);
-f(4396972769280, 4095);
-f(4398046511104, 4096);
-f(4399120252928, 4097);
-f(8795019280384, 8191);
-f(8796093022208, 8192);
-f(8797166764032, 8193);
-f(17591112302592, 16383);
-f(17592186044416, 16384);
-f(17593259786240, 16385);
-f(35183298347008, 32767);
-f(35184372088832, 32768);
-f(35185445830656, 32769);
-f(70367670435840, 65535);
-f(70368744177664, 65536);
-f(70369817919488, 65537);
-f(140736414613504, 131071);
-f(140737488355328, 131072);
-f(140738562097152, 131073);
-f(281473902968832, 262143);
-f(281474976710656, 262144);
-f(281476050452480, 262145);
-f(562948879679488, 524287);
-f(562949953421312, 524288);
-f(562951027163136, 524289);
-f(1125898833100800, 1048575);
-f(1125899906842624, 1048576);
-f(1125900980584448, 1048577);
-x = 1073741825;
-f(0, 0);
-f(1073741825, 1);
-f(2147483650, 2);
-f(3221225475, 3);
-f(4294967300, 4);
-f(5368709125, 5);
-f(7516192775, 7);
-f(8589934600, 8);
-f(9663676425, 9);
-f(16106127375, 15);
-f(17179869200, 16);
-f(18253611025, 17);
-f(33285996575, 31);
-f(34359738400, 32);
-f(35433480225, 33);
-f(67645734975, 63);
-f(68719476800, 64);
-f(69793218625, 65);
-f(136365211775, 127);
-f(137438953600, 128);
-f(138512695425, 129);
-f(273804165375, 255);
-f(274877907200, 256);
-f(275951649025, 257);
-f(548682072575, 511);
-f(549755814400, 512);
-f(550829556225, 513);
-f(1098437886975, 1023);
-f(1099511628800, 1024);
-f(1100585370625, 1025);
-f(2197949515775, 2047);
-f(2199023257600, 2048);
-f(2200096999425, 2049);
-f(4396972773375, 4095);
-f(4398046515200, 4096);
-f(4399120257025, 4097);
-f(8795019288575, 8191);
-f(8796093030400, 8192);
-f(8797166772225, 8193);
-f(17591112318975, 16383);
-f(17592186060800, 16384);
-f(17593259802625, 16385);
-f(35183298379775, 32767);
-f(35184372121600, 32768);
-f(35185445863425, 32769);
-f(70367670501375, 65535);
-f(70368744243200, 65536);
-f(70369817985025, 65537);
-f(140736414744575, 131071);
-f(140737488486400, 131072);
-f(140738562228225, 131073);
-f(281473903230975, 262143);
-f(281474976972800, 262144);
-f(281476050714625, 262145);
-f(562948880203775, 524287);
-f(562949953945600, 524288);
-f(562951027687425, 524289);
-f(1125898834149375, 1048575);
-f(1125899907891200, 1048576);
-f(1125900981633025, 1048577);
-x = 2147483647;
-f(0, 0);
-f(2147483647, 1);
-f(4294967294, 2);
-f(6442450941, 3);
-f(8589934588, 4);
-f(10737418235, 5);
-f(15032385529, 7);
-f(17179869176, 8);
-f(19327352823, 9);
-f(32212254705, 15);
-f(34359738352, 16);
-f(36507221999, 17);
-f(66571993057, 31);
-f(68719476704, 32);
-f(70866960351, 33);
-f(135291469761, 63);
-f(137438953408, 64);
-f(139586437055, 65);
-f(272730423169, 127);
-f(274877906816, 128);
-f(277025390463, 129);
-f(547608329985, 255);
-f(549755813632, 256);
-f(551903297279, 257);
-f(1097364143617, 511);
-f(1099511627264, 512);
-f(1101659110911, 513);
-f(2196875770881, 1023);
-f(2199023254528, 1024);
-f(2201170738175, 1025);
-f(4395899025409, 2047);
-f(4398046509056, 2048);
-f(4400193992703, 2049);
-f(8793945534465, 4095);
-f(8796093018112, 4096);
-f(8798240501759, 4097);
-f(17590038552577, 8191);
-f(17592186036224, 8192);
-f(17594333519871, 8193);
-f(35182224588801, 16383);
-f(35184372072448, 16384);
-f(35186519556095, 16385);
-f(70366596661249, 32767);
-f(70368744144896, 32768);
-f(70370891628543, 32769);
-f(140735340806145, 65535);
-f(140737488289792, 65536);
-f(140739635773439, 65537);
-f(281472829095937, 131071);
-f(281474976579584, 131072);
-f(281477124063231, 131073);
-f(562947805675521, 262143);
-f(562949953159168, 262144);
-f(562952100642815, 262145);
-f(1125897758834689, 524287);
-f(1125899906318336, 524288);
-f(1125902053801983, 524289);
-x = 2147483648;
-f(0, 0);
-f(2147483648, 1);
-f(4294967296, 2);
-f(6442450944, 3);
-f(8589934592, 4);
-f(10737418240, 5);
-f(15032385536, 7);
-f(17179869184, 8);
-f(19327352832, 9);
-f(32212254720, 15);
-f(34359738368, 16);
-f(36507222016, 17);
-f(66571993088, 31);
-f(68719476736, 32);
-f(70866960384, 33);
-f(135291469824, 63);
-f(137438953472, 64);
-f(139586437120, 65);
-f(272730423296, 127);
-f(274877906944, 128);
-f(277025390592, 129);
-f(547608330240, 255);
-f(549755813888, 256);
-f(551903297536, 257);
-f(1097364144128, 511);
-f(1099511627776, 512);
-f(1101659111424, 513);
-f(2196875771904, 1023);
-f(2199023255552, 1024);
-f(2201170739200, 1025);
-f(4395899027456, 2047);
-f(4398046511104, 2048);
-f(4400193994752, 2049);
-f(8793945538560, 4095);
-f(8796093022208, 4096);
-f(8798240505856, 4097);
-f(17590038560768, 8191);
-f(17592186044416, 8192);
-f(17594333528064, 8193);
-f(35182224605184, 16383);
-f(35184372088832, 16384);
-f(35186519572480, 16385);
-f(70366596694016, 32767);
-f(70368744177664, 32768);
-f(70370891661312, 32769);
-f(140735340871680, 65535);
-f(140737488355328, 65536);
-f(140739635838976, 65537);
-f(281472829227008, 131071);
-f(281474976710656, 131072);
-f(281477124194304, 131073);
-f(562947805937664, 262143);
-f(562949953421312, 262144);
-f(562952100904960, 262145);
-f(1125897759358976, 524287);
-f(1125899906842624, 524288);
-f(1125902054326272, 524289);
-x = 2147483649;
-f(0, 0);
-f(2147483649, 1);
-f(4294967298, 2);
-f(6442450947, 3);
-f(8589934596, 4);
-f(10737418245, 5);
-f(15032385543, 7);
-f(17179869192, 8);
-f(19327352841, 9);
-f(32212254735, 15);
-f(34359738384, 16);
-f(36507222033, 17);
-f(66571993119, 31);
-f(68719476768, 32);
-f(70866960417, 33);
-f(135291469887, 63);
-f(137438953536, 64);
-f(139586437185, 65);
-f(272730423423, 127);
-f(274877907072, 128);
-f(277025390721, 129);
-f(547608330495, 255);
-f(549755814144, 256);
-f(551903297793, 257);
-f(1097364144639, 511);
-f(1099511628288, 512);
-f(1101659111937, 513);
-f(2196875772927, 1023);
-f(2199023256576, 1024);
-f(2201170740225, 1025);
-f(4395899029503, 2047);
-f(4398046513152, 2048);
-f(4400193996801, 2049);
-f(8793945542655, 4095);
-f(8796093026304, 4096);
-f(8798240509953, 4097);
-f(17590038568959, 8191);
-f(17592186052608, 8192);
-f(17594333536257, 8193);
-f(35182224621567, 16383);
-f(35184372105216, 16384);
-f(35186519588865, 16385);
-f(70366596726783, 32767);
-f(70368744210432, 32768);
-f(70370891694081, 32769);
-f(140735340937215, 65535);
-f(140737488420864, 65536);
-f(140739635904513, 65537);
-f(281472829358079, 131071);
-f(281474976841728, 131072);
-f(281477124325377, 131073);
-f(562947806199807, 262143);
-f(562949953683456, 262144);
-f(562952101167105, 262145);
-f(1125897759883263, 524287);
-f(1125899907366912, 524288);
-f(1125902054850561, 524289);
-x = 4294967295;
-f(0, 0);
-f(4294967295, 1);
-f(8589934590, 2);
-f(12884901885, 3);
-f(17179869180, 4);
-f(21474836475, 5);
-f(30064771065, 7);
-f(34359738360, 8);
-f(38654705655, 9);
-f(64424509425, 15);
-f(68719476720, 16);
-f(73014444015, 17);
-f(133143986145, 31);
-f(137438953440, 32);
-f(141733920735, 33);
-f(270582939585, 63);
-f(274877906880, 64);
-f(279172874175, 65);
-f(545460846465, 127);
-f(549755813760, 128);
-f(554050781055, 129);
-f(1095216660225, 255);
-f(1099511627520, 256);
-f(1103806594815, 257);
-f(2194728287745, 511);
-f(2199023255040, 512);
-f(2203318222335, 513);
-f(4393751542785, 1023);
-f(4398046510080, 1024);
-f(4402341477375, 1025);
-f(8791798052865, 2047);
-f(8796093020160, 2048);
-f(8800387987455, 2049);
-f(17587891073025, 4095);
-f(17592186040320, 4096);
-f(17596481007615, 4097);
-f(35180077113345, 8191);
-f(35184372080640, 8192);
-f(35188667047935, 8193);
-f(70364449193985, 16383);
-f(70368744161280, 16384);
-f(70373039128575, 16385);
-f(140733193355265, 32767);
-f(140737488322560, 32768);
-f(140741783289855, 32769);
-f(281470681677825, 65535);
-f(281474976645120, 65536);
-f(281479271612415, 65537);
-f(562945658322945, 131071);
-f(562949953290240, 131072);
-f(562954248257535, 131073);
-f(1125895611613185, 262143);
-f(1125899906580480, 262144);
-f(1125904201547775, 262145);
-x = 4294967296;
-f(0, 0);
-f(4294967296, 1);
-f(8589934592, 2);
-f(12884901888, 3);
-f(17179869184, 4);
-f(21474836480, 5);
-f(30064771072, 7);
-f(34359738368, 8);
-f(38654705664, 9);
-f(64424509440, 15);
-f(68719476736, 16);
-f(73014444032, 17);
-f(133143986176, 31);
-f(137438953472, 32);
-f(141733920768, 33);
-f(270582939648, 63);
-f(274877906944, 64);
-f(279172874240, 65);
-f(545460846592, 127);
-f(549755813888, 128);
-f(554050781184, 129);
-f(1095216660480, 255);
-f(1099511627776, 256);
-f(1103806595072, 257);
-f(2194728288256, 511);
-f(2199023255552, 512);
-f(2203318222848, 513);
-f(4393751543808, 1023);
-f(4398046511104, 1024);
-f(4402341478400, 1025);
-f(8791798054912, 2047);
-f(8796093022208, 2048);
-f(8800387989504, 2049);
-f(17587891077120, 4095);
-f(17592186044416, 4096);
-f(17596481011712, 4097);
-f(35180077121536, 8191);
-f(35184372088832, 8192);
-f(35188667056128, 8193);
-f(70364449210368, 16383);
-f(70368744177664, 16384);
-f(70373039144960, 16385);
-f(140733193388032, 32767);
-f(140737488355328, 32768);
-f(140741783322624, 32769);
-f(281470681743360, 65535);
-f(281474976710656, 65536);
-f(281479271677952, 65537);
-f(562945658454016, 131071);
-f(562949953421312, 131072);
-f(562954248388608, 131073);
-f(1125895611875328, 262143);
-f(1125899906842624, 262144);
-f(1125904201809920, 262145);
-x = 4294967297;
-f(0, 0);
-f(4294967297, 1);
-f(8589934594, 2);
-f(12884901891, 3);
-f(17179869188, 4);
-f(21474836485, 5);
-f(30064771079, 7);
-f(34359738376, 8);
-f(38654705673, 9);
-f(64424509455, 15);
-f(68719476752, 16);
-f(73014444049, 17);
-f(133143986207, 31);
-f(137438953504, 32);
-f(141733920801, 33);
-f(270582939711, 63);
-f(274877907008, 64);
-f(279172874305, 65);
-f(545460846719, 127);
-f(549755814016, 128);
-f(554050781313, 129);
-f(1095216660735, 255);
-f(1099511628032, 256);
-f(1103806595329, 257);
-f(2194728288767, 511);
-f(2199023256064, 512);
-f(2203318223361, 513);
-f(4393751544831, 1023);
-f(4398046512128, 1024);
-f(4402341479425, 1025);
-f(8791798056959, 2047);
-f(8796093024256, 2048);
-f(8800387991553, 2049);
-f(17587891081215, 4095);
-f(17592186048512, 4096);
-f(17596481015809, 4097);
-f(35180077129727, 8191);
-f(35184372097024, 8192);
-f(35188667064321, 8193);
-f(70364449226751, 16383);
-f(70368744194048, 16384);
-f(70373039161345, 16385);
-f(140733193420799, 32767);
-f(140737488388096, 32768);
-f(140741783355393, 32769);
-f(281470681808895, 65535);
-f(281474976776192, 65536);
-f(281479271743489, 65537);
-f(562945658585087, 131071);
-f(562949953552384, 131072);
-f(562954248519681, 131073);
-f(1125895612137471, 262143);
-f(1125899907104768, 262144);
-f(1125904202072065, 262145);
-x = 8589934591;
-f(0, 0);
-f(8589934591, 1);
-f(17179869182, 2);
-f(25769803773, 3);
-f(34359738364, 4);
-f(42949672955, 5);
-f(60129542137, 7);
-f(68719476728, 8);
-f(77309411319, 9);
-f(128849018865, 15);
-f(137438953456, 16);
-f(146028888047, 17);
-f(266287972321, 31);
-f(274877906912, 32);
-f(283467841503, 33);
-f(541165879233, 63);
-f(549755813824, 64);
-f(558345748415, 65);
-f(1090921693057, 127);
-f(1099511627648, 128);
-f(1108101562239, 129);
-f(2190433320705, 255);
-f(2199023255296, 256);
-f(2207613189887, 257);
-f(4389456576001, 511);
-f(4398046510592, 512);
-f(4406636445183, 513);
-f(8787503086593, 1023);
-f(8796093021184, 1024);
-f(8804682955775, 1025);
-f(17583596107777, 2047);
-f(17592186042368, 2048);
-f(17600775976959, 2049);
-f(35175782150145, 4095);
-f(35184372084736, 4096);
-f(35192962019327, 4097);
-f(70360154234881, 8191);
-f(70368744169472, 8192);
-f(70377334104063, 8193);
-f(140728898404353, 16383);
-f(140737488338944, 16384);
-f(140746078273535, 16385);
-f(281466386743297, 32767);
-f(281474976677888, 32768);
-f(281483566612479, 32769);
-f(562941363421185, 65535);
-f(562949953355776, 65536);
-f(562958543290367, 65537);
-f(1125891316776961, 131071);
-f(1125899906711552, 131072);
-f(1125908496646143, 131073);
-x = 8589934592;
-f(0, 0);
-f(8589934592, 1);
-f(17179869184, 2);
-f(25769803776, 3);
-f(34359738368, 4);
-f(42949672960, 5);
-f(60129542144, 7);
-f(68719476736, 8);
-f(77309411328, 9);
-f(128849018880, 15);
-f(137438953472, 16);
-f(146028888064, 17);
-f(266287972352, 31);
-f(274877906944, 32);
-f(283467841536, 33);
-f(541165879296, 63);
-f(549755813888, 64);
-f(558345748480, 65);
-f(1090921693184, 127);
-f(1099511627776, 128);
-f(1108101562368, 129);
-f(2190433320960, 255);
-f(2199023255552, 256);
-f(2207613190144, 257);
-f(4389456576512, 511);
-f(4398046511104, 512);
-f(4406636445696, 513);
-f(8787503087616, 1023);
-f(8796093022208, 1024);
-f(8804682956800, 1025);
-f(17583596109824, 2047);
-f(17592186044416, 2048);
-f(17600775979008, 2049);
-f(35175782154240, 4095);
-f(35184372088832, 4096);
-f(35192962023424, 4097);
-f(70360154243072, 8191);
-f(70368744177664, 8192);
-f(70377334112256, 8193);
-f(140728898420736, 16383);
-f(140737488355328, 16384);
-f(140746078289920, 16385);
-f(281466386776064, 32767);
-f(281474976710656, 32768);
-f(281483566645248, 32769);
-f(562941363486720, 65535);
-f(562949953421312, 65536);
-f(562958543355904, 65537);
-f(1125891316908032, 131071);
-f(1125899906842624, 131072);
-f(1125908496777216, 131073);
-x = 8589934593;
-f(0, 0);
-f(8589934593, 1);
-f(17179869186, 2);
-f(25769803779, 3);
-f(34359738372, 4);
-f(42949672965, 5);
-f(60129542151, 7);
-f(68719476744, 8);
-f(77309411337, 9);
-f(128849018895, 15);
-f(137438953488, 16);
-f(146028888081, 17);
-f(266287972383, 31);
-f(274877906976, 32);
-f(283467841569, 33);
-f(541165879359, 63);
-f(549755813952, 64);
-f(558345748545, 65);
-f(1090921693311, 127);
-f(1099511627904, 128);
-f(1108101562497, 129);
-f(2190433321215, 255);
-f(2199023255808, 256);
-f(2207613190401, 257);
-f(4389456577023, 511);
-f(4398046511616, 512);
-f(4406636446209, 513);
-f(8787503088639, 1023);
-f(8796093023232, 1024);
-f(8804682957825, 1025);
-f(17583596111871, 2047);
-f(17592186046464, 2048);
-f(17600775981057, 2049);
-f(35175782158335, 4095);
-f(35184372092928, 4096);
-f(35192962027521, 4097);
-f(70360154251263, 8191);
-f(70368744185856, 8192);
-f(70377334120449, 8193);
-f(140728898437119, 16383);
-f(140737488371712, 16384);
-f(140746078306305, 16385);
-f(281466386808831, 32767);
-f(281474976743424, 32768);
-f(281483566678017, 32769);
-f(562941363552255, 65535);
-f(562949953486848, 65536);
-f(562958543421441, 65537);
-f(1125891317039103, 131071);
-f(1125899906973696, 131072);
-f(1125908496908289, 131073);
-x = 17179869183;
-f(0, 0);
-f(17179869183, 1);
-f(34359738366, 2);
-f(51539607549, 3);
-f(68719476732, 4);
-f(85899345915, 5);
-f(120259084281, 7);
-f(137438953464, 8);
-f(154618822647, 9);
-f(257698037745, 15);
-f(274877906928, 16);
-f(292057776111, 17);
-f(532575944673, 31);
-f(549755813856, 32);
-f(566935683039, 33);
-f(1082331758529, 63);
-f(1099511627712, 64);
-f(1116691496895, 65);
-f(2181843386241, 127);
-f(2199023255424, 128);
-f(2216203124607, 129);
-f(4380866641665, 255);
-f(4398046510848, 256);
-f(4415226380031, 257);
-f(8778913152513, 511);
-f(8796093021696, 512);
-f(8813272890879, 513);
-f(17575006174209, 1023);
-f(17592186043392, 1024);
-f(17609365912575, 1025);
-f(35167192217601, 2047);
-f(35184372086784, 2048);
-f(35201551955967, 2049);
-f(70351564304385, 4095);
-f(70368744173568, 4096);
-f(70385924042751, 4097);
-f(140720308477953, 8191);
-f(140737488347136, 8192);
-f(140754668216319, 8193);
-f(281457796825089, 16383);
-f(281474976694272, 16384);
-f(281492156563455, 16385);
-f(562932773519361, 32767);
-f(562949953388544, 32768);
-f(562967133257727, 32769);
-f(1125882726907905, 65535);
-f(1125899906777088, 65536);
-f(1125917086646271, 65537);
-x = 17179869184;
-f(0, 0);
-f(17179869184, 1);
-f(34359738368, 2);
-f(51539607552, 3);
-f(68719476736, 4);
-f(85899345920, 5);
-f(120259084288, 7);
-f(137438953472, 8);
-f(154618822656, 9);
-f(257698037760, 15);
-f(274877906944, 16);
-f(292057776128, 17);
-f(532575944704, 31);
-f(549755813888, 32);
-f(566935683072, 33);
-f(1082331758592, 63);
-f(1099511627776, 64);
-f(1116691496960, 65);
-f(2181843386368, 127);
-f(2199023255552, 128);
-f(2216203124736, 129);
-f(4380866641920, 255);
-f(4398046511104, 256);
-f(4415226380288, 257);
-f(8778913153024, 511);
-f(8796093022208, 512);
-f(8813272891392, 513);
-f(17575006175232, 1023);
-f(17592186044416, 1024);
-f(17609365913600, 1025);
-f(35167192219648, 2047);
-f(35184372088832, 2048);
-f(35201551958016, 2049);
-f(70351564308480, 4095);
-f(70368744177664, 4096);
-f(70385924046848, 4097);
-f(140720308486144, 8191);
-f(140737488355328, 8192);
-f(140754668224512, 8193);
-f(281457796841472, 16383);
-f(281474976710656, 16384);
-f(281492156579840, 16385);
-f(562932773552128, 32767);
-f(562949953421312, 32768);
-f(562967133290496, 32769);
-f(1125882726973440, 65535);
-f(1125899906842624, 65536);
-f(1125917086711808, 65537);
-x = 17179869185;
-f(0, 0);
-f(17179869185, 1);
-f(34359738370, 2);
-f(51539607555, 3);
-f(68719476740, 4);
-f(85899345925, 5);
-f(120259084295, 7);
-f(137438953480, 8);
-f(154618822665, 9);
-f(257698037775, 15);
-f(274877906960, 16);
-f(292057776145, 17);
-f(532575944735, 31);
-f(549755813920, 32);
-f(566935683105, 33);
-f(1082331758655, 63);
-f(1099511627840, 64);
-f(1116691497025, 65);
-f(2181843386495, 127);
-f(2199023255680, 128);
-f(2216203124865, 129);
-f(4380866642175, 255);
-f(4398046511360, 256);
-f(4415226380545, 257);
-f(8778913153535, 511);
-f(8796093022720, 512);
-f(8813272891905, 513);
-f(17575006176255, 1023);
-f(17592186045440, 1024);
-f(17609365914625, 1025);
-f(35167192221695, 2047);
-f(35184372090880, 2048);
-f(35201551960065, 2049);
-f(70351564312575, 4095);
-f(70368744181760, 4096);
-f(70385924050945, 4097);
-f(140720308494335, 8191);
-f(140737488363520, 8192);
-f(140754668232705, 8193);
-f(281457796857855, 16383);
-f(281474976727040, 16384);
-f(281492156596225, 16385);
-f(562932773584895, 32767);
-f(562949953454080, 32768);
-f(562967133323265, 32769);
-f(1125882727038975, 65535);
-f(1125899906908160, 65536);
-f(1125917086777345, 65537);
diff --git a/src/3rdparty/v8/test/mjsunit/new-function.js b/src/3rdparty/v8/test/mjsunit/new-function.js
new file mode 100644
index 0000000..9e8cc27
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/new-function.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x;
+try {
+ Function("}), x = this, (function() {");
+} catch(e) {
+ print("Caught " + e);
+}
+assertTrue(x == "[object global]");
diff --git a/src/3rdparty/v8/test/mjsunit/numops-fuzz-part1.js b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part1.js
new file mode 100644
index 0000000..8e98ae6
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part1.js
@@ -0,0 +1,1172 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function f() {
+ var x = 0;
+ var tmp = 0;
+ assertEquals(0, x /= (tmp = 798469700.4090232, tmp));
+ assertEquals(0, x *= (2714102322.365509));
+ assertEquals(0, x *= x);
+ assertEquals(139516372, x -= (tmp = -139516372, tmp));
+ assertEquals(1, x /= (x%(2620399703.344006)));
+ assertEquals(0, x >>>= x);
+ assertEquals(-2772151192.8633175, x -= (tmp = 2772151192.8633175, tmp));
+ assertEquals(-2786298206.8633175, x -= (14147014));
+ assertEquals(1509750523, x |= ((1073767916)-(tmp = 919311632.2789925, tmp)));
+ assertEquals(2262404051.926751, x += ((752653528.9267509)%x));
+ assertEquals(-270926893, x |= (tmp = 1837232194, tmp));
+ assertEquals(0.17730273401688765, x /= ((tmp = -2657202795, tmp)-(((((x|(tmp = -1187733892.282897, tmp))-x)<<(556523578))-x)+(-57905508.42881298))));
+ assertEquals(122483.56550261026, x *= ((((tmp = 2570017060.15193, tmp)%((-1862621126.9968336)>>x))>>(x>>(tmp = 2388674677, tmp)))>>>(-2919657526.470434)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= (tmp = 2705124845.0455265, tmp));
+ assertEquals(0, x &= (-135286835.07069612));
+ assertEquals(-0, x *= ((tmp = -165810479.10020828, tmp)|x));
+ assertEquals(248741888, x += ((735976871.1308595)<<(-2608055185.0700903)));
+ assertEquals(139526144, x &= (tmp = -1454301068, tmp));
+ assertEquals(-0.047221345672746884, x /= (tmp = -2954726130.994727, tmp));
+ assertEquals(0, x <<= (x>>x));
+ assertEquals(0, x >>>= ((x+(912111201.488966))-(tmp = 1405800042.6070075, tmp)));
+ assertEquals(-1663642733, x |= (((-1663642733.5700119)<<(x^x))<<x));
+ assertEquals(-914358272, x <<= ((((-308411676)-(-618261840.9113789))%(-68488626.58621716))-x));
+ assertEquals(-1996488704, x &= (-1358622641.5848842));
+ assertEquals(-345978263, x += (1650510441));
+ assertEquals(3, x >>>= (-1106714178.701668));
+ assertEquals(1, x %= (((x>>(x>>(tmp = -3052773846.817114, tmp)))*(tmp = 1659218887.379526, tmp))&x));
+ assertEquals(-943225672, x += (-943225673));
+ assertEquals(-0.41714300120060854, x /= (tmp = 2261156652, tmp));
+ assertEquals(0, x >>>= ((3107060934.8863482)<<(tmp = 1902730887, tmp)));
+ assertEquals(0, x &= x);
+ assertEquals(1476628, x |= ((tmp = -2782899841.390033, tmp)>>>(2097653770)));
+ assertEquals(0.0008887648921591833, x /= ((tmp = 1661438264.5253348, tmp)%((tmp = 2555939813, tmp)*(-877024323.6515315))));
+ assertEquals(0, x <<= (tmp = -2366551345, tmp));
+ assertEquals(0, x &= (tmp = 1742843591, tmp));
+ assertEquals(0, x -= x);
+ assertEquals(4239, x += ((-3183564176.232031)>>>(349622674.1255014)));
+ assertEquals(-67560, x -= ((2352742295)>>>x));
+ assertEquals(-67560, x &= x);
+ assertEquals(-0.00003219917807302283, x /= (2098190203.699741));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= ((((tmp = -869086522.8358297, tmp)/(187820779))-(tmp = -2000970995.1931965, tmp))|(1853528755.6064696)));
+ assertEquals(0, x >>= (-3040509919));
+ assertEquals(0, x %= (((tmp = -2386688049.194946, tmp)<<(tmp = -669711391, tmp))|x));
+ assertEquals(0, x %= (tmp = -298431511.4839926, tmp));
+ assertEquals(0, x /= (2830845091.2793818));
+ assertEquals(0, x /= ((((-2529926178)|x)^((tmp = 2139313707.0894063, tmp)%((-1825768525.0541775)-(-952600362.7758243))))+x));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x -= x);
+ assertEquals(NaN, x /= (tmp = -432944480, tmp));
+ assertEquals(0, x <<= (((((x^((-1777523727)+(2194962794)))>>>(((((-590335134.8224905)%(x*(2198198974)))|(tmp = -2068556796, tmp))/(1060765637))*(-147051676)))/((tmp = -477350113.92686677, tmp)<<((x/(2018712621.0397925))^((tmp = 491163813.3921983, tmp)+(((x|((((x%(1990073256.812654))%((-2024388518.9599915)>>((tmp = 223182187, tmp)*(-722241065))))>>>(tmp = 2517147885.305745, tmp))%(1189996239.11222)))&x)%(-306932860))))))&((tmp = 1117802724.485684, tmp)+((-1391614045)-x)))%((((x>>((2958453447)*x))^(((410825859)|(((tmp = -1119269292.5495896, tmp)>>>(((((((x%(tmp = 648541746.6059314, tmp))*((-2304508480)<<((((x^(1408199888.1454597))|((251623937)|x))/((-382389946.9984102)|(tmp = -2082681143.5893767, tmp)))-(((tmp = 631243472, tmp)>>>(1407556544))/(((x>>>x)>>>(tmp = -6329025.47865057, tmp))>>>(tmp = 948664752.543093, tmp))))))/((((-183248880)>>x)&x)&x))>>x)&(((-978737284.8492057)%(tmp = 2983300011.737006, tmp))&(tmp = 2641937234.2954116, tmp)))<<x)>>(2795416632.9722223)))%((((tmp = -50926632, tmp)/x)&(((tmp = -2510786916, tmp)/x)/(-699755674)))|((((tmp = 1411792593, tmp)>>(924286570.2637128))>>((1609997725)>>(2735658951.0762663)))*(tmp = 726205435, tmp)))))<<(tmp = -2135055357.3156831, tmp)))/(tmp = 1408695065, tmp))^(tmp = -1343267739.8562133, tmp))));
+ assertEquals(0, x %= (-437232116));
+ assertEquals(-2463314518.2747326, x -= (2463314518.2747326));
+ assertEquals(109, x >>= (2401429560));
+ assertEquals(-2687641732.0253763, x += (-2687641841.0253763));
+ assertEquals(-2336375490019484000, x *= (tmp = 869303174.6678596, tmp));
+ assertEquals(5.458650430363785e+36, x *= x);
+ assertEquals(0, x |= ((((-1676972008.797291)*x)*((tmp = 2606991807, tmp)-x))<<x));
+ assertEquals(0, x &= ((-3053393759.3496876)+(-1431008367)));
+ assertEquals(-856728369, x |= (x-(((((764337872)/x)<<((x|(((tmp = 1409368192.1268077, tmp)+(tmp = -848083676, tmp))|(-2797102463.7915916)))^x))/x)^(tmp = 856728369.0589117, tmp))));
+ assertEquals(-0, x %= x);
+ assertEquals(1116550103, x ^= (-3178417193));
+ assertEquals(1116550103, x %= (tmp = -1482481942, tmp));
+ assertEquals(133, x >>>= x);
+ assertEquals(-1.381429241671034e-7, x /= ((tmp = -962771116.8101778, tmp)^x));
+ assertEquals(-1092268961, x |= ((tmp = 3202672531, tmp)-((x-(tmp = 845529357, tmp))>>(tmp = -868680593, tmp))));
+ assertEquals(-1092268961, x %= (tmp = 2670840415.304719, tmp));
+ assertEquals(-122794480, x %= (tmp = 969474481, tmp));
+ assertEquals(-297606521542193600, x *= (2423614820));
+ assertEquals(72460064, x >>>= (tmp = -1230798655, tmp));
+ assertEquals(-203714325373689600, x *= (-2811401400));
+ assertEquals(2154914048, x >>>= (((2241377026.001436)/x)+x));
+ assertEquals(1177864081, x ^= (tmp = -968513903, tmp));
+ assertEquals(35947664, x &= (-2086226758.2704995));
+ assertEquals(20795732539020670, x += (x*(578500247)));
+ assertEquals(-892004992, x >>= x);
+ assertEquals(-7023661.354330708, x /= ((((((1740714214)%((tmp = -459699286, tmp)+(tmp = -1700187400, tmp)))>>(tmp = -3170295237, tmp))+(tmp = -497509780, tmp))+((1971976144.6197853)+(661992813.6077721)))>>>(-1683802728)));
+ assertEquals(-1634205696, x <<= x);
+ assertEquals(-7, x >>= (-3187653764.930914));
+ assertEquals(-5.095345981491203, x -= ((tmp = 748315289, tmp)/(tmp = -392887780, tmp)));
+ assertEquals(1486531570, x &= (1486531570.9300508));
+ assertEquals(5670, x >>= (((tmp = -2486758205.26425, tmp)*(732510414))|x));
+ assertEquals(5670, x >>= (((-1811879946.2553763)%(1797475764))/(((tmp = -2159923884, tmp)|x)+(tmp = -1774410807, tmp))));
+ assertEquals(38, x %= (x>>>x));
+ assertEquals(-151134215, x ^= (((tmp = -2593085609.5622163, tmp)+((tmp = -814992345.7516887, tmp)-(534809571)))|(tmp = -232678571, tmp)));
+ assertEquals(-234881024, x <<= x);
+ assertEquals(-234881024, x <<= (x>>>x));
+ assertEquals(55169095435288580, x *= x);
+ assertEquals(0, x >>= (tmp = 1176612256, tmp));
+ assertEquals(0, x <<= (1321866341.2486475));
+ assertEquals(0, x %= (x-(-602577995)));
+ assertEquals(0, x >>>= (((((tmp = -125628635.79970193, tmp)^(tmp = 1294209955.229382, tmp))&(((tmp = -2353256654.0725203, tmp)|((-1136743028.9425385)|((((950703429.1110399)-(x>>>x))/((((x%(-252705869.21126103))/((tmp = 886957620, tmp)<<(x%((tmp = -1952249741, tmp)*(tmp = -1998149844, tmp)))))|(tmp = 1933366713, tmp))|((tmp = -2957141565, tmp)>>>(tmp = 1408598804, tmp))))+(((((((-2455002047.4910946)%(tmp = -528017836, tmp))&((-2693432769)/(tmp = 2484427670.9045153, tmp)))%(-356969659))-((((((tmp = 3104828644.0753174, tmp)%(x>>>(tmp = 820832137.8175925, tmp)))*((tmp = 763080553.9260503, tmp)+(3173597855)))<<(((-510785437)^x)<<(x|(((x*(x%((tmp = -1391951515, tmp)/x)))-x)|(x-((-522681793.93221474)/((2514619703.2162743)*(2936688324))))))))|x)>>>(-2093210042)))&(763129279.3651779))&x))))-x))%(((-1331164821)&(tmp = 1342684586, tmp))<<(x<<(tmp = 2675008614.588005, tmp))))>>((2625292569.8984914)+(-3185992401))));
+ assertEquals(0, x *= (tmp = 671817215.1147974, tmp));
+ assertEquals(-1608821121, x ^= ((tmp = 2686146175.04077, tmp)>>>x));
+ assertEquals(-0, x %= x);
+ assertEquals(-0, x /= ((tmp = 286794551.0720866, tmp)|(x%x)));
+ assertEquals(0, x <<= (x|(tmp = 1095503996.2285218, tmp)));
+ assertEquals(443296752, x ^= (443296752));
+ assertEquals(110824188, x >>= ((184708570)>>(x&x)));
+ assertEquals(0.7908194935161674, x /= ((((167151154.63381648)&((tmp = -1434120690, tmp)-(tmp = 2346173080, tmp)))/(56656051.87305987))^(140138414)));
+ assertEquals(-0.9027245492678485, x *= ((tmp = 1724366578, tmp)/(((2979477411)<<(((897038568)>>(tmp = 348960298, tmp))%(281056223.2037884)))^((((-1383133388)-(((-1379748375)-((x>>(x&(tmp = 2456582046, tmp)))>>>(-2923911755.565961)))&x))<<(-2825791731))^(tmp = -1979992970, tmp)))));
+ assertEquals(0, x &= (2482304279));
+ assertEquals(-0, x *= (-2284213673));
+ assertEquals(0, x <<= ((2874381218.015819)|x));
+ assertEquals(0, x *= (x>>>(tmp = 2172786480, tmp)));
+ assertEquals(0, x &= (-1638727867.2978938));
+ assertEquals(0, x %= ((tmp = -2213947368.285817, tmp)>>x));
+ assertEquals(0, x >>>= (tmp = -531324706, tmp));
+ assertEquals(0, x %= (tmp = -2338792486, tmp));
+ assertEquals(0, x <<= (((tmp = 351012164, tmp)<<(x|((tmp = -3023836638.5337825, tmp)^(-2678806692))))|x));
+ assertEquals(0, x %= (x-(tmp = -3220231305.45039, tmp)));
+ assertEquals(0, x <<= (-2132833261));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x %= ((2544970469)+(((-2633093458.5911965)&(644108176))-(x>>>(tmp = -949043718, tmp)))));
+ assertEquals(-2750531265, x += (-2750531265));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x *= ((tmp = 1299005700, tmp)-x));
+ assertEquals(0, x >>= x);
+ assertEquals(-1785515304, x -= (((((-806054462.5563161)/x)>>>x)+(1785515304))|((tmp = 2937069788.9396844, tmp)/x)));
+ assertEquals(-3810117159.173689, x -= (2024601855.1736891));
+ assertEquals(-6.276064139320051, x /= (607087033.3053156));
+ assertEquals(134217727, x >>>= (((x%(tmp = 924293127, tmp))^x)|((x>>>(x&((((tmp = -413386639, tmp)/(x>>(tmp = 599075308.8479941, tmp)))^(tmp = -1076703198, tmp))*((tmp = -2239117284, tmp)>>(655036983)))))-x)));
+ assertEquals(134217727, x %= (tmp = 2452642261.038778, tmp));
+ assertEquals(-569504740360507, x *= ((tmp = -1086243941, tmp)>>(tmp = 1850668904.4885683, tmp)));
+ assertEquals(113378806, x >>>= (tmp = -2558233435, tmp));
+ assertEquals(979264375, x -= (((x>>(1950008052))%((2917183569.0209)*(tmp = 1184250640.446752, tmp)))|((((tmp = -691875212, tmp)-(-2872881803))>>(tmp = 44162204.97461021, tmp))^(tmp = 865885647, tmp))));
+ assertEquals(-1127813632, x <<= ((((tmp = -2210499281, tmp)>>>x)-(tmp = 2359697240, tmp))-x));
+ assertEquals(-1707799657, x ^= (653518231.3995534));
+ assertEquals(2916579668449318000, x *= x);
+ assertEquals(2916579669254640600, x += (x&(tmp = 2986558026.399422, tmp)));
+ assertEquals(870995175, x ^= (2598813927.8991632));
+ assertEquals(870995175, x %= (-2857038782));
+ assertEquals(1869503575895591000, x *= (x|(x|(((tmp = 2478650307.4118147, tmp)*((tmp = 2576240847.476932, tmp)>>>x))<<x))));
+ assertEquals(-134947790, x |= ((tmp = 1150911808, tmp)*((2847735464)/(-2603172652.929262))));
+ assertEquals(-137053182, x -= ((tmp = 2155921819.0929346, tmp)>>>(x-(((-1960937402)-(-1907735074.2875962))%((1827808310)^(tmp = -2788307127, tmp))))));
+ assertEquals(-134824702, x |= (((2912578752.2395406)^(x%(((-2585660111.0638976)<<(((((tmp = 747742706, tmp)%(-1630261205))&((((x|(x|(-2619903144.278758)))|((2785710568.8651934)>>((-968301967.5982246)<<(x&x))))>>((x>>>((x>>>(tmp = -1402085797.0310762, tmp))*((tmp = -323729645.2250068, tmp)<<(tmp = 2234667799, tmp))))>>>(-167003745)))>>((924665972.4681011)<<x)))>>>x)<<((((x+x)+x)-(((tmp = 2399203431.0526247, tmp)-(-2872533271))-(((tmp = 914778794.2087344, tmp)-(tmp = 806353942.9502392, tmp))|(((tmp = 262924334.99231672, tmp)&x)|(tmp = -460248836.5602243, tmp)))))/x)))%((-1681000689)/(tmp = -2805054623.654228, tmp)))))*(tmp = 957346233.9619625, tmp)));
+ assertEquals(-3274838, x %= ((((tmp = 3155450543.3524327, tmp)>>>x)<<(tmp = 2103079652.3410985, tmp))>>x));
+ assertEquals(-3274838, x |= ((((tmp = 2148004645.639173, tmp)>>>(tmp = -1285119223, tmp))<<(((((-711596054)>>>(tmp = -2779776371.3473206, tmp))^(((((tmp = -1338880329.383915, tmp)<<((-1245247254.477341)>>x))*(tmp = -2649052844.20065, tmp))>>((1734345880.4600453)%(x/(2723093117.118899))))*(1252918475.3285656)))<<(2911356885))^x))<<(-1019761103)));
+ assertEquals(1703281954, x &= (((tmp = 1036570471.7412028, tmp)+((tmp = 3043119517, tmp)%(2374310816.8346715)))%(tmp = -2979155076, tmp)));
+ assertEquals(1741588391, x |= ((tmp = 1230009575.6003838, tmp)>>>(-1247515003.8152597)));
+ assertEquals(72869474.64782429, x %= (tmp = 1668718916.3521757, tmp));
+ assertEquals(770936242.104203, x += (698066767.4563787));
+ assertEquals(-0.2820604726420833, x /= (tmp = -2733230342, tmp));
+ assertEquals(403480578, x |= ((969730374)&(tmp = 1577889835, tmp)));
+ assertEquals(-1669557233, x ^= ((-1616812135)+(tmp = -456209292, tmp)));
+ assertEquals(-1630427, x >>= ((2327783031.1175823)/(226947662.4579488)));
+ assertEquals(131022, x >>>= ((tmp = -1325018897.2482083, tmp)>>(x&((((((-1588579772.9240348)<<(tmp = -1775580288.356329, tmp))<<(tmp = -1021528325.2075481, tmp))>>((tmp = 2373033451.079956, tmp)*(tmp = 810304612, tmp)))-((tmp = -639152097, tmp)<<(tmp = 513879484, tmp)))&(2593958513)))));
+ assertEquals(1, x >>= ((3033200222)-x));
+ assertEquals(-561146816.4851823, x += (tmp = -561146817.4851823, tmp));
+ assertEquals(-4.347990105831158, x /= ((((-1270435902)*x)%((tmp = 637328492.7386824, tmp)-(x>>(-749100689))))%(x+x)));
+ assertEquals(-1, x >>= x);
+ assertEquals(1, x *= x);
+ assertEquals(111316849706694460, x += ((966274056)*(x|(115202150))));
+ assertEquals(-1001883840, x >>= x);
+ assertEquals(-1001883840, x &= x);
+ assertEquals(-3006880758, x += ((((-2275110637.4054556)/((x+(tmp = -1390035090.4324536, tmp))>>(-5910593)))&(tmp = 378982420, tmp))|(tmp = 2289970378.568629, tmp)));
+ assertEquals(314474, x >>>= (x>>((tmp = -228007336.31281257, tmp)%(tmp = 1127648013, tmp))));
+ assertEquals(-17694827, x ^= ((tmp = 2095133598.1849852, tmp)|(-1978322311)));
+ assertEquals(1, x /= x);
+ assertEquals(1, x %= (-2323617209.7531185));
+ assertEquals(0, x >>>= (x*(tmp = -1574455400.489434, tmp)));
+ assertEquals(0, x >>= (3131854684));
+ assertEquals(2853609824, x += ((-231012098)-(tmp = -3084621922, tmp)));
+ assertEquals(8143089027629311000, x *= x);
+ assertEquals(313052685, x ^= (tmp = 2962303501, tmp));
+ assertEquals(4776, x >>= (tmp = 2271457232, tmp));
+ assertEquals(0.000002812258572702285, x /= (tmp = 1698279115, tmp));
+ assertEquals(0, x >>>= (tmp = 1698465782.0927145, tmp));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x |= ((x<<((-1824760240.3040407)<<(2798263764.39145)))&(tmp = 1795988253.0493627, tmp)));
+ assertEquals(1782206945, x ^= (-2512760351.7881565));
+ assertEquals(7610569113843172000, x *= (((tmp = -44415823.92972565, tmp)&(tmp = 1402483498.9421625, tmp))+(tmp = 2909778666, tmp)));
+ assertEquals(15221138227873292000, x += (x-(tmp = -186948658.394145, tmp)));
+ assertEquals(0, x -= x);
+ assertEquals(-2238823252, x -= ((tmp = 2238823252, tmp)+x));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= (2976069570));
+ assertEquals(0, x >>= ((tmp = -2358157433, tmp)/x));
+ assertEquals(-949967713, x ^= (tmp = -949967713, tmp));
+ assertEquals(-1, x >>= x);
+ assertEquals(-1522291702.1977966, x *= (1522291702.1977966));
+ assertEquals(-1522291702, x >>= ((((2290279800)|x)|(1793154434.6798015))&((-1161390929.0766077)>>>x)));
+ assertEquals(83894274, x &= (tmp = 1571058486, tmp));
+ assertEquals(43186847.90522933, x += ((tmp = -1131332988.0947707, tmp)%x));
+ assertEquals(0, x >>= (tmp = -1968312707.269359, tmp));
+ assertEquals(0, x &= (2507747643.26175));
+ assertEquals(0, x %= (tmp = 3190525303.366887, tmp));
+ assertEquals(-1968984602, x ^= (((x/(x|(-1607062026.5338054)))<<(tmp = 2207669861.8770065, tmp))+(tmp = 2325982694.956348, tmp)));
+ assertEquals(554, x >>>= (((tmp = -2302283871.993821, tmp)>>>(-3151835112))|(((((x%(-1534374264))/((731246012)<<(((883830997.1194847)<<(((-1337895080.1937215)/(tmp = 3166402571.8157315, tmp))^(tmp = -1563897595.5799441, tmp)))>>(tmp = -556816951.0537591, tmp))))>>(-2682203577))<<(x/((1654294674.865079)+x)))/((x^(-2189474695.4259806))/(-475915245.7363057)))));
+ assertEquals(1372586111, x ^= (1372586581));
+ assertEquals(1166831229, x -= ((-834168138)&(762573579)));
+ assertEquals(2333662456, x -= ((x>>x)-x));
+ assertEquals(-1961304840, x &= x);
+ assertEquals(-2130143128, x &= (2982852718.0711775));
+ assertEquals(1073741824, x <<= (-1446978661.6426942));
+ assertEquals(2097152, x >>>= ((-1424728215)-(((127872198)%(tmp = -2596923298, tmp))&x)));
+ assertEquals(2097152, x >>>= x);
+ assertEquals(0, x &= (x/(tmp = -518419194.42994523, tmp)));
+ assertEquals(0, x >>= ((x/(-1865078245))%(tmp = 2959239210, tmp)));
+ assertEquals(-0, x *= ((x|(-1721307400))|(-3206147171.9491577)));
+ assertEquals(0, x >>>= ((-694741143)&(tmp = -2196513947.699142, tmp)));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x &= ((tmp = 2037824385.8836646, tmp)+((tmp = 1203034986.4647732, tmp)/(x>>>(((-1374881234)/(899771270.3237157))+((-2296524362.8020077)|(-1529870870)))))));
+ assertEquals(0, x >>= (tmp = 2770637816, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(-1861843456, x |= ((632402668)*((x|(tmp = -1032952662.8269436, tmp))|(tmp = 2671272511, tmp))));
+ assertEquals(-1861843456, x >>= (((x>>>x)+x)<<(-1600908842)));
+ assertEquals(-58182608, x >>= (x-(tmp = -2496617861, tmp)));
+ assertEquals(-3636413, x >>= (tmp = -400700028, tmp));
+ assertEquals(-7272826, x += x);
+ assertEquals(-1, x >>= ((tmp = -3184897005.3614545, tmp)-((-1799843014)|(tmp = 2832132915, tmp))));
+ assertEquals(-121800925.94209385, x *= (121800925.94209385));
+ assertEquals(-30450232, x >>= (-979274206.6261561));
+ assertEquals(-30450232, x >>= (tmp = -1028204832.5078967, tmp));
+ assertEquals(-30450232, x |= x);
+ assertEquals(965888871, x ^= (((((-2157753481.3375635)*((tmp = -1810667184.8165767, tmp)&((tmp = 2503908344.422232, tmp)|x)))>>(x>>(1601560785)))<<x)^(tmp = 943867311.6380403, tmp)));
+ assertEquals(7546006, x >>>= x);
+ assertEquals(7546006, x <<= ((tmp = 1388931761.780241, tmp)*(x-(tmp = -1245147647.0070577, tmp))));
+ assertEquals(12985628, x += (x&(-1520746354)));
+ assertEquals(12985628, x &= x);
+ assertEquals(12985628, x %= (tmp = 308641965, tmp));
+ assertEquals(685733278, x |= ((tmp = -1275653544, tmp)-((tmp = -1956798010.3773859, tmp)%(tmp = 2086889575.643448, tmp))));
+ assertEquals(679679376, x &= (2860752368));
+ assertEquals(1770773904, x |= (x<<(3200659207)));
+ assertEquals(1224886544, x &= (-585733767.6876519));
+ assertEquals(1224886544, x %= ((tmp = -114218494, tmp)-x));
+ assertEquals(1208109328, x &= (tmp = 1854361593, tmp));
+ assertEquals(18434, x >>>= x);
+ assertEquals(-349394636955256100, x *= (x*(-1028198742)));
+ assertEquals(-519536600.7713163, x %= (-1054085356.9120367));
+ assertEquals(-1610612736, x ^= ((tmp = -3126078854, tmp)&x));
+ assertEquals(-2637321565906333700, x *= (1637464740.5658746));
+ assertEquals(-2637321568051070500, x -= ((tmp = -1006718806, tmp)<<(3005848133.106345)));
+ assertEquals(368168695, x ^= (x^(tmp = 368168695.6881037, tmp)));
+ assertEquals(43, x >>>= x);
+ assertEquals(-2081297089, x |= ((167169305.77248895)+(-2248466405.3199244)));
+ assertEquals(-2474622167, x -= (tmp = 393325078, tmp));
+ assertEquals(-135109701, x %= (-1169756233));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= (((((tmp = -164768854, tmp)/(tmp = -1774989993.1909926, tmp))+x)-((-921438912)>>(tmp = -191772028.69249105, tmp)))-(tmp = 558728578.22033, tmp)));
+ assertEquals(0, x %= (tmp = 2188003745, tmp));
+ assertEquals(0, x <<= (((tmp = -999335540, tmp)>>((((325101977)/(tmp = -3036991542, tmp))<<(tmp = -213302488, tmp))+x))|(tmp = -1054204587, tmp)));
+ assertEquals(0, x &= ((2844053429.4720345)>>>x));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x -= (-1481729275.9118822));
+ assertEquals(NaN, x *= (tmp = 1098314618.2397528, tmp));
+ assertEquals(-1073741824, x ^= ((tmp = 1718545772, tmp)<<(((tmp = -81058910, tmp)-(2831123087.424368))+(tmp = 576710057.2361784, tmp))));
+ assertEquals(-2921155898.4793186, x -= (1847414074.4793184));
+ assertEquals(-1295646720, x <<= (2178621744));
+ assertEquals(-0.8906779709597907, x /= ((tmp = -2840292585.6837263, tmp)<<(x&((tmp = 892527695.6172305, tmp)>>>x))));
+ assertEquals(0, x <<= (((tmp = 3149667213.298993, tmp)>>(tmp = 1679370761.7226725, tmp))^(115417747.21537328)));
+ assertEquals(0, x |= x);
+ assertEquals(0, x %= ((-1112849427)>>(-1245508870.7514496)));
+ assertEquals(0, x &= x);
+ assertEquals(0, x |= x);
+ assertEquals(0, x >>>= ((3144100694.930459)>>>(tmp = 2408610503, tmp)));
+ assertEquals(0, x <<= ((tmp = 2671709754.0318713, tmp)%x));
+ assertEquals(0, x >>>= (x|((tmp = -3048578701, tmp)-(674147224))));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= ((tmp = -2084883715, tmp)|(((((-3008427069)+(875536047.4283574))>>>x)%(tmp = -450003426.1091652, tmp))%(((-2956878433.269356)|(x/((((x%((((((x<<(((tmp = -1581063482.510351, tmp)^x)-(tmp = 1364458217, tmp)))^((tmp = 1661446342, tmp)+(1307091014)))/(342270750.9901335))>>>(x&((1760980812.898993)&((tmp = 2878165745.6401143, tmp)/(((tmp = -981178013, tmp)/(-2338761668.29912))>>(-958462630))))))*((1807522840)^((tmp = 1885835034, tmp)^(-2538647938))))*(1673607540.0854697)))%x)>>x)<<x)))<<(853348877.2407281)))));
+ assertEquals(0, x >>>= x);
+ assertEquals(-1162790279, x -= (1162790279));
+ assertEquals(-1162790279, x >>= (((-490178658)*x)/((((((tmp = -1883861998.6699312, tmp)/(tmp = -2369967345.240594, tmp))+(3142759868.266447))&(508784917.8158537))&x)>>(-2129532322))));
+ assertEquals(-1360849740.9829152, x -= (x+(1360849740.9829152)));
+ assertEquals(1928392181, x ^= (-602670783));
+ assertEquals(19478708.898989897, x /= (((-2617861994)>>(tmp = 797256920, tmp))%(-1784987906)));
+ assertEquals(-8648903.575540157, x *= (((tmp = 673979276, tmp)/(-1517908716))%(x/x)));
+ assertEquals(-8648903.575540157, x %= ((((643195610.4221292)>>>(tmp = 2342669302, tmp))>>>(tmp = -1682965878, tmp))^((tmp = -208158937.63443017, tmp)>>((907286989)&(x<<(448634893))))));
+ assertEquals(1399288769, x ^= (tmp = -1407486728, tmp));
+ assertEquals(0, x &= (((1999255838.815517)/(tmp = 564646001, tmp))/(-3075888101.3274765)));
+ assertEquals(0, x ^= ((-78451711.59404826)%x));
+ assertEquals(-1351557131, x |= (2943410165));
+ assertEquals(1715626371, x -= (-3067183502));
+ assertEquals(71434240, x &= ((-1800066426)<<(((((x<<(-324796375))+x)<<(tmp = 2696824955.735132, tmp))^x)%(tmp = 444916469, tmp))));
+ assertEquals(71434240, x >>>= (((x&((x%x)|x))+(tmp = 2226992348.3050146, tmp))<<(-305526260)));
+ assertEquals(0, x -= (x%(tmp = 582790928.5832802, tmp)));
+ assertEquals(0, x *= ((x%(1865155340))>>>((x<<(2600488191))^(-308995123))));
+ assertEquals(0, x >>= (x&(-3120043868.8531103)));
+ assertEquals(0, x |= x);
+ assertEquals(-0, x *= (tmp = -172569944, tmp));
+ assertEquals(0, x <<= (-1664372874));
+ assertEquals(1377713344.6784928, x += (tmp = 1377713344.6784928, tmp));
+ assertEquals(1377713344, x |= x);
+ assertEquals(-232833282, x |= (tmp = 2685870654, tmp));
+ assertEquals(84639, x -= (((((2778531079.998492)%(2029165314))>>>(tmp = -468881172.3729558, tmp))^x)|((x>>>((((x%(3044318992.943596))&(1996754328.2214756))^(1985227172.7485228))%(tmp = -1984848676.1347625, tmp)))|((tmp = 2637662639, tmp)<<x))));
+ assertEquals(0, x ^= x);
+ assertEquals(1237720303, x -= (-1237720303));
+ assertEquals(2, x >>= (-2148785379.428976));
+ assertEquals(2, x &= (tmp = -3087007874, tmp));
+ assertEquals(0, x %= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x &= (2055693082));
+ assertEquals(-1349456492, x += (x^(-1349456492.315998)));
+ assertEquals(671088640, x <<= (x>>(-2030805724.5472062)));
+ assertEquals(-417654580004782100, x *= (tmp = -622353822, tmp));
+ assertEquals(1538160360, x |= (195983080.56698656));
+ assertEquals(733, x >>>= (tmp = 661085269, tmp));
+ assertEquals(657, x &= (-1611460943.993404));
+ assertEquals(431649, x *= x);
+ assertEquals(863298, x += x);
+ assertEquals(0, x &= ((1899423003)/((472439729)>>((tmp = 2903738952, tmp)+(tmp = 2164601630.3456993, tmp)))));
+ assertEquals(0, x &= (x>>>(tmp = 1939167951.2828958, tmp)));
+ assertEquals(1557813284, x |= (x-(-1557813284)));
+ assertEquals(72876068, x &= (662438974.2372154));
+ assertEquals(0.6695448637501589, x /= (tmp = 108844189.45702457, tmp));
+ assertEquals(0, x -= x);
+ assertEquals(2944889412, x += (2944889412));
+ assertEquals(3787980288, x -= ((((tmp = -2003814373.2301111, tmp)<<x)>>>(tmp = -3088357284.4405823, tmp))-(843090884)));
+ assertEquals(1, x >>>= (729274079));
+ assertEquals(1, x %= (-148002187.33869123));
+ assertEquals(3073988415.673201, x *= (tmp = 3073988415.673201, tmp));
+ assertEquals(4839166225.673201, x += (tmp = 1765177810, tmp));
+ assertEquals(4529373898.673201, x += (-309792327));
+ assertEquals(3097903.090496063, x %= (-150875866.51942348));
+ assertEquals(1270874112, x <<= ((((((tmp = -960966763.1418135, tmp)>>((((-3208596981.613482)>>>(tmp = 746403937.6913509, tmp))>>>(-2190042854.066803))/(2449323432)))*(-1272232665.791577))<<(-99306767.7209444))^((-1942103828)/((1570981655)/(tmp = 2381666337, tmp))))+(tmp = -1946759395.1558368, tmp)));
+ assertEquals(1273845956, x |= (tmp = -3197282108.6120167, tmp));
+ assertEquals(159230744, x >>= (((tmp = -1036031403.8108604, tmp)>>>(((3084964493)>>((x*x)^x))+(((2980108409.352001)^x)-(tmp = -2501685423.513927, tmp))))&(326263839)));
+ assertEquals(-370091747145550100, x *= (tmp = -2324248055.674161, tmp));
+ assertEquals(143384219.54999557, x /= (tmp = -2581119096, tmp));
+ assertEquals(1843396287, x |= (tmp = 1842718767, tmp));
+ assertEquals(2.4895593465813803, x /= (740450831));
+ assertEquals(2.4895593465813803, x %= ((((((((-3175333618)>>>((tmp = -1403880166, tmp)<<(tmp = -134875360, tmp)))>>>(2721317334.998084))<<(x&(tmp = 2924634208.1484184, tmp)))*((((x>>(tmp = -200319931.15328693, tmp))-(tmp = -495128933, tmp))+((-788052518.6610589)*((((tmp = 107902557, tmp)&(1221562660))%(x<<(((3155498059)*(((tmp = -1354381139.4897022, tmp)^(tmp = 3084557138.332852, tmp))*((((tmp = 1855251464.8464525, tmp)/((-1857403525.2008865)>>x))|x)-(-2061968455.0023944))))*(1917481864.84619))))^(x-(-508176709.52712965)))))+((((x%(-1942063404))+(x%(tmp = 855152281.180481, tmp)))|(-522863804))>>x)))>>>((tmp = -2515550553, tmp)&(((((-801095375)-(tmp = -2298729336.9792976, tmp))^x)/(tmp = 2370468053, tmp))>>(x|(tmp = -900008879, tmp)))))>>>(((tmp = -810295719.9509168, tmp)*((tmp = -1306212963.6226444, tmp)/(((tmp = 3175881540.9514832, tmp)|(-1439142297.819246))+((tmp = -134415617, tmp)|((-245801870)+x)))))>>(tmp = 1889815478, tmp)))-(((tmp = 597031177, tmp)%(858071823.7655672))+((tmp = 2320838665.8243756, tmp)|((938555608)<<(2351739219.6461897))))));
+ assertEquals(6.197905740150709, x *= x);
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= (-1639664165.9076233));
+ assertEquals(0, x >>= (-3135317748.801177));
+ assertEquals(0, x &= (3185479232.5325994));
+ assertEquals(-0, x *= ((-119759439.19668174)/(tmp = 2123964608, tmp)));
+ assertEquals(0, x /= (-1183061929.2827876));
+ assertEquals(0, x <<= (-1981831198));
+ assertEquals(0, x >>= ((((x<<(((((((-2133752838)&((tmp = -3045157736.9331336, tmp)>>>(x%x)))>>x)%(tmp = 3082217039, tmp))&(tmp = 270770770.97558427, tmp))|((-2212037556)^((((((2089224421)|(tmp = 360979560, tmp))<<x)%((tmp = -1679487690.6940534, tmp)+((173021423)|((tmp = 560900612, tmp)+((244376267.58977115)^x)))))<<(tmp = 2534513699, tmp))^x)))>>>(2915907189.4873834)))+(x*x))%(1637581117))%(tmp = 2363861105.3786244, tmp)));
+ assertEquals(0, x &= ((-2765495757.873004)&(1727406493)));
+ assertEquals(NaN, x -= (((((-1419667515.2616255)|x)-(150530256.48022234))%((((x|x)<<x)>>>(x^x))+x))-((-1216384577.3749187)*(495244398))));
+ assertEquals(NaN, x += (x^((tmp = 2472035493, tmp)+x)));
+ assertEquals(NaN, x %= ((tmp = -1753037412.885754, tmp)|((tmp = 2507058310, tmp)<<(1475945705))));
+ assertEquals(-1008981005, x |= ((tmp = -1140889842.6099494, tmp)-(tmp = -131908837, tmp)));
+ assertEquals(999230327.5872104, x -= (tmp = -2008211332.5872104, tmp));
+ assertEquals(975810, x >>= (((-1211913874)*x)>>>((-2842129009)>>(x&(tmp = -1410865834, tmp)))));
+ assertEquals(7623, x >>= ((tmp = -1051327071, tmp)-(((tmp = -237716102.8005445, tmp)|((2938903833.416546)&x))|(((-1831064579)^x)/((tmp = 2999232092, tmp)-(981996301.2875179))))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= (x|(tmp = -666201160.5810485, tmp)));
+ assertEquals(-1347124100, x |= (-1347124100));
+ assertEquals(-0, x %= (x&x));
+ assertEquals(-661607963, x ^= (tmp = -661607963.3794863, tmp));
+ assertEquals(3465, x >>>= (-828119020.8056595));
+ assertEquals(-268431991, x -= (((tmp = -1386256352, tmp)^((tmp = 743629575, tmp)%((x*((tmp = -1719517658, tmp)>>(2019516558)))<<((2637317661)|x))))<<(tmp = -51637065, tmp)));
+ assertEquals(1578876380, x += ((tmp = 1847308371, tmp)&(((((((tmp = 1487934776.1893163, tmp)%(tmp = 1423264469.3137975, tmp))|(((2653260792.5668964)/(-2417905016.043802))>>>(2097411118.4501896)))^x)^(((tmp = -71334226, tmp)|x)>>>(tmp = -2771758874.7696714, tmp)))^((tmp = -1464849031.3240793, tmp)%(tmp = 2349739690.6430283, tmp)))/x)));
+ assertEquals(3269293934, x += (1690417554));
+ assertEquals(4025392608.031957, x -= (((tmp = 268501120.7225704, tmp)<<(tmp = 2841620654.8903794, tmp))+((tmp = 1606704462.8455591, tmp)/((-2601879963)/(tmp = 2966620168.989736, tmp)))));
+ assertEquals(7, x >>>= (x^(-1913800035)));
+ assertEquals(1.4326776816275493e-8, x /= ((((tmp = -2703417892, tmp)/x)^((-2693772270.396241)>>>((x-(tmp = 615999818.5666655, tmp))>>((((2308121439.3702726)<<((-1794701502)>>(x+(tmp = -2253406035.972883, tmp))))<<((tmp = -197103799.0624652, tmp)|(629975898)))>>>x))))>>>((tmp = 2833656803, tmp)^(x^(tmp = -1580436025, tmp)))));
+ assertEquals(0, x >>>= (tmp = 1525372830.2126007, tmp));
+ assertEquals(0, x %= ((2354010949.24469)>>>(x<<x)));
+ assertEquals(0, x ^= (((1112335059.6922574)*(tmp = -1874363935, tmp))&(((((2154894295.8360596)<<x)&(tmp = -270736315.13505507, tmp))&x)>>>(-2205692260.552064))));
+ assertEquals(0, x >>>= (x<<((1488533932)*(tmp = 1707754286, tmp))));
+ assertEquals(0, x >>= (((tmp = 1232547376.463387, tmp)%((x>>(711691823.1608362))>>>x))>>(((895039781.7478573)*(((((-334946524)&x)*(tmp = -1214529640, tmp))^(tmp = -1586820245, tmp))*(1062595445)))+x)));
+ assertEquals(0, x *= (1863299863.2631998));
+ assertEquals(0, x /= (tmp = 1858428705.1330547, tmp));
+ assertEquals(0, x &= x);
+ assertEquals(611788028, x += (x^(611788028.1510412)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= ((tmp = -1617320707.1784317, tmp)-((-2139400380)-(-1402777976))));
+ assertEquals(0, x >>= (415866827.34665));
+ assertEquals(-1990811897, x -= (tmp = 1990811897, tmp));
+ assertEquals(-1990811895, x += ((x>>>(tmp = -2175453282.769696, tmp))&(tmp = -1459450498.7327478, tmp)));
+ assertEquals(-2377017935.149517, x += (-386206040.1495173));
+ assertEquals(1946129845, x |= (tmp = -2890956796.936539, tmp));
+ assertEquals(0, x %= x);
+ assertEquals(0, x <<= (1616188263));
+ assertEquals(-1081213596, x ^= (tmp = 3213753700, tmp));
+ assertEquals(3213753700, x >>>= (tmp = -3211181312, tmp));
+ assertEquals(-1081213596, x &= x);
+ assertEquals(-1081213583, x ^= (((tmp = 1599988273.4926577, tmp)>>((((-1061394954.6331315)^x)+((-1835761078)*x))+(x%(tmp = -696221869, tmp))))/((tmp = -1156966790.3436491, tmp)^x)));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x += (-1257400530.9263027));
+ assertEquals(NaN, x /= (753062089));
+ assertEquals(NaN, x *= ((tmp = 305418865.57012296, tmp)^(((-2797769706)+((((tmp = -33288276.988654375, tmp)%(tmp = 1242979846, tmp))|(-316574800))-((tmp = -1766083579.4203427, tmp)*(((x*(tmp = -2400342309.2349987, tmp))>>(tmp = 2632061795, tmp))^(tmp = -1001440809, tmp)))))^((((x-(tmp = -1469542637.6925495, tmp))-x)-(3184196890))%(((((((633226688)*((tmp = -2692547856, tmp)>>(((tmp = -1244311756, tmp)>>>x)+((1746013631.405202)>>>(941829464.1962085)))))%(x-x))+(995681795))-(tmp = -3047070551.3642616, tmp))/(1968259705))-((-2853237880)^(tmp = -2746628223.4540343, tmp)))))));
+ assertEquals(0, x >>= x);
+ assertEquals(0.5713172378854926, x += (((x+(((x+x)/(tmp = 2642822318, tmp))*(-2590095885.4280834)))|(tmp = -1769210836, tmp))/(tmp = -3096722308.8665104, tmp)));
+ assertEquals(-0.000002311097780334994, x /= ((2269858877.9010344)>>(-2992512915.984787)));
+ assertEquals(-0.000002311097780334994, x %= (-1139222821));
+ assertEquals(-0.000004622195560669988, x += x);
+ assertEquals(1, x /= x);
+ assertEquals(1, x >>>= (((3002169429.6061807)/(-3068577366))>>>((tmp = -1844537620, tmp)%((((tmp = 2087505119, tmp)>>>x)+x)&(2179989542)))));
+ assertEquals(-534213071, x *= (-534213071));
+ assertEquals(-534213077.3716287, x -= (((tmp = -2390432951.154034, tmp)^x)/(-290501980)));
+ assertEquals(1836305, x >>>= (x&x));
+ assertEquals(1836305, x %= ((x|((3070123855)^(49986396)))+((-1863644960.4202995)>>>((tmp = 1886126804.6019692, tmp)^x))));
+ assertEquals(28692, x >>>= ((2561362139.491764)>>(((((tmp = -1347469854.7413375, tmp)/(((x|(x+x))^((x^(tmp = -2737413775.4595394, tmp))^x))<<(((tmp = 225344844.07128417, tmp)&x)&(tmp = 145794498, tmp))))*x)<<(1424529187))/((-2924344715)/(tmp = -2125770148, tmp)))));
+ assertEquals(-2089419535.2717648, x += (-2089448227.2717648));
+ assertEquals(18957929, x ^= (tmp = 2186590872, tmp));
+ assertEquals(-708972800, x -= (727930729));
+ assertEquals(-4198593, x |= (799483455.1885371));
+ assertEquals(-1, x >>= (-2330654693.6413193));
+ assertEquals(-1, x |= (((tmp = -116877155, tmp)>>>((((tmp = -1677422314.1333556, tmp)/(tmp = -3108738499.0798397, tmp))%((x&(x/x))%((tmp = -695607185.1561592, tmp)-(tmp = 2302449181.622259, tmp))))^(((-1482743646.5604773)^((897705064)>>>x))-(tmp = -2933836669, tmp))))%(((tmp = -2991584625, tmp)|(((x>>x)+(-1101066835))-x))>>(-33192973.819939613))));
+ assertEquals(-1, x &= x);
+ assertEquals(-524288, x <<= (-1177513101.3087924));
+ assertEquals(1978770334.9189441, x += (tmp = 1979294622.9189441, tmp));
+ assertEquals(901783582, x &= ((-368584615)^(((((-478030699.2647903)<<x)<<x)+(tmp = 708725752, tmp))^((tmp = -3081556856, tmp)/(tmp = 1149958711.0676727, tmp)))));
+ assertEquals(-1480333211.8654308, x += (tmp = -2382116793.865431, tmp));
+ assertEquals(956930239.6783283, x *= ((tmp = 956930239.6783283, tmp)/x));
+ assertEquals(1277610.4668602513, x /= ((tmp = 1571029828, tmp)>>(tmp = 2417481141, tmp)));
+ assertEquals(-1077333228, x ^= (tmp = 3218755006, tmp));
+ assertEquals(-50218, x |= (tmp = -1044436526.6435988, tmp));
+ assertEquals(-1, x >>= (-154655245.18921852));
+ assertEquals(0.00006276207290978003, x *= (((tmp = 2234286992.9800305, tmp)>>(tmp = 2132564046.0696363, tmp))/((((tmp = -2565534644.3428087, tmp)>>>(tmp = 2622809851.043325, tmp))>>>((tmp = 311277386, tmp)&x))-(tmp = -2003980974, tmp))));
+ assertEquals(0, x %= x);
+ assertEquals(1282114076, x += ((((422838227)>>>((tmp = 1024613366.1899053, tmp)-((368275340)<<(((tmp = -3066121318, tmp)+(-2319101378))&x))))^(x>>(tmp = 1920136319.803412, tmp)))^(1282264803.3968434)));
+ assertEquals(-277097604, x |= (-283585688.9123297));
+ assertEquals(553816692, x &= (x&(tmp = 554082036.676608, tmp)));
+ assertEquals(658505728, x <<= x);
+ assertEquals(658505728, x &= (x%(2846071230)));
+ assertEquals(39, x >>= (334728536.5172192));
+ assertEquals(0, x -= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x &= (tmp = -335285336, tmp));
+ assertEquals(0, x <<= (tmp = 1255594828.3430014, tmp));
+ assertEquals(0, x %= (-630772751.1248167));
+ assertEquals(NaN, x /= ((((x&(tmp = -1576090612, tmp))%x)>>>x)*((-1038073094.2787619)>>>x)));
+ assertEquals(NaN, x += x);
+ assertEquals(NaN, x -= (((tmp = -2663887803, tmp)&((x+(-1402421046))/x))/(-2675654483)));
+ assertEquals(NaN, x %= (x&(tmp = 672002093, tmp)));
+ assertEquals(0, x |= x);
+ assertEquals(-2698925754, x += (tmp = -2698925754, tmp));
+ assertEquals(-2057748993, x += ((tmp = -2263466497, tmp)^x));
+ assertEquals(1, x /= x);
+ assertEquals(-2769559719.4045835, x -= (2769559720.4045835));
+ assertEquals(-1.3964174646069973, x /= (tmp = 1983332198, tmp));
+ assertEquals(-2140716624.3964174, x += (tmp = -2140716623, tmp));
+ assertEquals(0, x <<= ((2589073007)-(-816764911.8571186)));
+ assertEquals(-2837097288.161354, x -= (tmp = 2837097288.161354, tmp));
+ assertEquals(-1445059927.161354, x += (tmp = 1392037361, tmp));
+ assertEquals(155197984, x &= (tmp = -2694712730.924674, tmp));
+ assertEquals(155197984, x |= (x>>>(tmp = 69118015.20305443, tmp)));
+ assertEquals(155197984, x >>>= (((x^(-1353660241))*x)<<(((((x%(tmp = -1905584634, tmp))>>>(tmp = -860171244.5963638, tmp))&(-1084415001.7039547))+(x-(((tmp = 298064661, tmp)>>x)>>((tmp = 378629912.383446, tmp)-(x%x)))))+(((3212580683)/(((((x^x)>>(tmp = -1502887218, tmp))<<x)%(-142779025))|(((tmp = 1361745708, tmp)*(((((tmp = 1797072528.0673332, tmp)+x)%(tmp = 167297609, tmp))%(-287345856.1791787))^(((((((x*(tmp = -640510459.1514752, tmp))<<(x^(tmp = 1387982082.5646644, tmp)))>>(tmp = 2473373497.467914, tmp))^((234025940)*x))+(tmp = 520098202.9546956, tmp))*(x*(tmp = -362929250.1775775, tmp)))^(-2379972900))))*(tmp = -1385817972, tmp))))+(-1788631834)))));
+ assertEquals(0, x >>= ((tmp = -18671049, tmp)/((tmp = 651261550.6716013, tmp)>>(-58105114.70740628))));
+ assertEquals(0, x *= ((((x>>(tmp = 2256492150.737681, tmp))<<(x<<(((-2738910707)&x)<<(1892428322))))*(tmp = 1547934638, tmp))>>((((319464033.7888391)|(((((tmp = 2705641070, tmp)<<((tmp = 1566904759.36666, tmp)*((-682175559.7540412)&(-691692016.3021002))))%(tmp = 1118101737, tmp))|(902774462))<<x))^((tmp = -388997180, tmp)<<(x<<((((((-88462733)+(x>>>x))%x)*(tmp = -20297481.556210756, tmp))>>>(1927423855.1719701))-((2047811185.6278129)-(tmp = 2952219346.72126, tmp))))))|(-1685518403.7513878))));
+ assertEquals(0, x /= (tmp = 1858074757.563318, tmp));
+ assertEquals(-1351623058, x ^= (-1351623058.4756806));
+ assertEquals(1, x /= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x -= (x&(997878144.9798675)));
+ assertEquals(-0, x /= (-2769731277));
+ assertEquals(0, x >>>= ((-2598508325)>>(-1355571351)));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x -= (x&(tmp = 1672810223, tmp)));
+ assertEquals(-924449908.1999881, x -= (924449908.1999881));
+ assertEquals(-0, x %= x);
+ assertEquals(-0, x /= (tmp = 2007131382.059545, tmp));
+ assertEquals(-0, x += x);
+ assertEquals(225132064, x += ((((tmp = -2422670578.1260514, tmp)|x)+x)^(1660142894.7066057)));
+ assertEquals(Infinity, x /= (x-x));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= x);
+ assertEquals(-2455424946.732606, x -= (2455424946.732606));
+ assertEquals(1208029258, x &= ((tmp = 1823728509, tmp)+x));
+ assertEquals(1.3682499724725645, x /= ((((tmp = 1267938464.3854322, tmp)%((tmp = 2510853574, tmp)+(((2979355693.866435)-(tmp = 1989726095.7746763, tmp))<<x)))%((-1382092141.1627176)+(((-901799353)+((-2936414080.8254457)>>>(2515004943.0865674)))-(2532799222.353197))))<<(tmp = -2168058960.2694826, tmp)));
+ assertEquals(0.13799826710735907, x %= ((-1090423235)/(tmp = 2659024727, tmp)));
+ assertEquals(0, x >>= (1688542889.082693));
+ assertEquals(0, x <<= x);
+ assertEquals(NaN, x %= ((((tmp = 1461037539, tmp)<<((x<<(tmp = 2101282906.5302017, tmp))>>(-2792197742)))%(((x%x)^(((tmp = 1399565526, tmp)^(tmp = 643902, tmp))-((tmp = -1449543738, tmp)|x)))/x))*(x<<(471967867))));
+ assertEquals(0, x &= ((tmp = -2121748100.6824129, tmp)>>(tmp = -2817271480.6497793, tmp)));
+ assertEquals(0, x &= (3169130964.6291866));
+ assertEquals(-0, x /= (-2303316806));
+ assertEquals(0, x <<= (tmp = 120185946.51617038, tmp));
+ assertEquals(449448375, x ^= ((((tmp = -836410266.014014, tmp)/x)&((x>>>(tmp = -2602671283, tmp))+x))+(tmp = 449448375, tmp)));
+ assertEquals(202003841790140640, x *= x);
+ assertEquals(202003840800829020, x += (((tmp = -1339865843, tmp)+(tmp = 350554234.15375435, tmp))<<((((((tmp = -1798499687.8208885, tmp)>>(((x-(x^x))|((tmp = 463627396.23932934, tmp)/(2714928060)))&(tmp = 3048222568.1103754, tmp)))&(-3127578553))<<(tmp = -2569797028.8299003, tmp))&x)<<((tmp = 2104393646, tmp)/((tmp = 2314471015.742891, tmp)<<((2704090554.1746845)>>(((tmp = 1935999696, tmp)*(((1348554815)>>>x)>>>(146665093.82445252)))%x)))))));
+ assertEquals(202003841764125400, x -= (tmp = -963296372.2846234, tmp));
+ assertEquals(-413485056, x <<= (tmp = -2474480506.6054573, tmp));
+ assertEquals(-3171894580.186845, x += ((tmp = -1261111102, tmp)+(tmp = -1497298422.1868448, tmp)));
+ assertEquals(17136, x >>= (tmp = 3055058160, tmp));
+ assertEquals(17136, x %= (tmp = 1706784063.3577294, tmp));
+ assertEquals(17136, x >>= ((tmp = 2161213808, tmp)*x));
+ assertEquals(-17136, x /= ((((tmp = -1492618154, tmp)>>x)|(1381949066))>>(tmp = 2014457960, tmp)));
+ assertEquals(-34272, x += x);
+ assertEquals(-1498690902, x += (-1498656630));
+ assertEquals(-1168674482, x ^= (486325220));
+ assertEquals(-1168674482, x <<= ((x^x)*x));
+ assertEquals(794521557347068000, x *= (-679848469));
+ assertEquals(1.3330392590424505e+26, x *= (tmp = 167778866, tmp));
+ assertEquals(0, x <<= (tmp = -2501540637.3664584, tmp));
+ assertEquals(0, x >>>= (x-(x*(-890638026.1825848))));
+ assertEquals(0, x %= ((-285010538.2813468)&(1314684460.7634423)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x *= x);
+ assertEquals(NaN, x %= (x*(x<<x)));
+ assertEquals(NaN, x %= (x<<(((tmp = -1763171810.601149, tmp)&(-138151449.18303752))^(x|x))));
+ assertEquals(0, x |= (x>>x));
+ assertEquals(0, x &= (tmp = 1107152048, tmp));
+ assertEquals(0, x >>= (1489117056.8200984));
+ assertEquals(518749976, x ^= (518749976.20107937));
+ assertEquals(356718654, x += (tmp = -162031322, tmp));
+ assertEquals(356718654, x %= (((x>>>((tmp = -373747439.09634733, tmp)*(tmp = 563665566, tmp)))*(tmp = 2853322586.588251, tmp))*((1303537213)%(-2995314284))));
+ assertEquals(5573728, x >>= (tmp = -2095997978, tmp));
+ assertEquals(5573728, x <<= x);
+ assertEquals(5573728, x >>= (((((tmp = 1745399178.334154, tmp)<<(tmp = 2647999783.8219824, tmp))^(tmp = 1571286759, tmp))%x)/(2166250345.181711)));
+ assertEquals(10886, x >>>= ((682837289)+(x*x)));
+ assertEquals(170, x >>>= x);
+ assertEquals(169.95167497151652, x -= (((tmp = 527356024.19706845, tmp)+((tmp = 1263164619.2954736, tmp)|(tmp = 2942471886, tmp)))/((3017909419.131321)+(tmp = 2137746252.8006272, tmp))));
+ assertEquals(-1915170061, x ^= (tmp = -1915170214, tmp));
+ assertEquals(206045792, x &= (((tmp = 887031922, tmp)>>>x)-((-1861922770)|(9633541))));
+ assertEquals(-1940321674, x |= (tmp = -2012149162.1817405, tmp));
+ assertEquals(-1940321674, x &= x);
+ assertEquals(1128412272.160699, x += (tmp = 3068733946.160699, tmp));
+ assertEquals(0.47486363523180236, x /= (tmp = 2376286976.807289, tmp));
+ assertEquals(-1.4931079540252477e-10, x /= (tmp = -3180370407.5892467, tmp));
+ assertEquals(0, x |= (((1220765170.5933602)*(884017786))*((x%(tmp = -2538196897.226384, tmp))<<(x^x))));
+ assertEquals(-525529894, x += (tmp = -525529894, tmp));
+ assertEquals(1621426184, x &= ((3046517714)*(((((-162481040.8033898)+(x/((x&(1489724492))/((x|(tmp = 943542303, tmp))>>>((-1840491388.1365871)<<(2338177232))))))+(((-2268887573.2430763)>>>(((tmp = 2919141667, tmp)+((tmp = 1326295559.692003, tmp)<<(-2256653815)))>>>(((((tmp = 1602731976.7514615, tmp)*(856036244.3730336))^x)>>>((((2846316421.252943)&(915324162))%(tmp = 1144577211.0221815, tmp))%x))*(x*x))))%(tmp = -2641416560, tmp)))*(x+(x>>>x)))>>x)));
+ assertEquals(1621426184, x %= (tmp = 1898223948, tmp));
+ assertEquals(-3.383396676504762, x /= ((tmp = 2211088034.5234556, tmp)^x));
+ assertEquals(7120923705.122882, x *= (((((tmp = 2632382342.914504, tmp)/(-615440284.1762738))&(2162453853.6658797))<<(-849038082.5298986))|(tmp = -2104667110.5603983, tmp)));
+ assertEquals(-1469010887, x &= x);
+ assertEquals(850767635866964700, x *= (tmp = -579143179.5338116, tmp));
+ assertEquals(0, x %= x);
+ assertEquals(-571457, x |= ((2849326490.8464212)|(tmp = 1450592063, tmp)));
+ assertEquals(-571457, x &= x);
+ assertEquals(-0.00018638416434019244, x /= (3066016912.021368));
+ assertEquals(0, x <<= (2058262829));
+ assertEquals(NaN, x %= ((x|((x%x)>>>x))%((tmp = -2970314895.6974382, tmp)+x)));
+ assertEquals(NaN, x *= (-698693934.9483855));
+ assertEquals(NaN, x += (-100150720.64391875));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x -= (-530301478));
+ assertEquals(NaN, x /= (1507673244));
+ assertEquals(0, x <<= (x%(tmp = 2977838420.857235, tmp)));
+ assertEquals(0, x <<= (tmp = 3200877763, tmp));
+ assertEquals(0, x <<= (tmp = -2592127060, tmp));
+ assertEquals(NaN, x -= (((((((1930632619)*(3018666359))<<((tmp = 2676511886, tmp)&(-2786714482.25468)))%x)-(-633193192))<<((tmp = 403293598, tmp)*(-2765170226)))%x));
+ assertEquals(530062092, x |= (tmp = 530062092, tmp));
+ assertEquals(129409, x >>>= x);
+ assertEquals(-152430382316341.78, x *= (-1177896300.229055));
+ assertEquals(-304860764632683.56, x += x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= (tmp = -63071565.367660046, tmp));
+ assertEquals(0, x &= ((((tmp = -1007464338, tmp)<<(x<<((x^(tmp = -726826835, tmp))|x)))>>>x)*(((tmp = 469293335.9161849, tmp)<<(((((tmp = 1035077379, tmp)*(tmp = -555174353.7567515, tmp))&(3109222796.8286266))-(((((x-(tmp = 1128900353.6650414, tmp))|(tmp = 3119921303, tmp))&((-1353827690)&(x%((-924615958)&x))))>>>x)+(tmp = 1167787910, tmp)))+x))%((605363594)>>(1784370958.269381)))));
+ assertEquals(0, x %= (2953812835.9781704));
+ assertEquals(0, x -= x);
+ assertEquals(0, x <<= x);
+ assertEquals(-901209266, x += (-901209266));
+ assertEquals(-901209266, x &= x);
+ assertEquals(404, x >>>= (-3195686249));
+ assertEquals(824237108, x ^= (824237472));
+ assertEquals(497790936.1853996, x /= ((tmp = 1253776028, tmp)/(757207285)));
+ assertEquals(497790936, x >>>= ((tmp = -2212598336, tmp)<<(x^(1335355792.9363852))));
+ assertEquals(0, x %= x);
+ assertEquals(-2659887352.6415873, x += (tmp = -2659887352.6415873, tmp));
+ assertEquals(1635079945, x |= ((x&(1234659380))>>((((tmp = 2694276886.979136, tmp)|x)^((tmp = 132795582, tmp)<<((-1089828902)>>>x)))<<((((tmp = -2098728613.0310376, tmp)<<(x/(tmp = -2253865599, tmp)))*((x+(x>>>((48633053.82579231)-(385301592))))*(tmp = -1847454853.333535, tmp)))/((-540428068.8583717)+x)))));
+ assertEquals(1, x /= x);
+ assertEquals(33554432, x <<= ((((2803140769)<<x)|(tmp = -1965793804, tmp))>>>(tmp = -2273336965.575082, tmp)));
+ assertEquals(67108864, x += x);
+ assertEquals(9007199254740992, x *= (x+((x>>x)%(2674760854))));
+ assertEquals(55369784, x %= (x|(-170725544.20038843)));
+ assertEquals(55369784, x %= (-1186186787));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= x);
+ assertEquals(NaN, x /= ((-2968110098)-((x/(x|(((((x|((x&((-130329882)>>>(((-135670650)|(x<<(tmp = 1280371822, tmp)))^x)))-(-1183024707.2230911)))&(-1072829280))>>>(-340696948.41492534))>>>(tmp = 436308526.4938295, tmp))<<(((tmp = 3113787500, tmp)*((2038309320)>>>(-1818917055)))&((2808000707)/(774731251))))))%x)));
+ assertEquals(0, x |= (x*(tmp = -843074864, tmp)));
+ assertEquals(0, x &= (tmp = -752261173.8090212, tmp));
+ assertEquals(0, x >>>= (tmp = 1532349931.7517128, tmp));
+ assertEquals(0, x <<= ((tmp = -8628768, tmp)-((((tmp = 225928543, tmp)%(x>>>(x+x)))^((tmp = -2051536806.5249376, tmp)-x))-((tmp = -2274310376.9964137, tmp)%(tmp = 2251342739, tmp)))));
+ assertEquals(0, x >>= (1011388449));
+ assertEquals(0, x += x);
+ assertEquals(0, x >>>= x);
+ assertEquals(-0, x *= ((-1781234179.8663826)>>(((1514201119.9761915)>>(((((1174857164.90042)^(tmp = 1124973934, tmp))^x)+((-1059246013.8834443)<<(2997611138.4876065)))%(((798188010)*(-1428293122))>>>(tmp = -3087267036.8035297, tmp))))<<x)));
+ assertEquals(1752554372, x ^= (tmp = -2542412924, tmp));
+ assertEquals(1752554372, x %= (tmp = 3037553410.2298307, tmp));
+ assertEquals(1859383977, x -= (x^(2446603103)));
+ assertEquals(1183048193, x &= ((tmp = -962336957, tmp)/(x/x)));
+ assertEquals(67738157, x %= ((((tmp = -1813911745.5223546, tmp)+x)<<(x-(((-1980179168)^x)|x)))|(1913769561.1308007)));
+ assertEquals(67698724, x &= ((1801574998.3142045)*((tmp = -2057492249, tmp)/((1713854494.72282)>>x))));
+ assertEquals(0, x -= x);
+ assertEquals(-25232836, x -= ((tmp = 25232836, tmp)|x));
+ assertEquals(-49, x >>= (x+((tmp = 2201204630.2897243, tmp)|(-1929326509))));
+ assertEquals(-1605632, x <<= x);
+ assertEquals(-165965313, x += (tmp = -164359681, tmp));
+ assertEquals(9.220413724941365e-10, x /= (((((tmp = 2579760013.0808706, tmp)*(tmp = -2535370639.9805303, tmp))>>((tmp = 2138199747.0301933, tmp)-(tmp = -2698019325.0972376, tmp)))*(tmp = -425284716, tmp))/((-1951538149.6611228)/(x^(2632919130)))));
+ assertEquals(0, x &= x);
+ assertEquals(0, x &= ((-645189137)/(tmp = 800952748, tmp)));
+ assertEquals(0, x &= (tmp = -1773606925, tmp));
+ assertEquals(0, x += x);
+ assertEquals(0, x >>>= (tmp = 211399355.0741787, tmp));
+ assertEquals(0, x <<= ((-1317040231.5737965)/((((((tmp = 838897586.0147077, tmp)|((-1902447594)|(tmp = 404942728.83034873, tmp)))^(2462760692.2907705))%((((((x%(tmp = -2888980287, tmp))<<(-368505224.49609876))-((x>>>(532513369))&(((((((tmp = -1298067543, tmp)^(tmp = -3130435881.100909, tmp))>>x)/(tmp = -3041161992, tmp))>>(x|(-431685991.95776653)))^((tmp = 1031777777, tmp)^((-105610810)>>>((-631433779)>>(tmp = -2577780871.167671, tmp)))))%(tmp = -3170517650.088039, tmp))))-(((tmp = 2175146237.968785, tmp)-((384631158.50508535)>>((893912279.4646157)|(tmp = -1478803924.5338967, tmp))))%(x/(-1089156420))))<<(tmp = -2024709456, tmp))>>x))*(tmp = -1423824994.6993582, tmp))%(tmp = 1739143409, tmp))));
+ assertEquals(-1799353648, x |= ((-1799353648.3589036)>>>((((x&(-923571640.1012449))%x)+((tmp = 971885508, tmp)>>((tmp = -2207464428.2123804, tmp)+(-3108177894.0459776))))-(-2048954486.7014258))));
+ assertEquals(-3666808032.2958965, x -= (tmp = 1867454384.2958965, tmp));
+ assertEquals(-260069478915415100, x *= (tmp = 70925305.23136711, tmp));
+ assertEquals(1142096768, x &= (tmp = 1866401706.9144325, tmp));
+ assertEquals(1, x >>>= (tmp = 2701377150.5717473, tmp));
+ assertEquals(1865946805, x |= (tmp = -2429020492, tmp));
+ assertEquals(1424222287, x ^= ((((tmp = 433781338, tmp)>>(x>>>((-2914418422.4829016)/(tmp = 1600920669, tmp))))|(tmp = 588320482.9566053, tmp))>>>((((((x+(tmp = -2556387365.5071325, tmp))+(tmp = -2381889946.1830974, tmp))/(3154278191))>>>(-1069701268.8022757))>>(((tmp = 182049089.28866422, tmp)>>x)>>>(tmp = -447146173, tmp)))/(x-(2103883357.0929923)))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x -= (x%(3036884806)));
+ assertEquals(0, x >>>= (tmp = -652793480.3870945, tmp));
+ assertEquals(0, x += x);
+ assertEquals(304031003, x ^= ((tmp = -900156495, tmp)^(-666397014.0711515)));
+ assertEquals(1, x /= x);
+ assertEquals(-1974501681, x |= (x^(-1974501681.4628205)));
+ assertEquals(-1.3089278317616264, x /= (((-1723703186.962839)>>>x)|((2061022161.6239533)<<x)));
+ assertEquals(-1, x |= (tmp = -1987006457, tmp));
+ assertEquals(-0.14285714285714285, x /= ((((((x|(-1767793799.7595732))-(-1391656680))<<x)|(x>>(tmp = -2301588485.2811003, tmp)))>>>(((tmp = 1812723993, tmp)>>>((x^(((tmp = -3154100157.951021, tmp)%((tmp = -1254955564.4553523, tmp)-(((x>>>(((-1762886343)*x)*x))*(x^(x*(-750918563.4387553))))*x)))|((x>>x)>>(x<<((((-1766797454.5634143)^(tmp = -2251474340, tmp))-(-787637516.5276759))<<((1390653368)^(-1937605249.245374)))))))|(((tmp = 1156611894, tmp)<<x)<<(x>>((((x+(tmp = 2170166060.881797, tmp))&(x>>>(tmp = -1749295923.1498983, tmp)))>>(((-1014973878)|x)&(1302866805.684057)))*(tmp = 560439074.4002491, tmp))))))|(-2758270803.4510045)))&x));
+ assertEquals(0, x |= x);
+ assertEquals(0, x += ((x>>((x+(tmp = -2776680860.870219, tmp))-(((688502468)<<(((tmp = 475364260.57888806, tmp)<<x)+(329071671)))/(-1097134948))))*(tmp = -1281834214.3416953, tmp)));
+ assertEquals(0, x *= ((((1159762330)<<(tmp = -1892429200, tmp))%x)<<x));
+ assertEquals(0, x >>>= (-770595225));
+ assertEquals(NaN, x += (((x>>x)/(tmp = 281621135, tmp))/x));
+ assertEquals(0, x >>= (1363890241));
+ assertEquals(1639023942.9945002, x += (1639023942.9945002));
+ assertEquals(-2568590958567747000, x *= (-1567146697));
+ assertEquals(1793554700, x ^= (tmp = 3215813388.405799, tmp));
+ assertEquals(437879, x >>= x);
+ assertEquals(1339485943, x |= (1339220210));
+ assertEquals(1, x /= x);
+ assertEquals(512, x <<= (2509226729.1477118));
+ assertEquals(512, x <<= ((x>>(1326274040.7181284))<<(tmp = -760670199, tmp)));
+ assertEquals(1, x /= (x<<(x^x)));
+ assertEquals(0, x >>>= (((((1382512625.8298302)&(x>>>x))*(tmp = -815316595, tmp))>>>x)-(-95538051)));
+ assertEquals(-544344229.3548596, x -= (tmp = 544344229.3548596, tmp));
+ assertEquals(-1088688458.7097192, x += x);
+ assertEquals(-1022850479579041900, x *= (939525418.3104812));
+ assertEquals(2069622661, x |= (-2632744187.7721186));
+ assertEquals(-1353480538017756400, x -= ((tmp = 1308085980, tmp)*((x>>>(-629663391.5165792))&(tmp = 3182319856.674114, tmp))));
+ assertEquals(1.3702811563654176e+27, x *= ((((3061414617.6321163)/(tmp = 2628865442, tmp))+(-1549548261))+(x&((tmp = 809684398, tmp)|(x^(tmp = 801765002, tmp))))));
+ assertEquals(0, x >>>= ((-2988504159)&((tmp = -260444190.02252054, tmp)^(2178729442.260293))));
+ assertEquals(-1518607002, x -= (tmp = 1518607002, tmp));
+ assertEquals(724566016, x <<= (tmp = 1042915731.7055794, tmp));
+ assertEquals(707584, x >>>= (-208959862.93305588));
+ assertEquals(0, x >>>= (((tmp = 877181764, tmp)>>(-970697753.3318911))%x));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x /= (x^((x/(-2903618412.4936123))+(tmp = 1169288899, tmp))));
+ assertEquals(0, x >>>= x);
+ assertEquals(-1302645245, x ^= ((1855892732.3544865)+(tmp = 1136429319.5633948, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= (-1384534597.409375));
+ assertEquals(-0, x /= (tmp = -680466419.8289509, tmp));
+ assertEquals(-0, x *= (318728599.95017374));
+ assertEquals(NaN, x %= (x>>(2019695267)));
+ assertEquals(0, x >>= (tmp = 1280789995, tmp));
+ assertEquals(0, x *= (tmp = 2336951458, tmp));
+ assertEquals(0, x >>= ((2981466013.758637)%(731947033)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x /= ((((3068070149.1452317)>>x)%(((1448965452)*((tmp = -2961594129, tmp)+(1829082104.0681171)))>>(-2331499703)))>>>(tmp = -3206314941.2626476, tmp)));
+ assertEquals(0, x >>= (x%(1869217101.9823673)));
+ assertEquals(0, x <<= (x+x));
+ assertEquals(0, x >>>= ((1202130282)>>>x));
+ assertEquals(0, x += x);
+ assertEquals(2603245248.6273212, x += (tmp = 2603245248.6273212, tmp));
+ assertEquals(-1691864471, x ^= (x>>>(2504513614.117516)));
+ assertEquals(136835305, x -= ((-1618979896)&(-746953306)));
+ assertEquals(-2568499564.1261334, x += (tmp = -2705334869.1261334, tmp));
+ assertEquals(1038075700, x ^= (1530399136));
+ assertEquals(2076151400, x += x);
+ assertEquals(-524018410.1751909, x -= ((2398973627.175191)-(-201196183)));
+ assertEquals(0.327110599608614, x /= ((3181340288.602796)&x));
+ assertEquals(0.327110599608614, x %= (tmp = -2284484060, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(403217947.5779772, x += (tmp = 403217947.5779772, tmp));
+ assertEquals(403217947, x |= x);
+ assertEquals(-Infinity, x *= ((58693583.845808744)+(((tmp = -1527787016, tmp)*x)/((((2532689893.3191843)/(tmp = 2781746479.850424, tmp))|(((((460850355.9211761)/((((tmp = 626683450, tmp)<<((tmp = 1349974710, tmp)-((tmp = -1349602292, tmp)/(-2199808871.1229663))))>>((x/(-3092436372.3078623))&(tmp = -1190631012.0323825, tmp)))^((-2907082828.4552956)-(tmp = 1858683340.1157017, tmp))))^(-1513755598.5398848))%x)/x))&(1147739260.136806)))));
+ assertEquals(0, x &= (tmp = -3047356844.109563, tmp));
+ assertEquals(637934616, x -= (tmp = -637934616, tmp));
+ assertEquals(-1553350083, x ^= (-2056266203.094929));
+ assertEquals(-0.13467351026547192, x %= ((tmp = 824736251, tmp)/(2544186314)));
+ assertEquals(1, x /= x);
+ assertEquals(1, x |= x);
+ assertEquals(0, x >>>= (2166609431.9515543));
+ assertEquals(0, x <<= (x|(tmp = 121899222.14603412, tmp)));
+ assertEquals(0, x *= (1300447849.6595674));
+ assertEquals(0, x %= (tmp = -2360500865.3944597, tmp));
+ assertEquals(0, x %= (tmp = -1693401247, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x /= (471265307));
+ assertEquals(257349748, x ^= (257349748.689448));
+ assertEquals(257349748, x &= x);
+ assertEquals(981, x >>>= (tmp = -1959001422, tmp));
+ assertEquals(0, x >>= ((-79932778.18114972)/x));
+ assertEquals(0, x <<= (((-2599621472)^(tmp = 662071103, tmp))%(tmp = -2675822640.7641535, tmp)));
+ assertEquals(0, x &= (tmp = 2582354953.878623, tmp));
+ assertEquals(0, x /= ((-953254484)/((-2571632163.376176)-(tmp = -342034471, tmp))));
+ assertEquals(0, x <<= ((x-(tmp = -3013057672, tmp))&(tmp = -3204761036, tmp)));
+ assertEquals(0, x ^= ((x&((515934453)>>>x))/x));
+ assertEquals(1, x |= ((-1914707646.2075093)>>>(tmp = -1918045025, tmp)));
+ assertEquals(-2002844120.8792589, x += (tmp = -2002844121.8792589, tmp));
+ assertEquals(573030794, x >>>= (tmp = 1707788162, tmp));
+ assertEquals(1.917619109627369, x /= ((1909436830.484202)%((123114323)<<(tmp = -1288988388.6444468, tmp))));
+ assertEquals(-1400358045, x |= (-1400358046));
+ assertEquals(-2043022529.4273133, x += (tmp = -642664484.4273133, tmp));
+ assertEquals(-81408068.86728716, x %= (tmp = -980807230.2800131, tmp));
+ assertEquals(0.1436896445024992, x /= (((tmp = 3201789924.913518, tmp)%(tmp = -962242528.6008646, tmp))^((tmp = -338830119.55884504, tmp)*(tmp = -916120166, tmp))));
+ assertEquals(0.1436896445024992, x %= (tmp = 2598469263, tmp));
+ assertEquals(0, x *= (x-x));
+ assertEquals(-1409286144, x += (((-111514798.64745283)|(2372059654))<<(tmp = 175644313, tmp)));
+ assertEquals(-2393905467.0073113, x += (-984619323.0073113));
+ assertEquals(-835111172.0073113, x %= (x^(-765900532.5585573)));
+ assertEquals(-835111172.0073113, x %= (tmp = -946478116, tmp));
+ assertEquals(-100, x >>= ((-1020515908)>>(((x&((x^(169474253.53811646))>>(-221739002)))+x)*((201939882.92880356)/(tmp = -50402570, tmp)))));
+ assertEquals(2131506964, x &= (tmp = -2163460268, tmp));
+ assertEquals(1074275840, x &= ((-1561930379.8719592)*(tmp = -2871750052.876917, tmp)));
+ assertEquals(-954232605.5377102, x -= (tmp = 2028508445.5377102, tmp));
+ assertEquals(-29, x >>= (-279577351.87217045));
+ assertEquals(-232, x <<= x);
+ assertEquals(-70, x |= (215185578));
+ assertEquals(-1, x >>= (x>>(-1691303095)));
+ assertEquals(1, x /= x);
+ assertEquals(3149465364.2236686, x *= (3149465364.2236686));
+ assertEquals(3304787832.3790073, x += (tmp = 155322468.15533853, tmp));
+ assertEquals(100068712.23500109, x %= (tmp = 3204719120.1440063, tmp));
+ assertEquals(91628864, x &= (tmp = 629090241, tmp));
+ assertEquals(-113202292046379710, x *= (-1235443583));
+ assertEquals(122, x >>>= (tmp = 3196555256, tmp));
+ assertEquals(122, x >>>= (((2226535734)-x)^(2248399036.393125)));
+ assertEquals(6.904199169070746e-8, x /= (tmp = 1767040564.9149356, tmp));
+ assertEquals(-212687449.99999994, x += ((((2244322375)*(((2515994102)^x)>>x))<<(x-(-832407685.3251972)))^(2266670502)));
+ assertEquals(366515938514778750, x *= (tmp = -1723260768.3940866, tmp));
+ assertEquals(366515938514778750, x += ((-1643386193.9159095)/(tmp = 425161225.95316494, tmp)));
+ assertEquals(654872716.4123061, x /= ((-1377382984)-(tmp = -1937058061.811642, tmp)));
+ assertEquals(654872716, x &= x);
+ assertEquals(-86260926.17813063, x -= (tmp = 741133642.1781306, tmp));
+ assertEquals(1052176592, x >>>= x);
+ assertEquals(2020882856, x ^= (-3107796616));
+ assertEquals(0, x <<= ((606939871.9812952)|(tmp = -3127138319.1557302, tmp)));
+ assertEquals(NaN, x -= ((x%((1120711400.2242608)%x))*(tmp = -930171286.7999947, tmp)));
+ assertEquals(NaN, x %= (3215044180));
+ assertEquals(NaN, x %= (tmp = 2882893804.20102, tmp));
+ assertEquals(NaN, x %= ((217170359.5778643)^x));
+ assertEquals(0, x &= ((-1095125960.9903677)>>(x^(-2227981276))));
+ assertEquals(-748549860, x += (-748549860));
+ assertEquals(1816208256, x <<= (-610872411.3826082));
+ assertEquals(201400576, x &= (((tmp = 1910394603.4836266, tmp)<<x)^x));
+ assertEquals(0, x %= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x <<= (((((2670901339.6696005)%(2180020861))*((2134469504)/(2237096063.0680027)))*((tmp = 1203829756, tmp)>>((765467065)+(x|(2673651811.9494815)))))<<((-1463378514)|(((x/(tmp = -1075050081, tmp))-((-879974865)+x))>>>(tmp = 2172883926, tmp)))));
+ assertEquals(433013198, x ^= (433013198.2833413));
+ assertEquals(0, x >>= ((((-2404431196)%(x%(tmp = 1443152875.8809233, tmp)))&(x|((1414364997.0517852)/((tmp = -435854369, tmp)+(tmp = 2737625141, tmp)))))|(((tmp = 2241746562.2197237, tmp)^(tmp = -1606928010.1992552, tmp))|((tmp = -3083227418.686173, tmp)>>(tmp = -2717460410, tmp)))));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x *= ((tmp = 2302521322, tmp)>>>(((((((tmp = 344089066.9725498, tmp)%(tmp = 1765830559, tmp))-x)|x)^(((-2450263325)/(tmp = 371928405.17475057, tmp))>>>(1330100413.7731652)))^(((173024329)%(tmp = -2927276187, tmp))+(x>>>(-1042229940.308507))))|(((((tmp = 379074096, tmp)+((142762508)-((-2773070834.526266)-(x&((tmp = 57957493, tmp)<<(2189553500))))))+((36991093)+(tmp = 339487168.58069587, tmp)))*(-1257565451))&(tmp = 645233114, tmp)))));
+ assertEquals(-2644503151.1185284, x += (-2644503151.1185284));
+ assertEquals(-5289006302.237057, x += x);
+ assertEquals(-4008773824.2370567, x -= (tmp = -1280232478, tmp));
+ assertEquals(1975449413, x |= ((tmp = 1957832005.4285066, tmp)>>((1681236712.9715524)&(-675823978))));
+ assertEquals(-146472960, x <<= (-648510672.5644083));
+ assertEquals(-3, x |= (((((x>>>(tmp = 2271744104, tmp))+(tmp = -210058133.30147195, tmp))+(tmp = -2827493425, tmp))/(tmp = 765962538, tmp))%(tmp = 1048631551, tmp)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= (1070524782.5154183));
+ assertEquals(0, x <<= (462502504));
+ assertEquals(0, x %= (540589670.0730014));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x /= ((-1268640098)%x));
+ assertEquals(NaN, x %= (1741157613.744652));
+ assertEquals(NaN, x += x);
+ assertEquals(NaN, x %= ((x|(tmp = 1992323492.7000637, tmp))*x));
+ assertEquals(NaN, x /= ((tmp = -2271503368.0341196, tmp)>>((tmp = 1224449194, tmp)>>>(tmp = 2976803997, tmp))));
+ assertEquals(NaN, x += (tmp = -1078313742.1633894, tmp));
+ assertEquals(NaN, x += (-787923311));
+ assertEquals(NaN, x %= x);
+ assertEquals(-1299878219, x ^= (2995089077));
+ assertEquals(536887953, x &= ((625660571.2651105)&(x^(((tmp = 950150725.2319129, tmp)+(-2122154205.466675))/(tmp = 1754964696.974752, tmp)))));
+ assertEquals(4096, x >>>= x);
+ assertEquals(1, x /= x);
+ assertEquals(-82508517, x ^= (((-930231800)%(tmp = -423861640.4356506, tmp))+x));
+ assertEquals(-82508517, x &= (x&x));
+ assertEquals(-479519, x %= ((tmp = 1861364600.595756, tmp)|x));
+ assertEquals(479518, x ^= (((x>>(-1539139751.6860313))>>(tmp = -456165734, tmp))|(-2786433531)));
+ assertEquals(959036, x += x);
+ assertEquals(29, x >>>= ((tmp = -1049329009.7632706, tmp)^(((((((1117739997)/(((-841179741.4939663)*(-1211599672))>>>((-413696355)%(tmp = -1753423217.2170188, tmp))))<<(tmp = 1599076219.09274, tmp))>>>(-1382960317))^(((x^(tmp = 515115394, tmp))>>>(tmp = -388476217, tmp))>>>(x/x)))^x)<<(136327532.213817))));
+ assertEquals(24, x &= (2388755418));
+ assertEquals(0, x >>>= (tmp = -405535917, tmp));
+ assertEquals(0, x &= (tmp = -1427139674, tmp));
+ assertEquals(NaN, x /= (x^((1530470340)%x)));
+ assertEquals(0, x |= ((x>>(-1429690909.8472774))*((((tmp = 2033516515, tmp)/(1314782862))>>>x)>>(tmp = 1737186497.6441216, tmp))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= (3115422786));
+ assertEquals(-0, x *= (x+(tmp = -2558930842.267017, tmp)));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= (2695531252.254449));
+ assertEquals(-613178182, x ^= (-613178182));
+ assertEquals(54, x >>>= (x%(((tmp = 2277868389, tmp)^((((tmp = -1143932265.3616111, tmp)^((x&((x-((-2100384445.7850044)|(tmp = 908075129.3456883, tmp)))*x))+(((tmp = 1031013284.0275401, tmp)*((((tmp = -233393205, tmp)>>>(tmp = -111859419, tmp))*(-1199307178))|(tmp = -1998399599, tmp)))>>>((((-731759641.9036775)>>>(tmp = 2147849691, tmp))>>>(tmp = -2121899736, tmp))>>>(x>>>x)))))>>((1900348757.360562)^(tmp = 2726336203.6149445, tmp)))>>>((x*((tmp = -2697628471.0234947, tmp)%((x^(tmp = -2751379613.9474974, tmp))*x)))+(x>>(tmp = 42868998.384643435, tmp)))))+(598988941))));
+ assertEquals(34, x &= ((tmp = 2736218794.4991407, tmp)%(2169273288.1339874)));
+ assertEquals(2.086197133417468, x /= ((tmp = 2176358852.297597, tmp)%x));
+ assertEquals(2, x <<= (((tmp = -1767330075, tmp)|(-3107230779.8512735))&x));
+ assertEquals(4194304, x <<= (tmp = 1061841749.105744, tmp));
+ assertEquals(48609515, x ^= (44415211.320786595));
+ assertEquals(48609515, x %= (1308576139));
+ assertEquals(23735, x >>>= ((-324667786)-x));
+ assertEquals(23735, x <<= ((-1270911229)<<(((((tmp = -882992909.2692418, tmp)+(tmp = 394833767.947718, tmp))-x)<<(702856751))/x)));
+ assertEquals(-31080872939240, x *= (tmp = -1309495384, tmp));
+ assertEquals(-14625.31935626114, x /= ((668084131)+(1457057357)));
+ assertEquals(-14625.31935626114, x %= (266351304.6585492));
+ assertEquals(-12577, x |= (-945583977.619837));
+ assertEquals(-4097, x |= ((tmp = -2621808583.2322493, tmp)-(tmp = -2219802863.9072213, tmp)));
+ assertEquals(-1004843865, x &= ((-1004839768)+((tmp = 2094772311, tmp)/(-1340720370.275643))));
+ assertEquals(-31401371, x >>= ((2035921047)>>>((tmp = -1756995278, tmp)>>>(-537713689))));
+ assertEquals(1791746374.016472, x -= ((tmp = -1823147745, tmp)-(x/(tmp = -1906333520, tmp))));
+ assertEquals(3.7289343120517406, x /= (tmp = 480498240, tmp));
+ assertEquals(7.457868624103481, x += x);
+ assertEquals(234881024, x <<= (-781128807.2532628));
+ assertEquals(67108864, x &= (tmp = -2060391332, tmp));
+ assertEquals(-605958718, x -= (673067582));
+ assertEquals(-605958718, x <<= ((x%x)&((tmp = 1350579401.0801518, tmp)|x)));
+ assertEquals(-109268090.4715271, x %= (tmp = -496690627.5284729, tmp));
+ assertEquals(-109268090, x <<= (((-2004197436.8023896)%((x|((tmp = 271117765.61283946, tmp)-((1595775845.0754795)*(555248692.2512416))))/x))<<x));
+ assertEquals(-652725370, x &= (-543590449));
+ assertEquals(0.321858133298825, x /= (tmp = -2027990914.2267523, tmp));
+ assertEquals(1959498446, x ^= (1959498446));
+ assertEquals(1959498446, x &= (x%(tmp = 3155552362.973523, tmp)));
+ assertEquals(14949, x >>>= ((tmp = 586618136, tmp)>>>(tmp = 699144121.9458897, tmp)));
+ assertEquals(-28611391568319.285, x *= (tmp = -1913933478.3811147, tmp));
+ assertEquals(1680557633, x &= (((tmp = 2606436319.199714, tmp)<<(1575299025.6917372))|((-1092689109)/(735420388))));
+ assertEquals(1680361024, x &= ((tmp = 1860756552.2186172, tmp)|(-360434860.1699109)));
+ assertEquals(820488, x >>>= (1788658731));
+ assertEquals(820488, x >>= (-1555444352));
+ assertEquals(2104296413, x ^= (2103543509));
+ assertEquals(16843328, x &= ((x<<((-2920883149)/(1299091676)))-(((((tmp = 3199460211, tmp)+(-237287821.61504316))&(tmp = -1524515028.3596857, tmp))-(tmp = -700644414.6785603, tmp))+(-180715428.86124516))));
+ assertEquals(1326969834, x |= (tmp = -2968063574.793867, tmp));
+ assertEquals(0, x %= (x>>>(tmp = 1350490461.0012388, tmp)));
+ assertEquals(0, x &= ((-2620439260.902854)+x));
+ assertEquals(-1775533561, x |= ((-1775533561)|(((x>>>((861896808.2264911)>>>(970216466.6532537)))%x)%(tmp = 2007357223.8893046, tmp))));
+ assertEquals(-1775533561, x &= x);
+ assertEquals(-23058877.415584415, x /= ((tmp = -3002439857, tmp)>>((((x-(tmp = 1583620685.137125, tmp))|x)%(-2568798248.6863875))^x)));
+ assertEquals(-577.4155844151974, x %= (((-1440361053.047877)+((tmp = 821546785.0910633, tmp)-(((tmp = 1023830881.1444875, tmp)/(-754884477))+(tmp = 651938896.6258571, tmp))))>>(tmp = 346467413.8959185, tmp)));
+ assertEquals(-1, x >>= (tmp = 2993867511, tmp));
+ assertEquals(-1, x |= (tmp = 823150253.4916545, tmp));
+ assertEquals(-0, x %= x);
+ assertEquals(-0, x /= ((tmp = 997969036, tmp)&((((tmp = 928480121, tmp)>>(((-2610875857.086055)>>>(tmp = -2251704283, tmp))|x))+(10781750))>>x)));
+ assertEquals(0, x >>>= ((tmp = -1872319523, tmp)>>>(-278173884)));
+ assertEquals(0, x |= (x/(x*x)));
+ assertEquals(0, x %= ((77912826.10575807)^(tmp = 2770214585.3019757, tmp)));
+ assertEquals(0, x &= (tmp = 722275824, tmp));
+ assertEquals(-1417226266, x |= (tmp = 2877741030.1195555, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= (tmp = -1740126105, tmp));
+ assertEquals(910709964, x |= (tmp = 910709964, tmp));
+ assertEquals(-1744830464, x <<= (tmp = -2445932551.1762686, tmp));
+ assertEquals(318767104, x >>>= (tmp = -2465332061.628887, tmp));
+ assertEquals(301989888, x &= (-2771167302.022801));
+ assertEquals(301989888, x |= x);
+ assertEquals(37748736, x >>= (tmp = -835820125, tmp));
+ assertEquals(1474977371, x ^= (tmp = -2857738661.6610327, tmp));
+ assertEquals(470467500, x += (-1004509871));
+ assertEquals(0.30466562575942585, x /= (((tmp = 1515955042, tmp)<<(x+((1607647367)-(tmp = 1427642709.697169, tmp))))^x));
+ assertEquals(1.0348231148499734e-10, x /= (tmp = 2944132397, tmp));
+ assertEquals(0, x >>= (x>>>(tmp = -2847037519.569043, tmp)));
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x >>>= (-1817784819.9058492));
+ assertEquals(0, x >>= x);
+ assertEquals(-0, x *= ((tmp = -1387748473, tmp)|(x+(352432111))));
+ assertEquals(-0, x *= (((-2591789329)/(tmp = -2144460203, tmp))>>(tmp = -568837912.5033123, tmp)));
+ assertEquals(0, x <<= (-2963600437.305708));
+ assertEquals(0, x &= ((588720662)>>>x));
+ assertEquals(1561910729, x += (1561910729));
+ assertEquals(0, x ^= x);
+ assertEquals(-0, x *= (-2722445702));
+ assertEquals(0, x &= (tmp = -2738643199.732308, tmp));
+ assertEquals(0, x /= (((1859901899.227291)>>>((tmp = -1067365693, tmp)+((-1975435278)|x)))|((1844023313.3719304)&(tmp = -624215417.0227654, tmp))));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x %= (-2852766277));
+ assertEquals(0, x <<= (-1482859558));
+ assertEquals(0, x >>= x);
+ assertEquals(-1196775786, x += (tmp = -1196775786, tmp));
+ assertEquals(-68176201, x |= ((tmp = 2336517643, tmp)+x));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>= (2969141362.868086));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x >>= ((x-((((tmp = -905994835, tmp)|(tmp = 2850569869.33876, tmp))<<((-2405056608.27147)>>(tmp = 1280271785, tmp)))&(-1942926558)))*(tmp = 707499803.177796, tmp)));
+ assertEquals(0, x &= ((-697565829.8780258)+((2978584888.549406)%x)));
+ assertEquals(0, x >>= (748642824.4181392));
+ assertEquals(0, x += x);
+ assertEquals(0, x >>>= (-1701028721));
+ assertEquals(92042539, x -= ((-92042539)|(x*(x%(-293705541.00228095)))));
+ assertEquals(0, x %= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x %= (-2278672472.458228));
+ assertEquals(0, x %= (((-2374117528.0359464)/((tmp = -2809986062, tmp)|(tmp = 895734980, tmp)))&(tmp = 1564711307.41494, tmp)));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x += x);
+ assertEquals(-0, x /= ((tmp = -2749286790.3666043, tmp)<<(x^(-2966741582.324482))));
+ assertEquals(0, x *= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(-1882562314, x ^= (2412404982.782115));
+ assertEquals(-806620, x %= (((tmp = 1527219936.5232096, tmp)*(-1139841417))>>>(tmp = 201632907.3236668, tmp)));
+ assertEquals(-1613240, x += x);
+ assertEquals(-1664766177387640, x *= (1031939561));
+ assertEquals(-9.478083550117849e+23, x *= (tmp = 569334221.1571662, tmp));
+ assertEquals(-8.462574598319509e+21, x /= ((x-(tmp = -2985531211.114498, tmp))>>(tmp = 174615992.91117632, tmp)));
+ assertEquals(1638924288, x <<= (((((x>>((-1823401733.4788911)+((tmp = 1362371590, tmp)>>>x)))^(tmp = -56634380, tmp))/(tmp = 2387980757.1540084, tmp))%((((tmp = -3175469977, tmp)^(tmp = -1816794042, tmp))+(232726694))*(tmp = 822706176, tmp)))/(tmp = 1466729893.836311, tmp)));
+ assertEquals(2686072821796307000, x *= x);
+ assertEquals(-1007977445.9812208, x /= (-2664814408.800125));
+ assertEquals(-1007977445, x &= x);
+ assertEquals(322314656346249100, x *= (tmp = -319763758.54942775, tmp));
+ assertEquals(197436885.26815608, x /= (tmp = 1632494637, tmp));
+ assertEquals(-67191339, x |= ((-399580815.1746769)/((1335558363)/(tmp = 224694526, tmp))));
+ assertEquals(1229588737, x &= (tmp = 1296763683.5732255, tmp));
+ assertEquals(1229588737, x -= ((((1171546503)|((tmp = -2701891308, tmp)%(-2155432197.022206)))/(-306122816.85682726))>>x));
+ assertEquals(4162606632, x -= (tmp = -2933017895, tmp));
+ assertEquals(1.6487311395551163, x /= (2524733434.1748486));
+ assertEquals(-1929308648.9913044, x += (-1929308650.6400356));
+ assertEquals(-3858617297.982609, x += x);
+ assertEquals(788529152, x <<= (x^(1401824663)));
+ assertEquals(6160384, x >>>= ((((((x>>>x)>>((((x*(tmp = -1958877151, tmp))>>>(1310891043))-(tmp = 564909413.9962088, tmp))%(-175978438)))%x)|((tmp = -1193552419.7837512, tmp)*(tmp = 1508330424.9068346, tmp)))|(1428324616.3303494))-((1828673751)/(tmp = 1281364779, tmp))));
+ assertEquals(6160384, x |= x);
+ assertEquals(1, x /= x);
+ assertEquals(1, x &= (tmp = -855689741, tmp));
+ assertEquals(0, x >>>= x);
+ assertEquals(-1088569655.3528988, x -= (tmp = 1088569655.3528988, tmp));
+ assertEquals(-1088569655, x >>= ((tmp = 2429646226.626727, tmp)<<((-1539293782.4487276)>>(x^((tmp = 1140855945.537702, tmp)+x)))));
+ assertEquals(-311, x %= ((x/x)<<x));
+ assertEquals(1.2007722007722008, x /= (x|(tmp = 448796341.87655175, tmp)));
+ assertEquals(3, x |= (x+x));
+ assertEquals(-9.32416092168023e-10, x /= (-3217447688));
+ assertEquals(0, x >>= (615837464.0921166));
+ assertEquals(0, x >>>= (tmp = -2993750670.683118, tmp));
+ assertEquals(0, x >>>= (x%x));
+ assertEquals(1610612736, x ^= ((-1322905256.6770213)<<(-2567950598)));
+ assertEquals(1693676493, x ^= (83063757.63660407));
+ assertEquals(-758030371, x ^= (tmp = -1239274480, tmp));
+ assertEquals(-758030371, x %= (tmp = 1961339006, tmp));
+ assertEquals(-1509754528, x ^= (tmp = 1960027837, tmp));
+ assertEquals(-1509754528, x <<= x);
+ assertEquals(-1509754528, x -= (((tmp = -50690205.33559728, tmp)/((tmp = -1364565380, tmp)<<(tmp = 2585052504, tmp)))<<(tmp = -2356889596, tmp)));
+ assertEquals(1, x >>>= (-3204164321));
+ assertEquals(1, x *= x);
+ assertEquals(1114370230.591965, x *= ((tmp = 1114370229.591965, tmp)+x));
+ assertEquals(-4.886305275432552, x /= ((-228059887.33344483)%(2841553631.3685856)));
+ assertEquals(2.358309397373389e-9, x /= (((x*(tmp = 203428818.08174622, tmp))&(x-(((510438355)*x)+x)))+x));
+ assertEquals(0, x >>>= ((tmp = 1444810010, tmp)&(tmp = -3135701995.2235208, tmp)));
+ assertEquals(0, x /= (1865982928.6819582));
+ assertEquals(0, x *= x);
+ assertEquals(2078726016.3772051, x -= (tmp = -2078726016.3772051, tmp));
+ assertEquals(1580337898, x ^= ((tmp = -2714629398.447015, tmp)^x));
+ assertEquals(1268363034, x -= ((x+((tmp = 1144068248.3834887, tmp)&(-954104940.155973)))<<(tmp = 1270573731.7828264, tmp)));
+ assertEquals(1744830464, x <<= (((1444869551.7830744)>>>((((x+(tmp = -904688528, tmp))<<x)-((tmp = 121151912.85873199, tmp)/(tmp = -2414150217.66479, tmp)))|(((-472906698)|(3215236833.8417764))+(907737193.9056952))))-((x&(-732223723))|(-221800427.7392578))));
+ assertEquals(717338523283226600, x *= (x^(tmp = -2407450097.0604715, tmp)));
+ assertEquals(402653184, x >>= ((-3191405201.168252)*((tmp = -1941299639.695196, tmp)|(((x>>(((3215741220)>>>x)/(x+x)))^(((tmp = -2144862025.9842231, tmp)|((tmp = -1966913385, tmp)&x))%x))*((tmp = -1124749626.6112225, tmp)/(tmp = 837842574, tmp))))));
+ assertEquals(402653184, x &= ((x|x)>>x));
+ assertEquals(134217728, x &= ((2720231644.3849487)*x));
+ assertEquals(134217726.75839183, x -= ((2438054684.738043)/(((((-984359711)*(x|((tmp = 177559682, tmp)^x)))/(-1253443505))/((2727868438.416792)*(x+((x<<(((tmp = 3023774345, tmp)&(-705699616.0846889))/x))<<x))))^(1963626488.548761))));
+ assertEquals(1, x /= x);
+ assertEquals(245781494, x += ((tmp = 2551445099, tmp)^(2528486814)));
+ assertEquals(-1474427807, x ^= (-1497868393.342241));
+ assertEquals(-1057271682, x += ((((((x>>x)%(-1556081693))|(x/(((1166243186.6325684)-(((tmp = 2870118257.1019487, tmp)/(x+(-69909960)))^(2270610694.671496)))/((1463187204.5849519)-x))))-x)-(x<<(-3077313003)))%x));
+ assertEquals(-1065725846, x &= ((tmp = -1808223767, tmp)|(-481628214.3871765)));
+ assertEquals(-1065725846, x ^= (x&(((tmp = -1785170598, tmp)-(tmp = -2525350446.346484, tmp))/((((((-1783948056)^(tmp = 3027265884.41588, tmp))|((((tmp = 2195362566.2237773, tmp)<<(-2919444619))<<((tmp = -2507253075.2897573, tmp)^(x^((tmp = 1067516137, tmp)+((667737752)^(x*(tmp = -1187604212.7293758, tmp)))))))%(-617406719.5140038)))*(tmp = 511060465.6632478, tmp))*((tmp = 2580189800.752836, tmp)|((((tmp = 2357895660, tmp)%((-814381220)*(x-((x>>>(((x<<x)<<(tmp = 1919573020, tmp))-x))>>>((-2756011312.136148)>>(tmp = -1603458856, tmp))))))/((tmp = -1609199312, tmp)&(-3127643445)))%x)))<<(-2261731798)))));
+ assertEquals(1.6020307924030301, x /= (tmp = -665234308.2628405, tmp));
+ assertEquals(-1120020556.697667, x *= (tmp = -699125486.2321637, tmp));
+ assertEquals(-215875188, x -= (((((tmp = -1307845034, tmp)>>>((((-2820720421)^x)-(((x<<x)|(tmp = -3042092997.57406, tmp))+(((-1294857544)+((tmp = -668029108.1487186, tmp)>>(x<<x)))^(912144065.5274727))))^(389671596.2983854)))|(-2774264897.146559))%(x-((tmp = 1378085269, tmp)^x)))+((-1659377450.5247462)&(((1613063452.834885)>>>((-344896580.0694165)>>>((-13450558)+x)))^x))));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>>= (2355750790));
+ assertEquals(1969435421.4409347, x += (1969435421.4409347));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>>= (((x*((-1022802960.6953495)<<(tmp = -2848428731.8339424, tmp)))^(-1630921485))%(1532937011)));
+ assertEquals(0, x <<= ((x+((x^(x^(tmp = 2017651860, tmp)))&(((x<<(((tmp = -1913317290.8189478, tmp)|(x-((((x%((tmp = -3035245210, tmp)+(-2270863807)))>>>((-2351852712)*(x^(-2422943296.0239563))))&((((-1578312517)%x)*x)*(-65592270.28452802)))>>>(tmp = 1104329727.2094703, tmp))))-(tmp = -1431159990.3340137, tmp)))&x)|((tmp = -2589292678.801344, tmp)&(x+((((tmp = -2557773457.456996, tmp)>>(451910805.309445))-x)>>(((tmp = -1937832765.7654495, tmp)^x)%x)))))))%x));
+ assertEquals(0, x %= (tmp = -626944459, tmp));
+ assertEquals(-732310021, x |= (tmp = -732310021, tmp));
+ assertEquals(-732310021, x |= x);
+ assertEquals(671352839, x ^= (x-((-3087309090.7153115)|x)));
+ assertEquals(134479872, x &= (tmp = 2357183984, tmp));
+ assertEquals(18084835973136384, x *= x);
+ assertEquals(0, x <<= ((1040482277)-(tmp = -357113781.82650447, tmp)));
+ assertEquals(74957, x |= ((((tmp = -70789345.7489841, tmp)%(tmp = 1415750131, tmp))&x)|((307027314)>>(2284275468))));
+ assertEquals(9, x >>>= x);
+ assertEquals(0, x &= (x&((x*((x*(x%x))%(x>>x)))/x)));
+ assertEquals(-1872875060, x |= (2422092236.6850452));
+ assertEquals(9, x >>>= (-382763684));
+ assertEquals(4608, x <<= x);
+ assertEquals(40.480234260614935, x /= (((((((tmp = 814638767.5666755, tmp)&((tmp = 2081507162, tmp)^(x>>>(1460148331.2229118))))&(tmp = 1187669197.7318723, tmp))<<(412000677.93339765))^((tmp = 556111951, tmp)>>(tmp = -2232569601.292395, tmp)))&(-3006386864))/x));
+ assertEquals(32, x &= (-3053435209.383913));
+ assertEquals(418357217, x ^= (418357185));
+ assertEquals(204275, x >>= ((-1188650337.9010527)^((51494580)%(-2544545273))));
+ assertEquals(982392804, x += (((x+(((tmp = -982596937.9757051, tmp)+x)%(-2298479347)))^((((tmp = 1610297674.0732534, tmp)>>>x)*(((x>>(-2746780903.08599))&(-2376190704.247188))^(((20545353)/(tmp = 1468302977, tmp))-(x<<x))))>>(((-1434332028.0447056)/((tmp = 1983686888, tmp)&((tmp = 2324500847, tmp)%(394330230.6163173))))%(((-1129687479.2158055)+((-3127595161)*((-3066570223)&((tmp = 3192134577.4963055, tmp)/(-2697915283.3233275)))))+(-1112243977.5306559)))))|(x&(-2622725228))));
+ assertEquals(-2735750653096133600, x *= (-2784782870.9218984));
+ assertEquals(-1876329472, x |= ((((((2752866171)<<(-1681590319))/x)>>((tmp = 1451415208, tmp)>>>(1126858636.6634417)))+(((tmp = 2165569430.4844217, tmp)/x)^(((tmp = -1675421843.4364457, tmp)-(-2187743422.2866993))|x)))*x));
+ assertEquals(3520612287495799000, x *= x);
+ assertEquals(-200278016, x |= ((((-2379590931)%((((-1558827450.833285)&x)>>(-665140792))-((tmp = -445783631.05567217, tmp)+(tmp = 93938389.53113222, tmp))))/(3103476273.734701))^x));
+ assertEquals(-9178285062592.75, x *= ((2042671875.7211144)%(((tmp = 589269308.0452716, tmp)/x)<<(-130695915.9934752))));
+ assertEquals(60048960, x |= (x<<x));
+ assertEquals(60048960, x <<= ((((((tmp = -2793966650, tmp)/(-2882180652))&(((x<<((tmp = -384468710, tmp)+(2236162820.9930468)))>>>((((969371919)>>((tmp = -3153268403.2565875, tmp)-((((573811084)/x)^(tmp = -968372697.4844134, tmp))>>>(((-3096129189)>>x)/(tmp = 830228804.6249363, tmp)))))<<(((1243972633.3592157)|x)&((-1687610429)&(tmp = -1945063977.458529, tmp))))<<(((tmp = -217456781.37068868, tmp)-(400259171.68077815))^x)))>>>x))%(((2728450651.300167)/(((-2713666705.089135)%(tmp = 740472459, tmp))^x))|x))^x)*(-2463032364)));
+ assertEquals(60048960, x %= (tmp = -442107222.9513445, tmp));
+ assertEquals(-1573781504, x <<= (960581227));
+ assertEquals(1297, x >>>= (tmp = -1692919563, tmp));
+ assertEquals(1297, x &= x);
+ assertEquals(-3113308397155.233, x *= (tmp = -2400391979.3024154, tmp));
+ assertEquals(-3115513013486.233, x -= (2204616331));
+ assertEquals(-3113809649082.233, x -= (-1703364404));
+ assertEquals(0, x >>>= (((-1181206665)-(550946816.586771))|(tmp = -2346300456, tmp)));
+ assertEquals(0, x %= (tmp = 1649529739.2785435, tmp));
+ assertEquals(0, x ^= ((tmp = -2452761827.2870226, tmp)%(((1090281070.5550141)/(tmp = 992149154.6500508, tmp))*(x<<((((((x>>>x)|((tmp = -2410892363, tmp)%(tmp = 2585150431.0231533, tmp)))/x)*(tmp = 1541294271, tmp))+x)&((97566561.77126992)&((((-640933510.1287451)&(((((x>>>((-1821077041)<<((tmp = -1138504062.093695, tmp)-(tmp = -181292160, tmp))))%x)-(x>>((x&(((tmp = 1067551355, tmp)/(x|(1004837864.8550552)))&(x-(-103229639.25084043))))&((tmp = 2064184671.210937, tmp)+((((tmp = -2245728052, tmp)|(1538407002.8365717))+(x<<((x>>((76549490)/(tmp = 628901902.6084052, tmp)))<<((x<<x)^(-1907669184)))))+(-1409123688))))))>>>((((-1911547456.933543)-((-512313175)+((tmp = -2620903017, tmp)^(tmp = 2148757592.244808, tmp))))<<((-1740876865)>>>x))+((tmp = 691314720.9488736, tmp)<<(614057604.4104803))))|(x^((tmp = -3040687.291528702, tmp)/(x^(((x+(-2899641915))^((tmp = -1220211746, tmp)/x))%x))))))^(tmp = 119850608, tmp))%(2091975696))))))));
+ assertEquals(291273239, x -= (tmp = -291273239, tmp));
+ assertEquals(2206394018, x += (1915120779));
+ assertEquals(235641480, x <<= (x&(x&(-1810963865.1415658))));
+ assertEquals(28764, x >>= ((tmp = -1927011875, tmp)^((tmp = -1986461808, tmp)|((-868139264.8399222)*((421956566)%(3068424525))))));
+ assertEquals(-99780626900900, x *= ((tmp = -1512869526.3223472, tmp)+(tmp = -1956071751, tmp)));
+ assertEquals(51218520, x &= (((-2353401311)>>>x)-(2216842509)));
+ assertEquals(51218520, x >>>= ((tmp = -1534539302.6990812, tmp)<<x));
+ assertEquals(-2147483648, x <<= (-292608644));
+ assertEquals(-2147483648, x |= ((((((x<<((-2981292735)-x))>>((tmp = 2540545320.96558, tmp)&(tmp = -2343790880, tmp)))>>>((((((x^((-172697043.94487858)/((2627260337)>>(2879112814.1247935))))&(tmp = 3000943191, tmp))<<(tmp = 1094830905, tmp))-x)>>>x)>>((((tmp = 3095796200, tmp)^(x|(tmp = 1460377694, tmp)))<<(x^(tmp = -357546193, tmp)))/((2729539495)>>x))))%(tmp = 268894171.74961245, tmp))|(x>>(tmp = 2735650924, tmp)))/(-2197885357.09768)));
+ assertEquals(-2147483648, x |= x);
+ assertEquals(-1967162776824578000, x *= (tmp = 916031551, tmp));
+ assertEquals(-2147483648, x &= x);
+ assertEquals(-457743917756973060, x *= (tmp = 213153622, tmp));
+ assertEquals(0, x >>>= ((((tmp = 2930076928.480559, tmp)+(x^x))<<(tmp = -1349755597.1280541, tmp))|(x+(2865632849))));
+ assertEquals(0, x <<= ((x>>x)-(x>>(-2629977861))));
+ assertEquals(0, x <<= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x |= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(749327478, x |= ((tmp = 749327478, tmp)^(x>>(tmp = 881107862, tmp))));
+ assertEquals(1897869364, x += (1148541886));
+ assertEquals(463347, x >>>= (tmp = -726431220, tmp));
+ assertEquals(-395990542, x += (-396453889));
+ assertEquals(-2824792585.1675367, x -= (2428802043.1675367));
+ assertEquals(-2147483648, x <<= (tmp = -1420072385.9175675, tmp));
+ assertEquals(8388608, x >>>= (-2211390680.488455));
+ assertEquals(8388608, x >>= (((x/(x|(((x^(((tmp = -2175960170.8055067, tmp)|((tmp = -1964957385.9669886, tmp)/(tmp = -475033330, tmp)))&((x|((tmp = 1386597019.2014387, tmp)>>((tmp = -2406589229.8801174, tmp)+x)))<<(tmp = -844032843.8415492, tmp))))>>(x^x))|x)))-((x&((tmp = 1858138856, tmp)*(-3156357504)))%x))<<(((2046448340)+x)/(-2645926916))));
+ assertEquals(8359470765396279, x *= ((tmp = 871437183.7888144, tmp)-(-125089387.17460155)));
+ assertEquals(0, x ^= x);
+ assertEquals(-303039014, x += ((tmp = -2475713214, tmp)|(-372871718.2343409)));
+ assertEquals(2655126577, x -= (-2958165591));
+ assertEquals(1830332793, x ^= (tmp = -212161208, tmp));
+ assertEquals(1830332793, x ^= (((2352454407.0126333)<<((((tmp = 3083552367, tmp)/x)-(-1243111279))-((tmp = -1669093976, tmp)%(((-757485455)-(tmp = -116051602, tmp))<<x))))>>(((((-2235071915.9536905)>>(tmp = -1284656185, tmp))-x)>>((-1807028069.7202528)>>>((x%((tmp = -3070857953.311804, tmp)+((tmp = 2759633693.441942, tmp)%((169489938)*(-1582267384)))))<<(x^((tmp = -787578860, tmp)<<x)))))>>((x/(x|(409464362)))-(tmp = -64033017, tmp)))));
+ assertEquals(397605933.90319204, x %= (tmp = 716363429.548404, tmp));
+ assertEquals(186400, x &= (((x%(-1745754586))>>>x)<<(x&(x&((-2163627752)-((1784050895)+(((-2864781121.899456)>>>x)&x)))))));
+ assertEquals(186400, x %= (tmp = -423209729, tmp));
+ assertEquals(186400, x <<= ((x<<(x+(1232575114.4447284)))*x));
+ assertEquals(1386299, x ^= ((tmp = -1074209615, tmp)>>>(x>>>((tmp = -1456741008.2654872, tmp)>>((1724761067)>>(-2016103779.9084842))))));
+ assertEquals(347302967.20758367, x -= (-345916668.20758367));
+ assertEquals(1.9325619389304094, x /= (179711170.03359854));
+ assertEquals(-3703324711.628227, x *= (tmp = -1916277371, tmp));
+ assertEquals(-920980517031624800, x *= (tmp = 248690187.53332615, tmp));
+ assertEquals(0, x &= (((tmp = -2753945953.082594, tmp)*x)-(172907186)));
+ assertEquals(-0, x /= (((((-2744323543.187253)>>((tmp = 2663112845, tmp)>>(((-121791600)+(x^x))*(2758944252.4214177))))|x)/(tmp = -2746716631.6805267, tmp))-x));
+ assertEquals(0, x ^= ((tmp = 983113117, tmp)&((2638307333)+((((tmp = 3076361304.56189, tmp)<<(-2663410588.5895214))%((-1109962112)-(tmp = -2381021732, tmp)))%((tmp = 410559095, tmp)&x)))));
+ assertEquals(0, x <<= (tmp = 1510895336.5111506, tmp));
+ assertEquals(0, x <<= (tmp = -1688348296.2730422, tmp));
+ assertEquals(2269471424, x -= (-2269471424));
+ assertEquals(-2022580224, x ^= (x%((tmp = 160999480.21415842, tmp)&x)));
+ assertEquals(-2077171712, x &= (tmp = 3032415014.3817654, tmp));
+ assertEquals(270727, x >>>= (2973489165.1553965));
+ assertEquals(270727, x |= x);
+ assertEquals(-1895894537, x |= ((tmp = -1895903118.129186, tmp)|x));
+ assertEquals(-1895894537, x -= ((((((((3143124509)>>>(-2866190144.8724117))*((x>>((961021882)*(tmp = 2363055833.8634424, tmp)))/((2032785518)+((2713643671.3420825)>>((-447782997.0173557)*((tmp = 1174918125.3178625, tmp)*((((tmp = -541539365.548115, tmp)%(-359633101))|(1765169562.2880063))+(tmp = -2512371966.374508, tmp))))))))/x)>>(x*((((-847238927.6399388)&(857288850))%(-2427015402))^((2221426567)%(x+x)))))>>>x)<<((tmp = 2009453564.2808268, tmp)>>((2924411494)<<(x>>(tmp = -1240031020.8711805, tmp)))))%(tmp = 3118159353, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= (-30151583));
+ assertEquals(-1035186736, x ^= ((tmp = -517593368, tmp)<<(tmp = 3216155585, tmp)));
+ assertEquals(49740, x >>>= x);
+ assertEquals(49740, x %= (640223506));
+ assertEquals(388, x >>>= ((x>>(tmp = 3161620923.50496, tmp))+(2605183207)));
+ assertEquals(776, x += x);
+ assertEquals(-97905, x ^= ((((((tmp = 145447047.8783008, tmp)^(((x>>>(tmp = 3014858214.2409887, tmp))>>>(629911626.132971))>>(((x+((369309637.229408)-x))<<(-2661038814.9204755))*(x+(x%(3025191323.4780884))))))+x)*(-482550691))|(-632782135))/x));
+ assertEquals(-97905, x %= ((((-492914681)-((-2508632959.269368)&(tmp = 1209318291, tmp)))>>(-723512989.459533))>>>(((-528429623.985692)&(x^(tmp = -925044503, tmp)))-(-1696531234))));
+ assertEquals(9585389025, x *= x);
+ assertEquals(-715425728, x <<= ((583763091)<<(-1223615295)));
+ assertEquals(-520093696, x <<= ((tmp = -1891357699.671592, tmp)*(((tmp = 3206095739.5163193, tmp)+(-2908596651.798733))>>>((tmp = -2820415686, tmp)>>(x|((((tmp = -566367675.6250327, tmp)*(-959117054))>>((((-187457085.89686918)*x)*(tmp = -2394776877.5373516, tmp))>>>x))|(((tmp = 80478970.46290505, tmp)<<(tmp = 2173570349.493097, tmp))-(x/((-2896765964)-((x/((tmp = 198741535.7034216, tmp)%(436741457)))%(tmp = 2936044280.0587225, tmp)))))))))));
+ assertEquals(-2520.5909527086624, x /= ((211290893.06029093)>>(663265322)));
+ assertEquals(-2520.5909527086624, x %= (x^((1057915688)<<(tmp = 1914820571.1142511, tmp))));
+ assertEquals(1, x >>>= (((894963408.7746166)+(tmp = -2888351666, tmp))|x));
+ assertEquals(-1989841636629996300, x += ((1424670316.224575)*((-2144149843.0876865)|((((421479301.0983993)|((3082651798)^(tmp = -271906497, tmp)))>>x)+((tmp = -178372083, tmp)%x)))));
+ assertEquals(17935384255.088326, x /= (((((((tmp = 1168194849.2361898, tmp)>>>(-107316520.53815603))>>>(x^(((x%((x>>>(((-2456622387)/x)&((2124689803)|(((-1130151701)^(2796315158))>>x))))-((-884686033.5491502)>>>((-2371185318.5358763)&x))))+(tmp = 558422989, tmp))|((tmp = -420359120.0596726, tmp)/((-1820568437.0587764)&(2298602280.266465))))))>>(x-((tmp = -1164568978, tmp)^x)))^x)-x)+x));
+ assertEquals(134233150, x &= ((x>>(((tmp = 98498118.13041973, tmp)-(804574397))/(tmp = -1564490985.7904541, tmp)))+x));
+ assertEquals(4, x >>= (449610809));
+ assertEquals(1912543790, x |= (1912543790));
+ assertEquals(2487274263, x += (tmp = 574730473, tmp));
+ assertEquals(-2140759118, x ^= (tmp = 338055333.9701035, tmp));
+ assertEquals(311607367, x += (2452366485));
+ assertEquals(9509, x >>= (372113647.84365284));
+ assertEquals(-2001075684.1562128, x += (-2001085193.1562128));
+ assertEquals(-638703280, x ^= (((tmp = 1096152237, tmp)&x)|((2707404245.0966487)-(((tmp = 1550233654.9691348, tmp)+(tmp = 2008619647, tmp))&((tmp = -2653266325, tmp)+(tmp = -280936332, tmp))))));
+ assertEquals(-101811850, x |= (-2250090202));
+ assertEquals(-13, x >>= ((-561312810.0218933)|(tmp = 79838949.86521482, tmp)));
+ assertEquals(-13, x >>= ((tmp = -936543584, tmp)/(1180727664.1746705)));
+ assertEquals(-1547, x *= (((tmp = 1005197689, tmp)>>>x)>>>(tmp = 34607588, tmp)));
+ assertEquals(2393209, x *= x);
+ assertEquals(2393209, x |= x);
+ assertEquals(0, x >>= (-2691279235.1215696));
+ assertEquals(0, x *= (((896175510.4920144)*((((tmp = 1770236555.7788959, tmp)%(537168585.7310632))/x)&(tmp = 1094337576, tmp)))&(((x-x)-x)>>x)));
+ assertEquals(-1922620126, x ^= (-1922620126));
+ assertEquals(3.43481396325761, x /= (tmp = -559745053.6088333, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>>= (tmp = 2106956255.6602135, tmp));
+ assertEquals(-1339003770, x ^= ((tmp = 2955963526.960022, tmp)+x));
+ assertEquals(-0, x *= ((((tmp = 368669994, tmp)>>>(x*x))<<(tmp = 2355889375, tmp))&(tmp = -2267550563.9174895, tmp)));
+ assertEquals(0, x >>= (753848520.8946902));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x %= ((tmp = -2872753234.2257266, tmp)|x));
+ assertEquals(NaN, x %= (x>>>(tmp = 890474186.0898918, tmp)));
+ assertEquals(NaN, x %= ((tmp = 1341133992.284471, tmp)&(tmp = -2979219283.794898, tmp)));
+ assertEquals(NaN, x += (-2865467651.1743298));
+ assertEquals(NaN, x += ((-1424445677)%(x^(tmp = 1150366884, tmp))));
+ assertEquals(0, x &= (x+((tmp = 1499426534, tmp)+x)));
+ assertEquals(0, x |= (((((tmp = -2413914642, tmp)<<((x>>>x)^(1218748804)))+((((-1085643932.2642736)-(-1199134221.533854))>>(tmp = 2148778719, tmp))-((tmp = 1589158782.0040946, tmp)/(tmp = -2485474016.1575155, tmp))))>>>(x>>x))/(2230919719)));
+ assertEquals(0, x %= ((tmp = -2576387170.517563, tmp)>>>((tmp = -2362334915.919525, tmp)>>>(((3096453582)-(700067891.4834484))^(2396394772.9253683)))));
+ assertEquals(-1798103432, x ^= (((((tmp = 2396144191, tmp)*(x>>>(1512158325)))&(((-1256228298.5444434)&(((-2963136043.434966)&((tmp = 2472984854, tmp)+(tmp = -454900927, tmp)))%(tmp = 484255852.65332687, tmp)))>>((x%x)-x)))&(tmp = 929723984, tmp))^(tmp = -1798103432.5838807, tmp)));
+ assertEquals(-2137913344, x &= ((((x|(-2970116473))&(((x/x)/((tmp = 2853070005, tmp)>>>x))%(((tmp = -3123344846, tmp)/((2224296621.6742916)-(tmp = -2246403296.455411, tmp)))+((x&(((x^(x*(2829687641)))+x)&(tmp = 988992521, tmp)))^x))))<<((((-820608336)^(tmp = 2851897085, tmp))>>(tmp = -402427624, tmp))>>>x))-(((x*(((-2287402266.4821453)%(tmp = -520664172.1831205, tmp))^(x/(1875488837))))<<(tmp = 402393637, tmp))&(tmp = 1576638746.3047547, tmp))));
+ assertEquals(-2827557853031924000, x *= (tmp = 1322578326.6507945, tmp));
+ assertEquals(6.424459501778244e+27, x *= (tmp = -2272087729.3065624, tmp));
+ assertEquals(-1586887483, x |= (-1586887483));
+ assertEquals(-567868980691736100, x *= (tmp = 357850816, tmp));
+ assertEquals(1489101591, x ^= (x%(x|(421921075))));
+ assertEquals(-801213804822328000, x *= (x|(-672326904.6888077)));
+ assertEquals(612257233.6612054, x /= (((tmp = -350127617, tmp)>>>(-1140467595.9752212))<<((x^x)+(-3117914887))));
+ assertEquals(19097.231243331422, x /= ((x^(tmp = -570012517, tmp))>>>x));
+ assertEquals(0, x >>= ((x%(((-2347648358)%((x-(tmp = -456496327, tmp))|(x^(-1977407615.4582832))))<<(x/(tmp = -2021394626.214082, tmp))))%(tmp = -949323000.2442119, tmp)));
+ assertEquals(0, x <<= x);
+ assertEquals(NaN, x %= (x^(x>>(((tmp = 597147546.7701412, tmp)&(((((-972400689.6267757)|(tmp = -2390675341.6367044, tmp))|(tmp = 1890069123.9831812, tmp))<<(((1606974563)-(tmp = -2211617255.8450356, tmp))&((((x+((2433096953)&(-2527357746.681596)))*(tmp = -313956807.55609417, tmp))|((tmp = -2146031047.968496, tmp)/(tmp = 2851650714.68952, tmp)))>>(((tmp = 2630692376.6265225, tmp)-(tmp = -3162222598, tmp))>>((tmp = 1915552466, tmp)*(x>>>(-2413248225.7536864)))))))&(x%((((1218471556)|x)+(tmp = -849693122.6355379, tmp))+x))))>>>(x/((tmp = 689889363, tmp)/x))))));
+ assertEquals(0, x >>>= (45649573.23297));
+ assertEquals(0, x >>>= (tmp = 1084439432.771266, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x *= (tmp = 1642750077, tmp));
+ assertEquals(0, x >>>= (tmp = -1944001182.0778434, tmp));
+ assertEquals(1682573000, x |= (tmp = -2612394296.2858696, tmp));
+ assertEquals(3041823595, x -= (((tmp = 720576773, tmp)|(x^(-1068335724.2253149)))>>(x*(-2501017061))));
+ assertEquals(6083647190, x += x);
+ assertEquals(-6536258988089986000, x *= ((tmp = 632312939.6147232, tmp)|((-1621821634)+(((tmp = -2281369913.562131, tmp)&((tmp = -381226774, tmp)|x))&(664399051)))));
+ assertEquals(4.272268155938712e+37, x *= x);
+ assertEquals(733271152, x %= (-1345127171));
+ assertEquals(847089925, x ^= (tmp = 432620917.57699084, tmp));
+ assertEquals(1337073824, x <<= x);
+ assertEquals(-25810602, x ^= (tmp = 2982414838, tmp));
+ assertEquals(-25282209, x |= ((tmp = -2927596922, tmp)>>>(-2404046645.01413)));
+ assertEquals(639190091919681, x *= x);
+ assertEquals(173568320, x &= ((((tmp = -718515534.4119437, tmp)&(tmp = 2989263401, tmp))<<x)|((tmp = 537073030.5331153, tmp)-(tmp = 883595389.314624, tmp))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>>= (tmp = -1844717424.917882, tmp));
+ assertEquals(0, x >>= (tmp = -462881544.2225325, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(-1868450038, x ^= (2426517258.6111603));
+ assertEquals(1, x /= x);
+ assertEquals(1175936039.4202638, x += (tmp = 1175936038.4202638, tmp));
+ assertEquals(-127916015, x ^= ((x/(1841969600.3012052))-(tmp = 1099467723, tmp)));
+ assertEquals(395713785658171900, x *= (-3093543726));
+ assertEquals(395713787128560900, x += (((((-717204758)*(tmp = -588182129.6898501, tmp))-x)+(tmp = 20638023, tmp))^x));
+ assertEquals(-962609355, x |= ((x^(-3118556619.912983))<<((tmp = 876126864, tmp)&x)));
+ assertEquals(-962609355, x %= (tmp = -2079049990, tmp));
+}
+f();
diff --git a/src/3rdparty/v8/test/mjsunit/numops-fuzz-part2.js b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part2.js
new file mode 100644
index 0000000..51260a4
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part2.js
@@ -0,0 +1,1178 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function f() {
+ var x = -962609355;
+ var tmp = 0;
+ assertEquals(-114583755, x -= (((-2806715240)&(((1961136061.0329285)>>>((2087162059)*x))+((tmp = -1890084022.7631018, tmp)%(tmp = 2137514142.358262, tmp))))+(x<<(tmp = 2991240918, tmp))));
+ assertEquals(-425721856, x <<= x);
+ assertEquals(3778560, x >>>= ((x|(3198503572))>>(1158434541.1099558)));
+ assertEquals(3778560, x %= (tmp = -2592585378.9592104, tmp));
+ assertEquals(624640, x &= (tmp = 2261638192.9864054, tmp));
+ assertEquals(1249280, x += x);
+ assertEquals(1048576, x &= ((tmp = -2144301819.9892588, tmp)^((x-x)<<x)));
+ assertEquals(2097152, x <<= (x/x));
+ assertEquals(5069061551149729, x *= (tmp = 2417116904.8069615, tmp));
+ assertEquals(1.4836296666029616e+25, x += ((tmp = 2926833006.7121572, tmp)*x));
+ assertEquals(-256, x >>= ((-469330345.3589895)%((x^(((2554170843.4978285)/(2495676674.815263))>>>x))*(-918892963))));
+ assertEquals(-134217728, x <<= (x|(((((1687450853.1321645)+(tmp = 2369533014.5803776, tmp))+(tmp = -2613779445, tmp))+(tmp = -2488826226.3733397, tmp))>>(tmp = -220646936.41245174, tmp))));
+ assertEquals(704164545131708400, x *= ((-2632786741)+(-2613647956)));
+ assertEquals(9216, x >>>= (-1925405359.657349));
+ assertEquals(4491403261551.008, x *= (tmp = 487348444.1787118, tmp));
+ assertEquals(4490606381829.008, x -= (tmp = 796879722, tmp));
+ assertEquals(-60294056, x >>= x);
+ assertEquals(-3193966580.494005, x += (tmp = -3133672524.494005, tmp));
+ assertEquals(550500358, x >>>= ((tmp = -2779637628.390116, tmp)-((tmp = 29230786.984039664, tmp)%(tmp = -310649504.7704866, tmp))));
+ assertEquals(68812544, x >>= (-1347584797));
+ assertEquals(1.2120221595741834e-11, x /= ((2791020260)*((((1964870148.6358237)^x)|(-3082869417))-((x^x)&((1234292117.8790703)<<(-1792461937.2469518))))));
+ assertEquals(1.2120221595741834e-11, x %= (x-(2780439348)));
+ assertEquals(-1421552183, x |= (tmp = -1421552183.5930738, tmp));
+ assertEquals(-1420954119, x |= ((((-2547788562.5735893)<<x)%(435385623))>>(x|x)));
+ assertEquals(1, x /= x);
+ assertEquals(1, x >>= (x>>>(((2975715011.501709)-(tmp = -1473273552.981069, tmp))/(1654883913.042487))));
+ assertEquals(-65382, x ^= ((x/((tmp = -2780026200, tmp)<<x))^(((-2683084424)<<x)>>(-1716245874))));
+ assertEquals(1530921106, x &= (1530940914));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x /= (tmp = 773741434.1972584, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(0, x <<= (-67977514.99888301));
+ assertEquals(0, x %= (2496550482.524729));
+ assertEquals(-0, x /= (tmp = -515040417, tmp));
+ assertEquals(0, x <<= (-1673460935.2858837));
+ assertEquals(-2638209488, x += (-2638209488));
+ assertEquals(-2400951839498683400, x *= (910068685));
+ assertEquals(1600582036, x ^= (((-1247602308.4812562)>>(((-2393714444.179732)>>>x)%(-778140635.7165127)))+(-1933914727.2268424)));
+ assertEquals(0, x *= ((x-x)>>(-1270234575)));
+ assertEquals(0, x >>>= (tmp = 3193676327.493656, tmp));
+ assertEquals(0, x ^= (x>>>(1148676785.389884)));
+ assertEquals(0, x >>= (tmp = -2269181763.8663893, tmp));
+ assertEquals(0, x >>= (3149450221));
+ assertEquals(0, x >>= (1069630750));
+ assertEquals(-625009654, x ^= ((-2143499112)%(-759244728.6214335)));
+ assertEquals(3583943, x >>>= (-2942645558.1204453));
+ assertEquals(1791971, x >>= (x/x));
+ assertEquals(223996, x >>= x);
+ assertEquals(6999, x >>= (tmp = -1051883611.9443719, tmp));
+ assertEquals(1459617792, x <<= (-1572314984));
+ assertEquals(2622356453.269262, x -= (tmp = -1162738661.2692618, tmp));
+ assertEquals(5103676461.269262, x += (2481320008));
+ assertEquals(823989684.2692623, x %= (x^(((((1048362966)*((tmp = -2423040747.6233954, tmp)>>>x))*((tmp = 2330818588.4081, tmp)>>(tmp = 103312020.98346841, tmp)))+(tmp = 2264492857.144133, tmp))>>>((tmp = 2523442834, tmp)<<x))));
+ assertEquals(0, x >>>= (tmp = -2018700898.531027, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x <<= (tmp = -2489442223, tmp));
+ assertEquals(0, x >>= ((3045836220)>>>x));
+ assertEquals(-1156905149, x ^= (3138062147));
+ assertEquals(-0, x %= x);
+ assertEquals(-3118433907.512866, x -= ((tmp = 1338611238, tmp)-(-1779822669.5128663)));
+ assertEquals(100679693, x &= (1040565279));
+ assertEquals(10136400582574248, x *= x);
+ assertEquals(0, x %= x);
+ assertEquals(2400318405, x += (2400318405));
+ assertEquals(1.0036190808578471, x /= (((tmp = -2313492253.9889445, tmp)|(x-((tmp = -205459123, tmp)>>x)))+x));
+ assertEquals(0, x >>>= (tmp = 882343227.1675215, tmp));
+ assertEquals(0, x &= ((tmp = 2307828832.2706165, tmp)^((((((1404388047)<<((807879382)-(-2862921873)))-x)*(tmp = -1897734732, tmp))>>(tmp = 1981888881.2306776, tmp))%x)));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x *= (((x*x)*((((2764801384.171454)%(x>>>x))&(384818815))+(x>>(tmp = -1481683516, tmp))))&x));
+ assertEquals(0, x >>= (tmp = -2202536436, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= (tmp = 15161124, tmp));
+ assertEquals(-1586110900, x ^= (-1586110900));
+ assertEquals(-1586127952, x -= ((tmp = 560737212, tmp)%((1349529668)>>>(tmp = -1956656528, tmp))));
+ assertEquals(-1174945870, x -= ((1178456190)|x));
+ assertEquals(1335167624.3422346, x -= (tmp = -2510113494.3422346, tmp));
+ assertEquals(1329952126.3422346, x -= (x>>x));
+ assertEquals(1, x >>= x);
+ assertEquals(3, x |= (x<<x));
+ assertEquals(3, x -= (x-x));
+ assertEquals(-1938525669, x |= (tmp = 2356441625.5128202, tmp));
+ assertEquals(-1938525669, x ^= ((tmp = -197149141.3622346, tmp)/(2833823156)));
+ assertEquals(-2.6292393147661324, x /= (737295254.2254335));
+ assertEquals(2925975987.370761, x -= (-2925975990));
+ assertEquals(2925975987.370761, x %= (tmp = 3041184582.8197603, tmp));
+ assertEquals(-1908068660, x ^= ((tmp = -1380575181, tmp)-(2375164084.8366547)));
+ assertEquals(-477017165, x >>= (tmp = 2420877826.353099, tmp));
+ assertEquals(-477017165, x %= ((tmp = -2919204062.3683634, tmp)-(tmp = -2263328990, tmp)));
+ assertEquals(-2105539936, x &= ((tmp = -1630795440, tmp)-(x&((933423833)>>(-475069901)))));
+ assertEquals(-4979480720, x -= (tmp = 2873940784, tmp));
+ assertEquals(-4190953472, x -= (x&(tmp = -645918862.9001305, tmp)));
+ assertEquals(17564091004468855000, x *= x);
+ assertEquals(-857277134, x |= (tmp = 2363948338, tmp));
+ assertEquals(1015632515, x -= (-1872909649));
+ assertEquals(-1150380043, x ^= (tmp = -2014853770, tmp));
+ assertEquals(1607729152, x <<= ((2194449589)+(x|(tmp = -1470075256.4605722, tmp))));
+ assertEquals(1608356496, x |= ((((x|(670426524))<<((-2415862218)>>(tmp = 1572561529.9213061, tmp)))^((-1989566800.3681061)|x))&(2170270618.3401785)));
+ assertEquals(-1836056576, x <<= (tmp = 2906301296.540217, tmp));
+ assertEquals(-2952415961567723500, x *= (tmp = 1608020145, tmp));
+ assertEquals(1435500544, x <<= x);
+ assertEquals(700928, x >>>= (tmp = 2924829771.1804566, tmp));
+ assertEquals(0, x <<= ((x^(2410009094))|(((-164334714.18698573)%(x*x))|(tmp = 2182431441.2575436, tmp))));
+ assertEquals(-143321285, x ^= (tmp = -143321285, tmp));
+ assertEquals(-2, x >>= x);
+ assertEquals(-1, x >>= (x&(1109737404)));
+ assertEquals(1, x >>>= x);
+ assertEquals(0, x ^= x);
+ assertEquals(-2463707358.165766, x += (-2463707358.165766));
+ assertEquals(1831259938, x >>= (((((x-(tmp = 1359448920.5452857, tmp))%(tmp = -104541523, tmp))/((3133289055.9780197)*x))>>x)%x));
+ assertEquals(1858895646, x ^= ((tmp = 131424376, tmp)>>(tmp = -396761023, tmp)));
+ assertEquals(1, x >>= x);
+ assertEquals(-1888369021, x |= ((tmp = -2038869285.046599, tmp)^((tmp = -1318286592.4250565, tmp)-(tmp = 2825123496, tmp))));
+ assertEquals(1036458508, x <<= ((tmp = 2722401450, tmp)/((tmp = 1090712291, tmp)>>((tmp = -2155694696.9755683, tmp)*(tmp = 1661107340, tmp)))));
+ assertEquals(1, x /= (x%((tmp = -1716050484, tmp)+(tmp = -1683833551.797319, tmp))));
+ assertEquals(0, x >>= (tmp = -2899315628, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x <<= x);
+ assertEquals(1546062911, x |= (1546062911));
+ assertEquals(1546195271, x += ((tmp = -3210667091, tmp)>>(tmp = 1323121165, tmp)));
+ assertEquals(3092390542, x += x);
+ assertEquals(-1199626354, x |= (406783756));
+ assertEquals(-3650317194584908300, x *= (tmp = 3042878461.625484, tmp));
+ assertEquals(-7.650495675092354e+27, x *= (2095844078));
+ assertEquals(0, x >>= (tmp = 342617880.3384919, tmp));
+ assertEquals(22, x ^= (((tmp = 381409558.9104688, tmp)>>((2823172888.974557)>>x))>>x));
+ assertEquals(736383550, x += (736383528));
+ assertEquals(0, x %= x);
+ assertEquals(0, x += x);
+ assertEquals(-1553157831, x -= (1553157831));
+ assertEquals(1838556960, x <<= (3158944357.262641));
+ assertEquals(5503285699.188747, x *= ((tmp = 2437440276, tmp)/(814308583.8128904)));
+ assertEquals(5824889900.188747, x -= (((tmp = 1171445694, tmp)-(tmp = -1584666956, tmp))^(tmp = 1217545373, tmp)));
+ assertEquals(747032, x >>>= (-89332085));
+ assertEquals(747032, x |= (x^(x^(x>>>x))));
+ assertEquals(747032, x >>>= ((-1558482440)*((tmp = -2413907480, tmp)+(3003996862.384156))));
+ assertEquals(7.747761349084291e+23, x += ((tmp = 518064022.64624584, tmp)*((tmp = 2001951702, tmp)*x)));
+ assertEquals(0, x <<= (2769324707.5640426));
+ assertEquals(NaN, x %= (((((((-2458056470.7717686)&x)>>(tmp = -361831232.42602444, tmp))*(2611108609.6727047))>>>x)/(-1713747021.8431413))*(-1143281532)));
+ assertEquals(NaN, x %= ((x^((-613836813)*(tmp = -3180432597.0601435, tmp)))%x));
+ assertEquals(NaN, x /= ((-1607092857)^x));
+ assertEquals(0, x &= (-1190719534));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x += (x>>(642177579.1580218)));
+ assertEquals(-3129552333, x += (-3129552333));
+ assertEquals(1165414963, x &= x);
+ assertEquals(2222, x >>= (((tmp = 2606317568, tmp)|x)+(tmp = 1844107136, tmp)));
+ assertEquals(NaN, x %= ((x^x)<<(x/(((tmp = -1362148700, tmp)&((tmp = 76371048, tmp)<<x))>>>((x^(-2605741153))>>(((tmp = -2131608159.7634726, tmp)|(((2827792229.8004875)|(((-848439251)+(-2576768890.123433))|((tmp = -2617711776, tmp)-((-199980264)&((tmp = -46967951.76266599, tmp)/(-733253537))))))*(tmp = 1820087608, tmp)))>>>(tmp = -3118359396.4298744, tmp)))))));
+ assertEquals(NaN, x /= ((2144871731)*x));
+ assertEquals(NaN, x *= x);
+ assertEquals(NaN, x %= (tmp = 234811462.08692443, tmp));
+ assertEquals(0, x >>>= ((1121416685)|(x^(((tmp = -2905413334, tmp)<<(tmp = -3091554324.030834, tmp))<<x))));
+ assertEquals(-55938048, x |= ((tmp = -55938048, tmp)+(x*(tmp = -1518809027.2695136, tmp))));
+ assertEquals(-3.3234995678333864e-10, x /= (x*(tmp = -3008876576, tmp)));
+ assertEquals(0, x <<= (x/((((((-2168824234.2418427)>>(((tmp = 1976810951, tmp)%x)<<(x*(x>>(x%(3146266192))))))%(tmp = 1756971968.122397, tmp))>>>(-2859440157.8352804))/(-1001406.1919288635))>>>(-1358031926))));
+ assertEquals(-0, x *= (tmp = -1756000533, tmp));
+ assertEquals(-0, x %= (2522761446.869926));
+ assertEquals(0, x >>>= (((1087690535)>>>(2741387979))^x));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= (-819422694.2188396));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x &= (tmp = 86627723, tmp));
+ assertEquals(0, x += x);
+ assertEquals(0, x %= (tmp = -2317915475, tmp));
+ assertEquals(Infinity, x += (((-3072799584)^(-2487458319))/(((tmp = -3050692353, tmp)&x)>>(-777977292.8500206))));
+ assertEquals(Infinity, x += x);
+ assertEquals(Infinity, x -= (tmp = 484428269, tmp));
+ assertEquals(Infinity, x *= x);
+ assertEquals(Infinity, x /= (2059586218.2278104));
+ assertEquals(Infinity, x *= (tmp = 415918523.8350445, tmp));
+ assertEquals(-1800869091, x |= (((-1800869091)>>>(x>>>(tmp = -2832575051, tmp)))>>>x));
+ assertEquals(6196126991451132000, x *= ((-1467292383.8458765)+(-1973339154.7911158)));
+ assertEquals(6196126992684649000, x += (1233517421));
+ assertEquals(1, x /= x);
+ assertEquals(-7153809722216516000, x -= (((-2984550787.146106)<<(tmp = 743743974, tmp))*((3155151275)/((-1771412568.8965073)%x))));
+ assertEquals(-7153809721471491000, x -= (-745024056));
+ assertEquals(5.117699353102001e+37, x *= x);
+ assertEquals(0, x >>= x);
+ assertEquals(-0, x *= ((-2651785447.666973)<<(-1124902998)));
+ assertEquals(-0, x /= (2119202944));
+ assertEquals(1042673805.5205957, x -= ((x<<x)-(tmp = 1042673805.5205957, tmp)));
+ assertEquals(62, x >>>= (tmp = 2769597912.977452, tmp));
+ assertEquals(34, x &= ((tmp = -61541150, tmp)%(x^(-943160469))));
+ assertEquals(34, x ^= ((-2625482224.4605474)<<(-2277806338.3461556)));
+ assertEquals(536870912, x <<= ((-2373927426.4757633)^x));
+ assertEquals(536870912, x &= x);
+ assertEquals(512, x >>>= ((-1626769708.310139)<<((tmp = 641796314, tmp)/(721629637.3215691))));
+ assertEquals(0, x <<= (-113973033));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x += (-1602711788.2390788));
+ assertEquals(NaN, x *= (x%x));
+ assertEquals(0, x &= (x<<(x|(x>>((x>>>(x%((1182960050)^(((-220896609)-((((tmp = 1518275435.360103, tmp)/(tmp = -88234820, tmp))^x)/x))>>(3169930777.548236)))))-(tmp = -2912668817.662395, tmp))))));
+ assertEquals(0, x *= ((2323969408.7524366)/(((tmp = -3089229853, tmp)>>>((((tmp = -1012580544.5631487, tmp)>>(1138049418.9023373))>>x)&x))*(tmp = 626912001, tmp))));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x /= (x%(-868024322)));
+ assertEquals(NaN, x /= (tmp = -1749532322, tmp));
+ assertEquals(1861918711, x |= (-2433048585.853014));
+ assertEquals(1861918711, x >>= (((102451747)>>>((((241651917.47259736)/((((((((1759022236)^(tmp = -2592022722, tmp))+((-1748044969)>>>(704597925)))/(-1639604842))%((1349846853.7345295)<<(-729695861)))/(x>>((tmp = -2654474404.7365866, tmp)>>x)))>>>(((-480356478)|(x%((tmp = -1668269244.6979945, tmp)+(tmp = -2441424458.565183, tmp))))^((1634981212.7598324)>>>(tmp = 122455570.22000062, tmp))))<<x))*((tmp = -1058636137.5037816, tmp)+((2794083757.138838)&((x/(50081370))&x))))/x))/((tmp = -243106636, tmp)<<((x*((tmp = -648475219.5971704, tmp)>>((tmp = -1568913034, tmp)-((tmp = 911458615, tmp)|x))))>>>(tmp = 2714767933.920696, tmp)))));
+ assertEquals(0, x ^= x);
+ assertEquals(-2080484602, x |= (((1544771831.4758213)|x)^(-538113039)));
+ assertEquals(696451072, x <<= (tmp = -1587032689, tmp));
+ assertEquals(-162595645, x += (tmp = -859046717, tmp));
+ assertEquals(516546456, x >>>= x);
+ assertEquals(623083588, x += ((-1371850352)^(tmp = -1469933252, tmp)));
+ assertEquals(92342412, x %= (tmp = -132685294, tmp));
+ assertEquals(500272110, x |= ((tmp = 1616032506, tmp)%((tmp = 1589569590.4269853, tmp)|(-972791738.1829333))));
+ assertEquals(3247086, x %= (((tmp = 1372216208, tmp)|(-638950076.3387425))&((-2619249161.849716)&(73957896))));
+ assertEquals(0, x >>>= (tmp = -1482343462.6911879, tmp));
+ assertEquals(1265125662, x ^= (tmp = -3029841634, tmp));
+ assertEquals(4941897, x >>>= (-2039728632));
+ assertEquals(206857, x &= (tmp = 226962365.45571184, tmp));
+ assertEquals(1.0925018562586405e+24, x += ((tmp = 2687424146, tmp)*(((-1998020319)%x)*(-2080331363))));
+ assertEquals(-1.755270751212437e+32, x *= (-160665242));
+ assertEquals(0, x <<= (3152796521.6427975));
+ assertEquals(0, x ^= ((((((tmp = -855001595, tmp)<<(2007525777))-(x-(x-x)))/(3036585090.9701214))&(1827983388))*((tmp = -915604789.0515733, tmp)&(((((tmp = -806628722.7820358, tmp)%x)/(tmp = -2773117447, tmp))|x)<<(((tmp = -2902300974.7300634, tmp)|x)/(-1608133440))))));
+ assertEquals(0, x |= ((((((119024954)*(((x^(tmp = 2939514414, tmp))|x)^(x-(tmp = -1597415597.6795669, tmp))))+(((tmp = -182277816.14547157, tmp)<<(((-2983451324.3908825)^(tmp = 1572568307, tmp))+(-1165604960.8619013)))/(x>>((tmp = -2127699399, tmp)>>((x^(((((tmp = -1968667383, tmp)^(tmp = 3120052415.9964113, tmp))|(((x|(((x^((tmp = 2831505153, tmp)<<((-3150506831.547093)+((x%(tmp = 383761651, tmp))%(2856803457)))))+(((tmp = -2426953997, tmp)^(tmp = -2667954801.1010714, tmp))*(tmp = -2707801631, tmp)))&(tmp = 2082935238.794707, tmp)))^((tmp = 697573323.5349133, tmp)-x))%(tmp = 661936357, tmp)))/(-1717944600.261446))>>>((2423776015.0968056)^((-1410322010)|((x<<(tmp = 2935993226, tmp))/(tmp = -1533896392, tmp))))))*(tmp = -596675330, tmp))))))>>>(((2944268153)^(x&(144579050.93126357)))/(-2123810677.2619643)))>>>(1473040195.9009588))*x));
+ assertEquals(0, x /= (2877666495));
+ assertEquals(2174852514, x -= (tmp = -2174852514, tmp));
+ assertEquals(543713128, x >>>= x);
+ assertEquals(2978128878.939105, x += (tmp = 2434415750.939105, tmp));
+ assertEquals(3529591145844655600, x *= (tmp = 1185170719.3753138, tmp));
+ assertEquals(659, x >>>= ((((((x<<(((((-425423078)/(((tmp = 160617689.20550323, tmp)&(-1524740325.5003028))%(tmp = -1869426475, tmp)))<<(((x^(-487449247))>>>(tmp = -1962893666.7754712, tmp))%x))*x)>>((tmp = 623413085, tmp)&(x+(((((-2200726309.083274)-(x-x))+x)&(-1304849509))|((((tmp = -431896184, tmp)>>>(x>>(-1932126133)))<<((1078543321.2196498)*(-10761352)))>>(tmp = -2681391737.5003796, tmp)))))))/x)-(tmp = -1768629117, tmp))/(((((tmp = -2320718566.0664535, tmp)%x)+(-2831503351.995921))>>>(-2695416841.3578796))*(943979723)))<<x)|((652520546.7651662)>>(1045534827.6806792))));
+ assertEquals(531, x &= (tmp = -293707149, tmp));
+ assertEquals(0, x >>= (tmp = -678056747.5701449, tmp));
+ assertEquals(1184651529.8021393, x += (tmp = 1184651529.8021393, tmp));
+ assertEquals(1721719611, x |= (tmp = 1645413178, tmp));
+ assertEquals(-406880257, x |= (tmp = 2268544460, tmp));
+ assertEquals(-4194304, x <<= (tmp = -109701322.43455839, tmp));
+ assertEquals(17592186044416, x *= x);
+ assertEquals(0, x ^= (x&x));
+ assertEquals(0, x <<= (tmp = 1715401127, tmp));
+ assertEquals(-1793087394, x |= (tmp = -1793087394.730585, tmp));
+ assertEquals(-2, x >>= x);
+ assertEquals(263607360.10747814, x += (tmp = 263607362.10747814, tmp));
+ assertEquals(1073214955, x |= (893759979.3631718));
+ assertEquals(703953930, x -= ((2738450011)%(x^(tmp = 679402836, tmp))));
+ assertEquals(1, x >>= (tmp = 2262515165.6670284, tmp));
+ assertEquals(0, x >>= (((tmp = 747896494, tmp)^((tmp = -1005070319, tmp)+x))|x));
+ assertEquals(0, x >>= ((953612771)>>>(tmp = 3066170923.3875694, tmp)));
+ assertEquals(-314941454, x -= (x+(((314941454)%(((tmp = 2200222912.9440064, tmp)>>>(2534128736.805429))>>>(x|((747716234)%(((tmp = -252254528, tmp)%(-1553513480.1875453))&x)))))<<x)));
+ assertEquals(-535686958, x &= (-522809126));
+ assertEquals(0.5480312086215239, x /= (tmp = -977475278, tmp));
+ assertEquals(-1199953459.6090598, x *= ((-2189571393)+((3186862741.37774)>>(tmp = -2193090564.5026345, tmp))));
+ assertEquals(-1199953459.6090598, x %= ((tmp = 2986532440, tmp)*(2685122845)));
+ assertEquals(-1199953459.6090598, x %= (1951182743.7399902));
+ assertEquals(51262285383887820, x *= (-42720228));
+ assertEquals(-424776752, x |= x);
+ assertEquals(166221344210236600, x *= (tmp = -391314598.6158786, tmp));
+ assertEquals(-1883425600, x >>= (((tmp = -1020679296, tmp)^((-1416867718)+(-1412351617)))<<(-2743753169)));
+ assertEquals(0, x &= (x/(-2250026610)));
+ assertEquals(-1111956501, x ^= (tmp = 3183010795, tmp));
+ assertEquals(2012059503, x ^= (tmp = -900369276, tmp));
+ assertEquals(15719214, x >>>= (tmp = -3196277049, tmp));
+ assertEquals(15719214, x |= x);
+ assertEquals(100779035, x -= ((-1245802025)^(-2964289852)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x &= (((x<<((2361941389.708063)%x))>>((328256762.09842086)>>>((((tmp = 3094192285, tmp)-(((x>>(tmp = -2920437464, tmp))<<(tmp = -2693021467, tmp))-(x>>>((2410065554)%(x%(tmp = 2487056196.689908, tmp))))))-(tmp = -866314146, tmp))^((1754098471)-((((((-2450740191)-(tmp = 1977885539.6785035, tmp))*((tmp = -1205431332, tmp)>>>x))>>(-870601854))>>(tmp = -301859264, tmp))|((tmp = -2308971516.8301244, tmp)/x))))))&((2307007357)-((tmp = -1518812934, tmp)+(2562270162)))));
+ assertEquals(0, x <<= x);
+ assertEquals(-1802124619, x |= (-1802124619));
+ assertEquals(-1802124619, x %= ((1617132364.306333)+((1678465962.079633)|((516698570)%(((569813606)*(-1800804098.6270027))%((tmp = 1976706935, tmp)-((tmp = -1830228989.5488424, tmp)>>(((x^((tmp = 1015246068.3791624, tmp)>>x))^((-2171682812.246772)-(tmp = -398330350, tmp)))&x))))))));
+ assertEquals(904564673.6237984, x -= (tmp = -2706689292.6237984, tmp));
+ assertEquals(818237248768128900, x *= x);
+ assertEquals(254842325.2585001, x %= (1550087667.9657679));
+ assertEquals(-1163919360, x <<= x);
+ assertEquals(-3.4644526843674166, x /= ((-446801454)+(x>>>(tmp = -2025151870, tmp))));
+ assertEquals(0, x &= ((((((((-1739617728)&(x&(((tmp = -2946470036.552597, tmp)/x)*x)))^(-1130501404))>>>x)/((1870230831)>>>(840301398)))%x)/x)/(-2927537567)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>>= (x&(x&x)));
+ assertEquals(0, x &= ((-579614044)-(-756012505.4048488)));
+ assertEquals(-2970367642, x -= (tmp = 2970367642, tmp));
+ assertEquals(-415129376, x ^= (tmp = 2847041926.060355, tmp));
+ assertEquals(-1505681312, x &= (tmp = -1225184902.9215767, tmp));
+ assertEquals(-3174471329.5807734, x += (-1668790017.5807734));
+ assertEquals(-Infinity, x /= (x>>x));
+ assertEquals(NaN, x -= x);
+ assertEquals(0, x ^= (x^(((-1407936301.5682082)<<((x^(((tmp = 3213446217.307076, tmp)|x)|((tmp = 3219810777.3171635, tmp)/(tmp = 1561807400, tmp))))>>>((tmp = 2449910203.0949173, tmp)|((((1954662538.7453175)>>(tmp = -1711636239.9916713, tmp))>>>(tmp = 406219731.214718, tmp))<<(((-907908634.4609842)^((((((tmp = 2408712345, tmp)*(tmp = 1740346634.5154347, tmp))>>(tmp = 715783991, tmp))^(tmp = -655628853.2821262, tmp))%(tmp = 2819143280.434571, tmp))/(-1240412852)))*x)))))/x)));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>>= (((-3198075268.8543105)>>(((((x+((tmp = -133461401.50823164, tmp)-((x&(((((tmp = 2617977319, tmp)>>((tmp = -2704719576.8734636, tmp)|((tmp = -977362542.2423751, tmp)<<(x<<(tmp = 3054487697.1441813, tmp)))))>>>((-1635655471)%x))/(-2079513672))%(tmp = 1993563806, tmp)))<<(tmp = -1310524200.6106496, tmp))))%((((-2558804500.7722936)+(tmp = -1641265491, tmp))<<((tmp = -1309608349, tmp)>>>x))/((tmp = -2306644272, tmp)<<x)))*(-2009396162.3063657))+(267343314.3720045))-(-2212612983.661479)))|x));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x *= x);
+ assertEquals(-824822309, x |= (-824822309));
+ assertEquals(-807944741, x |= (((-598067403)*((x&(tmp = 2897778389, tmp))>>>(-1322468310.3699632)))|x));
+ assertEquals(90004223.44097246, x /= (((tmp = -481122620, tmp)&x)%((tmp = 1109368524, tmp)/(((-3150568522.633032)<<(tmp = 2923396776, tmp))^(x-((x/x)&(x/(-287976185.1049104))))))));
+ assertEquals(0.4521931751193329, x /= (tmp = 199039323, tmp));
+ assertEquals(1.8110466604491368e-10, x /= (2496860986.492693));
+ assertEquals(0, x |= x);
+ assertEquals(-1225944576, x += ((tmp = -807700791.631221, tmp)<<((-700782615.4781106)-((((-2954619897)>>>x)<<((tmp = 997657844, tmp)>>>(1227994596)))/((-1234591654.8495834)*((tmp = -191189053.70693636, tmp)+(tmp = -3027659304, tmp)))))));
+ assertEquals(-1225811383, x |= (-1866233271));
+ assertEquals(3069155913, x >>>= (((x/(-99524153.40911508))%(x>>>((((tmp = 2985975640, tmp)/(tmp = 2781516546.2494454, tmp))&(((2234114508)|(((x/(tmp = -1224195047, tmp))<<x)^(x>>>((537884375.5698513)+x))))^((tmp = -2144817497.5089426, tmp)|(-498079183.8178189))))>>>((x+x)&(-3086080103.6460695)))))<<(((tmp = 2151157136, tmp)*x)/(((x/x)>>>(-1149734628.4364533))-((3025445835.654089)+(tmp = 530902725.91127443, tmp))))));
+ assertEquals(-1733702568, x ^= (tmp = 776361489.423534, tmp));
+ assertEquals(8981504, x &= ((tmp = 2902581847, tmp)*(x-(-2697760560))));
+ assertEquals(1153166.8526612986, x -= ((x/(tmp = -1375025594.5027463, tmp))+((3043576689.1538706)%(x+x))));
+ assertEquals(3389855, x |= (x+x));
+ assertEquals(-488458393.17759943, x += (-491848248.17759943));
+ assertEquals(40982867145206920, x *= ((3132857155)|(tmp = -218356553, tmp)));
+ assertEquals(688, x >>= (((((tmp = 403321821, tmp)+((tmp = 2536984658, tmp)%((tmp = 2759309029.8753624, tmp)|(((tmp = 1994203554.7417293, tmp)^((704660500.434877)*(tmp = 1536292958.2691746, tmp)))+(-164139788)))))/((1205950994.1255205)+x))^((((tmp = 975272146.0133443, tmp)-(150107797))/(-1764309514))^((x>>>(x^(x^x)))+(203250124))))>>>(tmp = 1864959239.512323, tmp)));
+ assertEquals(10, x >>= ((tmp = 1631996431.9620514, tmp)>>x));
+ assertEquals(10, x %= (tmp = 2678904916, tmp));
+ assertEquals(335544320, x <<= (tmp = -2759037415.6811256, tmp));
+ assertEquals(-153389967, x |= ((tmp = -2411636565, tmp)+(tmp = -2305156154, tmp)));
+ assertEquals(-1171, x >>= x);
+ assertEquals(813080576, x &= (((tmp = -65428547, tmp)&(tmp = 3163266999, tmp))<<x));
+ assertEquals(4346532303, x += ((tmp = -761515569.0707853, tmp)>>>(((tmp = 143240971.0661509, tmp)<<x)*(x^((tmp = -271697192.8471005, tmp)&x)))));
+ assertEquals(-863299035, x ^= ((((2663001827.1492147)>>>((x/(((tmp = 482665912, tmp)-(x>>(tmp = 354425840.784659, tmp)))>>((-2012932893)>>>x)))/((tmp = -1354385830.6042836, tmp)>>>(-2149023857))))^((tmp = 585746520, tmp)+(tmp = 756104608, tmp)))^(517529841.184085)));
+ assertEquals(-997654012, x &= (((tmp = -404836025.15326166, tmp)+((tmp = 3035650114.0402126, tmp)<<((-1308209196)>>(tmp = 693748480, tmp))))<<(((465774671.4458921)<<x)/(1971108057))));
+ assertEquals(-320581507110848260, x *= ((x-(tmp = -2266777911.7123194, tmp))^(tmp = -2810021113.304348, tmp)));
+ assertEquals(-320581508271196300, x += ((-1195215841.5355926)|(x-((2715907107.4276557)+(((-843426980)>>(x&(x%(tmp = -1139279208.34768, tmp))))^x)))));
+ assertEquals(368031616, x &= x);
+ assertEquals(368031616, x %= (tmp = 1211767328, tmp));
+ assertEquals(-67505614939510744, x *= (tmp = -183423412.56766033, tmp));
+ assertEquals(959424552, x >>= ((tmp = -171120122.5083747, tmp)/x));
+ assertEquals(30949179.096774194, x /= (((x-((((x&(tmp = -180770090, tmp))<<(((tmp = -2061363045.419958, tmp)*((655711531)^((1205768703)-(tmp = 2468523718.8679857, tmp))))+(-2746704581)))+((-853685888)*(tmp = -2299124234, tmp)))|(tmp = 2429502966, tmp)))|(((-985794986.0232368)>>>(2890862426))%x))>>(tmp = 1005542138.8415397, tmp)));
+ assertEquals(30949179, x |= x);
+ assertEquals(30949179, x %= (810126097.6814196));
+ assertEquals(120895, x >>= (tmp = 3065886056.1873975, tmp));
+ assertEquals(1934320, x <<= (1478650660.7445493));
+ assertEquals(0, x >>= (1069658046.2191329));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x %= (x*x));
+ assertEquals(NaN, x *= ((((2148513916)+(tmp = -210070225.85489202, tmp))>>(975470028))+((-3060642402)>>x)));
+ assertEquals(NaN, x *= (2888778384));
+ assertEquals(NaN, x -= (294531300.16350067));
+ assertEquals(-465620423, x ^= (tmp = -465620423.5891335, tmp));
+ assertEquals(1613303808, x &= (-2530649850.1952305));
+ assertEquals(2045458658, x |= (tmp = 432158946.5708574, tmp));
+ assertEquals(0, x >>>= (2277328255.770018));
+ assertEquals(0, x &= (-64904722.41319156));
+ assertEquals(0, x >>= x);
+ assertEquals(3109394857.361766, x += (3109394857.361766));
+ assertEquals(1519021650, x ^= ((tmp = -2632472653, tmp)|(tmp = 2161964921.8225584, tmp)));
+ assertEquals(370854, x >>>= ((1486892931.4564312)-((tmp = 3017755741.9547133, tmp)>>>x)));
+ assertEquals(1333145110.39802, x -= ((-1051580495.39802)-(tmp = 281193761, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x |= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(799202788.1455135, x -= (tmp = -799202788.1455135, tmp));
+ assertEquals(1539080192, x <<= (x%(((((x-x)|(((((x%(959993901))+(tmp = -2647575570.092733, tmp))/(tmp = -2040600976.5104427, tmp))*(x*(tmp = 2785252760, tmp)))>>(-377867259)))/((x&(1549738240.013423))>>>(tmp = -1502185618, tmp)))*x)%(1159283801.0002391))));
+ assertEquals(0, x >>= (-268660225));
+ assertEquals(-0, x /= (-2795206270.635887));
+ assertEquals(0, x >>>= (1869556260.2489955));
+ assertEquals(64202212, x ^= ((((tmp = -942983515.5386059, tmp)*(((1057759788)-x)*(tmp = 2038041858, tmp)))>>x)+(tmp = 64202212, tmp)));
+ assertEquals(2021126977, x -= ((tmp = -2009912898, tmp)^((2240062309)%x)));
+ assertEquals(4332348265459724000, x *= (tmp = 2143530968, tmp));
+ assertEquals(1472, x >>>= ((283380755)<<x));
+ assertEquals(-1672370407872, x *= (tmp = -1136121201, tmp));
+ assertEquals(338573318, x ^= (tmp = 2329579078.4832354, tmp));
+ assertEquals(2377388772.1662374, x -= (tmp = -2038815454.1662374, tmp));
+ assertEquals(-1.264761712403516, x /= ((((tmp = -2106209534, tmp)>>((((((tmp = 626190172, tmp)/x)>>>(-824270996.8545206))/((1258369810.9498723)-(tmp = -2947556209, tmp)))^((((366784589.24711144)|(1462064104.828938))-(1571045395.777879))<<(444685689.60103726)))>>(tmp = -2757110357.410516, tmp)))/(x>>>((tmp = 829226010, tmp)>>>(629512715))))|x));
+ assertEquals(-2905481691.264762, x -= (2905481690));
+ assertEquals(-1710543566.1481905, x -= (-1194938125.1165714));
+ assertEquals(-3421087132.296381, x += x);
+ assertEquals(-884178944, x <<= ((-1820881235)|x));
+ assertEquals(-884178944, x &= (x%(tmp = -2298828530, tmp)));
+ assertEquals(1516503040, x <<= ((tmp = -3039882653, tmp)+((tmp = 1956034508, tmp)<<(x>>(tmp = 280388051, tmp)))));
+ assertEquals(3033006080, x += x);
+ assertEquals(846431222.321887, x %= (x+(-1939718651.1609435)));
+ assertEquals(-846431224, x ^= ((-1742116766.54132)/x));
+ assertEquals(1157918728, x &= (tmp = 1966568030, tmp));
+ assertEquals(1157918728, x >>>= ((((((tmp = -2392096728.184257, tmp)*(x&(-3051259597.301086)))>>>(((tmp = 1712991918.071982, tmp)*(tmp = -714525951, tmp))-((-1784801647)>>((-1270567991)%(((214272558)/(((-3110194570)|(tmp = 2558910020, tmp))&(-1266294955.717899)))*((2654922400.609189)>>>(tmp = 370485018, tmp)))))))*(((tmp = -2621203138.1838865, tmp)%(858913517))*((tmp = -1564229442.2596471, tmp)>>((tmp = 1898557618, tmp)|(-1282356275)))))*(tmp = -1253508468, tmp))+((-361964404.75944185)|x)));
+ assertEquals(961668975, x += (-196249753));
+ assertEquals(1, x >>= (tmp = 890453053, tmp));
+ assertEquals(1, x >>= (((((tmp = 871309275, tmp)/(x>>>((tmp = 2033022083, tmp)&(tmp = -1393761939, tmp))))%((437488665.104565)^(tmp = 2808776860.4572067, tmp)))-((tmp = -359283111.49483967, tmp)<<((tmp = 2985855945, tmp)%(tmp = -596479825.9114966, tmp))))/(-1965528507)));
+ assertEquals(0, x >>= ((tmp = -1753776989, tmp)%(tmp = 322622654, tmp)));
+ assertEquals(84411424, x ^= (((x|(x|(tmp = -1617122265, tmp)))&(tmp = -313813263, tmp))&(1472888112.0258927)));
+ assertEquals(67633184, x &= ((1556833131.0776267)<<(x<<(1501219716.5575724))));
+ assertEquals(68002293, x |= (((tmp = 188984203.0350548, tmp)>>>(tmp = 1356052777, tmp))%(x*(tmp = -2944960865, tmp))));
+ assertEquals(67108864, x &= (((1046644783.9042064)<<x)+((-2796345632)>>>(((-1913290350.3687286)<<(((((tmp = -2223692353, tmp)>>x)&(x<<(x>>((((tmp = -976850020, tmp)%(tmp = 1379692507, tmp))>>>(1120103052.2077985))>>(tmp = 5592070.612784743, tmp)))))<<(x+((tmp = -3154037212.9764376, tmp)%(((x-(-1961060483.6965141))+(((1920670676)-(2852444470.7530622))/(((1445954602)>>((1353665887)>>(tmp = 111411560.64111042, tmp)))<<x)))+x))))<<((-1773130852.6651905)^((1216129132)>>(1511187313.2680469)))))|((tmp = -1107142147, tmp)|(tmp = -768165441.4956136, tmp))))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= (tmp = -1655707538.0778136, tmp));
+ assertEquals(-184120712930843900, x += (x+((tmp = -3174410166, tmp)+((tmp = -301807453, tmp)*(tmp = 610060182.1666535, tmp)))));
+ assertEquals(-54598560, x >>= (-1365351357));
+ assertEquals(-6763.94449950446, x /= (((-1953016847)<<((673287269.7002038)%(-558739761)))>>>(tmp = 1607754129, tmp)));
+ assertEquals(-1, x >>= x);
+ assertEquals(1, x >>>= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>= ((-384747983)+((((tmp = -949058352.381772, tmp)>>>(-1920744986))-(-882729639))^((x^((tmp = 2351364046, tmp)<<(((tmp = -3110165747, tmp)^(-1266489735))-((tmp = -371614326, tmp)>>((tmp = -2064968414, tmp)&(-2075036504.617934))))))&(((-2616501739)&(tmp = 2591437335.4029164, tmp))>>x)))));
+ assertEquals(0, x >>>= ((tmp = 2946468282, tmp)&((-2741453019)>>x)));
+ assertEquals(0, x -= ((x%(-134700915))&(-1955768279)));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x /= (x^(((((((tmp = 3185669685.772061, tmp)>>(tmp = -1973500738, tmp))-(tmp = -87401348.93002152, tmp))>>(tmp = -2813508730, tmp))&(tmp = -778957225, tmp))<<(x-(x&((-2821756608)+(((((tmp = 2475456548, tmp)/(tmp = 997998362, tmp))<<((tmp = -83043634, tmp)|x))%(636120329))%(tmp = -1910213427.7556462, tmp))))))%x)));
+ assertEquals(0, x &= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>>= (x%x));
+ assertEquals(0, x %= (745221113));
+ assertEquals(0, x >>>= ((1467615554.7672596)|x));
+ assertEquals(0, x /= (tmp = 735317995, tmp));
+ assertEquals(-1513001460, x |= (2781965836));
+ assertEquals(-1513001460, x |= (x%(1970577124.3780568)));
+ assertEquals(-0, x %= x);
+ assertEquals(1864972269, x ^= (-2429995027.840316));
+ assertEquals(1226843341, x &= (tmp = -639621923.5135081, tmp));
+ assertEquals(1226843339.3171186, x += ((1297620268.272113)/(-771070549)));
+ assertEquals(76677708, x >>>= (1009134980));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(716040787, x |= ((1851586229)-(1135545441.3502865)));
+ assertEquals(1385693184, x <<= x);
+ assertEquals(1321, x >>= (x^((tmp = -1576632297.0860603, tmp)>>>(405218605))));
+ assertEquals(-1319012931, x |= (-1319014243));
+ assertEquals(-1319012931, x >>= ((((1689898279.3580785)<<((((x^(x>>>((((tmp = 2635260332, tmp)*(tmp = 2053357650, tmp))*x)*(2856480122.339903))))>>x)&(-2382703000.077593))%(1183918594)))*(tmp = -1670081449, tmp))<<x));
+ assertEquals(-528327581.7646315, x %= (tmp = -790685349.2353685, tmp));
+ assertEquals(2073431790, x ^= (tmp = 2601800333, tmp));
+ assertEquals(-6514722684180, x -= (((tmp = 824141806.0668694, tmp)>>>(((-1865885282.8723454)&(x&(x|((900188006.3757659)>>>(x&x)))))+(2227126244.0526423)))*x));
+ assertEquals(1450593, x >>>= ((2157053647)>>(x+(-2934071355.418474))));
+ assertEquals(576782336, x <<= ((1054640368.827202)&((tmp = -3182236876.434615, tmp)>>(tmp = 2129856634.0328193, tmp))));
+ assertEquals(2950754326, x -= (tmp = -2373971990, tmp));
+ assertEquals(738197504, x <<= (1188157369.5988827));
+ assertEquals(0, x <<= (x+((tmp = -839533141, tmp)&((((((tmp = -1148768474.7306862, tmp)|(172650299))+(tmp = -2739838654, tmp))/(3132557129))%x)>>>(tmp = -1229961746.2466633, tmp)))));
+ assertEquals(0, x %= (tmp = -2974207636, tmp));
+ assertEquals(0, x %= ((2323482163)>>>x));
+ assertEquals(0, x &= (((x/(x+(x>>((tmp = 55935149, tmp)%x))))|((3109182235)>>>(tmp = 1217127738.8831062, tmp)))+((((tmp = -385114910, tmp)*((((((tmp = -2535158574.634239, tmp)&(x+x))<<(-2821692922.43476))&(-776804130.9457026))>>((-1374832535)^(tmp = 2175402162.701251, tmp)))%(-1646995095)))-(x*(tmp = -921556123, tmp)))^(79224621))));
+ assertEquals(128935435, x |= ((tmp = 2279459038, tmp)%(tmp = -537630900.5271742, tmp)));
+ assertEquals(128935435, x /= ((((((x<<(2750024311))-((-1332480769.4784315)&(1418160003)))&(1551783357))<<(((((-2870460218.55027)|((-1958752193.7746758)&(2551525625)))>>>((((tmp = -1698256471, tmp)^(((((((((tmp = -830799466, tmp)+x)-(-111590590))+(tmp = -1105568112.3921182, tmp))/((tmp = -3058577907, tmp)|(((-1944923240.2965696)%(-2884545285))<<(tmp = -1993196044.1645615, tmp))))^(x>>(tmp = -2961488181.3795304, tmp)))&x)*x)|(((tmp = 97259132.88922262, tmp)<<((1601451019.343733)&x))*(x|x))))+((((x>>x)<<x)+(-868409202.2512136))/(((tmp = -2893170791, tmp)-((x|(-853641616))%(((tmp = 549313922, tmp)&(-768036601.6759064))%(tmp = -543862220.9338839, tmp))))-((tmp = 1639851636, tmp)+((2164412959)/(-273028039.941242))))))>>>((((-2382311775.753495)^(-2062191030.2406163))>>>(tmp = -1054563031, tmp))/(-862111938.7009578))))%x)+(-3103170117.625942)))%((tmp = -1144062234, tmp)>>x))>>>(tmp = 1216332814.00042, tmp)));
+ assertEquals(41.631074722901715, x /= (x&(-2542806180.962227)));
+ assertEquals(41.631074722901715, x %= (-14003386.556780577));
+ assertEquals(8, x &= (x&((-2231622948)%(tmp = 488279963.9445952, tmp))));
+ assertEquals(9.002961614252625e-9, x /= ((53802728.56204891)<<(((867697152.3709695)-(538719895.5707034))&(-631307825.4491808))));
+ assertEquals(0, x >>= x);
+ assertEquals(-0, x *= (tmp = -785674989, tmp));
+ assertEquals(-0, x += x);
+ assertEquals(0, x /= (-250703244));
+ assertEquals(0, x <<= ((tmp = -661062581.5511999, tmp)|x));
+ assertEquals(0, x &= (-1299482308));
+ assertEquals(0, x &= ((-399690060)>>>(2448074202.385213)));
+ assertEquals(0, x &= (2574341201));
+ assertEquals(0, x <<= ((x|(((tmp = 2458873162.645012, tmp)+(tmp = -1999705422.8188977, tmp))<<((x^(tmp = -392530472, tmp))>>>x)))&(((tmp = 2463000826.7781224, tmp)|(tmp = 3020656037, tmp))-x)));
+ assertEquals(1397603760, x += ((tmp = -1359413071, tmp)-(tmp = -2757016831, tmp)));
+ assertEquals(513823851, x -= (883779909));
+ assertEquals(-1765712747, x ^= (2288060670.6797976));
+ assertEquals(3117741504918286000, x *= x);
+ assertEquals(3117741506284045300, x += (1365759456));
+ assertEquals(6035555595.597267, x /= (tmp = 516562470, tmp));
+ assertEquals(104203275, x &= (tmp = 376835755.32434213, tmp));
+ assertEquals(10858322520725624, x *= x);
+ assertEquals(59458951, x >>>= (153765028));
+ assertEquals(49370856, x += ((tmp = -1291276092, tmp)>>x));
+ assertEquals(0, x %= x);
+ assertEquals(0, x += x);
+ assertEquals(-1494589645, x -= (1494589645));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x <<= (x&((2730708043.467806)<<x)));
+ assertEquals(0, x /= ((tmp = -1483912394.153527, tmp)>>>((tmp = 1800568769, tmp)^((((((tmp = 1351568510, tmp)>>(tmp = -1337992543.2562337, tmp))>>>(tmp = 2602239360.40513, tmp))*x)%x)+(-2095840128.0700707)))));
+ assertEquals(-0, x /= ((2363946613)^(tmp = -2227868069, tmp)));
+ assertEquals(0, x &= ((((2634933507)<<(2798775374.140882))>>>x)>>>(((tmp = 1135200853.6396222, tmp)-(tmp = -1529829490.7007523, tmp))-(((((((((x^((x|(2135742668.591568))-(924230444.8390535)))%(tmp = -2459525610.51898, tmp))+(x&((tmp = 1177231743.809653, tmp)/(tmp = 1743270357.2735395, tmp))))|(((tmp = -1894305017, tmp)^((tmp = 1791704240, tmp)&x))%(-1569751461)))>>>(tmp = -2078321944, tmp))|x)*(((x*(tmp = -163239354, tmp))<<((tmp = 2859087562.694203, tmp)&(-657988325.9410558)))^(2508013840)))-((-243572350)+(x%((-1095206140)+((tmp = 3213566608.942816, tmp)*((2256442613)%((tmp = 1723751298, tmp)^(x-((-1145710681.2693722)|x)))))))))+(1556870627)))));
+ assertEquals(130883024.97423434, x -= (-130883024.97423434));
+ assertEquals(0.046720352789736276, x /= (tmp = 2801413456, tmp));
+ assertEquals(1806558189, x |= (tmp = 1806558189.157823, tmp));
+ assertEquals(72.40475060062144, x /= (x%((1932591076.531628)>>(1982030182))));
+ assertEquals(-1077558321.5975945, x += (tmp = -1077558394.002345, tmp));
+ assertEquals(98187, x >>>= x);
+ assertEquals(97792, x &= (tmp = -1032487404, tmp));
+ assertEquals(709197609, x |= (x^(709179177)));
+ assertEquals(11081212, x >>>= (tmp = 1412940006.169063, tmp));
+ assertEquals(11081212, x &= x);
+ assertEquals(-1920311203, x -= ((tmp = 1931392415, tmp)<<((x%(tmp = -2873576383, tmp))%x)));
+ assertEquals(-1920311203, x |= (x&(-993884718.2172024)));
+ assertEquals(-4, x >>= (1409411613.0051966));
+ assertEquals(-7947632484, x *= ((-2856731734)^((-1181032235.9132767)-((tmp = 780101930, tmp)+((tmp = -1732707132.6253016, tmp)^x)))));
+ assertEquals(-2016362769, x ^= (tmp = 2711125619.2455907, tmp));
+ assertEquals(-61535, x >>= x);
+ assertEquals(-124771649, x ^= (tmp = 124726558, tmp));
+ assertEquals(-1, x >>= x);
+ assertEquals(-0, x %= (x*x));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x /= (2444628112));
+ assertEquals(0, x <<= ((-38968517.72504854)<<x));
+ assertEquals(-1504619917, x |= (tmp = 2790347379, tmp));
+ assertEquals(-1504619917, x &= x);
+ assertEquals(2790347379, x >>>= ((1825218368)<<(-1843582593.2843356)));
+ assertEquals(7786038495492170000, x *= x);
+ assertEquals(-11011696, x |= (((tmp = 2931644407.4936504, tmp)-(3077095016.001658))%(tmp = -1731851949, tmp)));
+ assertEquals(-107866, x %= ((-697845074.1661191)>>(772708134)));
+ assertEquals(356779149, x ^= (-356884949.503757));
+ assertEquals(0, x %= x);
+ assertEquals(0, x *= ((tmp = 1542291783, tmp)^x));
+ assertEquals(0, x += ((tmp = 1105314644.002441, tmp)&x));
+ assertEquals(-1005882993, x ^= (-1005882993.0899806));
+ assertEquals(-1301065066, x += (tmp = -295182073, tmp));
+ assertEquals(-1454702592, x <<= ((-2440858737.390277)&(-1363565201.7888322)));
+ assertEquals(-201539012492525570, x *= ((((tmp = -1416268089, tmp)|x)-(tmp = 1669129769, tmp))&(x<<((x/(-2614041678.7423654))%x))));
+ assertEquals(-2.1995276811535986e+25, x *= (x/(-1846667987.154371)));
+ assertEquals(0, x |= ((x*(((x>>>((tmp = 1044173034, tmp)>>>((x<<((tmp = -2906412863, tmp)%((tmp = -437401503, tmp)<<(((((x|(2167319070))<<((tmp = 2766179640.1840167, tmp)&(-2372076054)))*(tmp = -241617431.06416297, tmp))*((((((tmp = 2570465382.5574293, tmp)>>>(x/((-2851324509.354545)%x)))>>(((x+((tmp = -614687945, tmp)^x))^((((tmp = 1653437743, tmp)>>x)/(tmp = 3072995069, tmp))>>x))*(((((-290508242)>>((tmp = 2969511554, tmp)<<(tmp = 158176292.95642304, tmp)))<<(32376015))+(tmp = 2391895870.4562025, tmp))*x)))&((((x/(tmp = 365292078.53605413, tmp))>>x)/(1167322811.0008812))|(((tmp = 2487970377.365221, tmp)^x)<<((tmp = 2342607988.711308, tmp)/(((2276081555.340126)-(((tmp = -2571071930, tmp)>>(tmp = -248468735.76550984, tmp))>>>(tmp = -2862254985.608489, tmp)))^(-1312017395))))))<<x)&(2762717852.949236)))+((((-2492896493)&x)<<(-2756272781.4642315))/x)))))*(2405395452))))>>((-1433975206)/((tmp = -2064757738.6740267, tmp)<<((((tmp = -1563531255, tmp)-(-589277532.2110934))<<x)^(2249328237.0923448)))))-x))-(-225624231)));
+ assertEquals(0, x *= (tmp = 1657982666.2188392, tmp));
+ assertEquals(86443387, x |= (tmp = 86443387.25165462, tmp));
+ assertEquals(86443387, x %= (-1341731981.702294));
+ assertEquals(172886774, x <<= ((-1799840391)&(1011948481.310498)));
+ assertEquals(-1115684864, x <<= x);
+ assertEquals(-2098253702059525600, x *= (1880686715.1865616));
+ assertEquals(-2098253700213206300, x -= (tmp = -1846319435.0583687, tmp));
+ assertEquals(570692096, x &= (((tmp = -1572055366.64332, tmp)%(tmp = 1720120910, tmp))%((x-(912386952.5959761))*(tmp = -1146251719.4027123, tmp))));
+ assertEquals(603979776, x <<= ((-329752233.8144052)&(tmp = -368636559, tmp)));
+ assertEquals(603979776, x <<= x);
+ assertEquals(364791569817010200, x *= x);
+ assertEquals(0, x &= ((2074587775.983799)/(tmp = 438856632.76449287, tmp)));
+ assertEquals(0, x &= (((1509671758)*(tmp = -935801537.7325008, tmp))>>>(((tmp = -1752877566, tmp)<<x)%(tmp = -517163766, tmp))));
+ assertEquals(-2031730599, x ^= ((2264285273)&(tmp = -1762662949.014101, tmp)));
+ assertEquals(-843578945, x %= (-1188151654));
+ assertEquals(-2147483648, x <<= x);
+ assertEquals(-2147483648, x >>= (tmp = -3165079200.229641, tmp));
+ assertEquals(-44086313.1323726, x %= ((x%(-254466243.48728585))-((x>>(-457411829.1063688))-((-2606923436.9333453)/x))));
+ assertEquals(-44086313, x |= x);
+ assertEquals(1037812, x >>>= ((tmp = 342497258.9786743, tmp)+(1652928385.8150895)));
+ assertEquals(-2371695599678100, x *= (tmp = -2285284425, tmp));
+ assertEquals(-2371697387004653, x += (tmp = -1787326553.0542095, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= ((x^(tmp = 544039787, tmp))>>>x));
+ assertEquals(0, x &= ((x%(((((((tmp = -424572417.1088555, tmp)|(-2381863189))/(tmp = -2007482475.1809125, tmp))&(((((tmp = 311016073, tmp)>>(tmp = -1548839845, tmp))+((-2557740399.7947464)<<(2399113209)))&x)>>>x))%(-297180308.7721617))-(tmp = 860906293, tmp))^x))%(-2740622304)));
+ assertEquals(4971841192462909000, x += ((tmp = -2723203837.572612, tmp)+((((-2909100706)+(-951999374))|(-3116735764))*(3087123539.422669))));
+ assertEquals(-460, x >>= (1081807537.557404));
+ assertEquals(2354165127.3906384, x += (tmp = 2354165587.3906384, tmp));
+ assertEquals(357.8680960002211, x /= ((((x<<(((x&x)+(1113841407))|((x/(tmp = 384533564, tmp))>>>(-605853882))))%x)&((tmp = 2050375842, tmp)>>>x))>>(((2745147573)^x)<<(x-(900043292)))));
+ assertEquals(0, x *= (x>>>(-295974954.5058532)));
+ assertEquals(0, x *= ((-2448592125.815531)*(tmp = -94957474.8986013, tmp)));
+ assertEquals(0, x &= ((x>>x)^(tmp = -1335129180, tmp)));
+ assertEquals(395092065, x |= ((3081659156)^(tmp = -1608334475, tmp)));
+ assertEquals(395092065, x &= x);
+ assertEquals(-413337639, x += (x^(tmp = -664996071.3641524, tmp)));
+ assertEquals(-1604423637896759800, x *= (x>>>(tmp = 1242912352.955432, tmp)));
+ assertEquals(0, x &= ((((((tmp = 651293313, tmp)|(((2541604468.635497)>>>(tmp = 758815817.7145422, tmp))>>>((-1948795647)/x)))&x)/((tmp = -3161497100, tmp)+(782910972.3648237)))>>>x)%(834206255.5560443)));
+ assertEquals(0, x >>>= (tmp = 125945571, tmp));
+ assertEquals(NaN, x -= (x%x));
+ assertEquals(NaN, x %= (tmp = 282259853, tmp));
+ assertEquals(NaN, x += (tmp = -2081332383, tmp));
+ assertEquals(0, x >>>= (((x>>(-2298589097.7522116))|((((x>>>(x-(tmp = 755218194, tmp)))|x)%x)-(tmp = 2206031927, tmp)))>>>((((x&(x-x))^(tmp = 2836686653, tmp))*((x<<(tmp = -1624140906.4099245, tmp))>>>((2942895486)|((x>>>x)>>>(-1586571476)))))|((781668993)+(-1857786909)))));
+ assertEquals(0, x &= (tmp = -708084218.9248881, tmp));
+ assertEquals(0, x %= (1645913394.5625715));
+ assertEquals(0, x <<= ((x^((tmp = 1185413900, tmp)*((-2441179733.997965)*(tmp = 2554099020.066989, tmp))))%((1704286567.29923)/x)));
+ assertEquals(0, x += x);
+ assertEquals(0, x *= x);
+ assertEquals(0, x |= (x>>>(139138112.141927)));
+ assertEquals(0, x >>>= (tmp = 2142326564, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(-0, x /= ((((x+(2817799428))|x)%((1050079768)-(x>>>((1452893834.8981247)|((((tmp = -1737187310.889149, tmp)/(tmp = -362842139, tmp))%(1234225406))%(((x|x)*((-1055695643.739629)-((x-x)*(945954197.676585))))-(tmp = 786185315.346615, tmp)))))))<<(-173891691)));
+ assertEquals(0, x &= (-2842855092.319309));
+ assertEquals(0, x &= ((-3188403836.570895)/x));
+ assertEquals(0, x *= (x+x));
+ assertEquals(NaN, x /= (x>>>(((tmp = 391037497.68871593, tmp)/((192754032)*(1382659402.5745282)))/((((-2187364928)>>>x)>>(tmp = 2563448665.7594023, tmp))^(tmp = 1500866009.7632217, tmp)))));
+ assertEquals(NaN, x /= ((tmp = -935036555.2500343, tmp)-(x/(((x&(x^(tmp = -3001352832.5034075, tmp)))^x)/((1122547613)>>x)))));
+ assertEquals(0, x >>= (tmp = -2951766379.0809536, tmp));
+ assertEquals(-632945188, x ^= (-632945188.7188203));
+ assertEquals(-632945188, x %= ((((((tmp = -3181527314.82724, tmp)&(2280175415))>>(x^(x|x)))^(tmp = -524233678.52970886, tmp))*x)|((tmp = 1782882786, tmp)>>>(tmp = -592607219, tmp))));
+ assertEquals(404189184, x <<= ((tmp = -2761472127, tmp)^(36616299.88780403)));
+ assertEquals(872651572, x ^= (tmp = 739568436.6252247, tmp));
+ assertEquals(13, x >>>= ((tmp = -1033843418.865577, tmp)%(x%(1247263629.0445533))));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>= (3189175317));
+ assertEquals(0, x &= (((2391973519.6142406)^((-2950058736.191456)|(x*x)))>>(tmp = 343822384.294345, tmp)));
+ assertEquals(0, x >>>= (tmp = -2306246544, tmp));
+ assertEquals(-1572339598, x ^= ((tmp = 2991380083.337327, tmp)&(tmp = -1361507970, tmp)));
+ assertEquals(649, x >>>= ((1961407923.4950056)>>(x-(-872821523.7513013))));
+ assertEquals(649, x ^= (((x&(tmp = -702931788, tmp))^(((x>>x)|(((tmp = 2710759269, tmp)/(x>>(x*((((((tmp = -2428445134.9555864, tmp)+(-1859938743))%(x<<x))*((236868604)+((tmp = -3066688385, tmp)/(787503572.8839133))))/(tmp = 3215629315, tmp))>>(-1315823020)))))%(1461368627.1293125)))>>>(tmp = -2921804417.5735087, tmp)))/(x>>>(((tmp = 2175260691.824617, tmp)/((-582958935.7628009)-((((((x>>x)|(2590503723.4810824))^(tmp = -1994324549, tmp))-(-684683327))/(tmp = -3133419531, tmp))|(tmp = -328974092.05095506, tmp))))>>(-447624639.4518213)))));
+ assertEquals(649, x %= ((((1854382717)|(((x+(tmp = 2568081234, tmp))-x)+((tmp = 1043086140, tmp)<<((tmp = 2979118595.0496006, tmp)+((x&(2669577199.852803))/(-2567808445.101112))))))<<((((tmp = -1471092047, tmp)&((-3099138855.21041)-((tmp = -798574377.526715, tmp)&((2255586141)<<(-1069867774)))))>>>(((x*(tmp = -2810255707.781517, tmp))/x)*(2706435744.054121)))^(394262253)))^((844325548.0612085)/(tmp = 1434691648, tmp))));
+ assertEquals(823215943.1924392, x += (tmp = 823215294.1924392, tmp));
+ assertEquals(536872706, x &= ((-334612686)%((1303605874)|x)));
+ assertEquals(-30666374.413486242, x += ((tmp = -567539080.4134862, tmp)%(tmp = -1655555936.3195171, tmp)));
+ assertEquals(-56438727096752984, x *= (tmp = 1840410814, tmp));
+ assertEquals(-33200107.984488487, x %= (((tmp = 3007206509, tmp)-(3079337725.6659536))%(1819565202.5011497)));
+ assertEquals(-1214493182, x ^= (-3060193769));
+ assertEquals(-1214493179.1335113, x -= ((-3218099496.595745)/(1122662554)));
+ assertEquals(-1214493179, x >>= ((-375364195)<<(((tmp = 619439637.8754326, tmp)>>(-1830023279.9486575))&(tmp = -1106180387.2448823, tmp))));
+ assertEquals(-303623295, x >>= (-2109241374.3349872));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x |= x);
+ assertEquals(1917126206, x -= (-1917126206));
+ assertEquals(2659779928, x -= (tmp = -742653722, tmp));
+ assertEquals(-1635187368, x >>= ((tmp = -674385169, tmp)*((9848362.783326745)|(x*(55220544.00989556)))));
+ assertEquals(-1981113695, x ^= ((tmp = 392404985, tmp)>>(((x<<((2006207061)<<(tmp = 2558988218, tmp)))*((((tmp = 1789304307.1153054, tmp)/(2538061546))<<(tmp = 556026116, tmp))&((tmp = 1076457999.6424632, tmp)*(tmp = -1822378633.2489474, tmp))))%(((((-1117046924)&((-69013651)%(x&(((-2320327696)/(x&x))-(tmp = 2458222544, tmp)))))>>((-3092360983.0037227)/(-3171415636)))*(((tmp = 2520431213, tmp)<<(1066492762.6149663))+((tmp = 1272200889, tmp)^((1687693123.2295754)+x))))-(-1096823395)))));
+ assertEquals(-990556848, x >>= x);
+ assertEquals(981202869119695100, x *= x);
+ assertEquals(981202869119695100, x -= (x/x));
+ assertEquals(0, x ^= (x>>x));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x *= ((((2980512718)>>>x)<<((x^(-1111233869))>>((2531466092.6036797)>>>(((tmp = -1791229364, tmp)*(-2210950307.206208))%((tmp = -806645443, tmp)<<((((((((tmp = 112334634.26187229, tmp)%(x|((((2154021796.1166573)+x)&((-1047293079.9686966)^(tmp = -1894127139, tmp)))+(tmp = 1910946653.2314827, tmp))))^(293142672.5016146))-x)<<(-1593533039.8718698))+x)>>(x<<(((46359706.50393462)&(tmp = 272146661, tmp))|(tmp = 2117690168, tmp))))%(tmp = -1784737092.4924843, tmp)))))))-(1465796246)));
+ assertEquals(0, x &= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= (x+(-1612418456)));
+ assertEquals(0, x &= ((tmp = -843964311, tmp)/x));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x *= x);
+ assertEquals(NaN, x += (x>>>(54020240)));
+ assertEquals(489206868, x |= (489206868));
+ assertEquals(489206868, x &= x);
+ assertEquals(489206848, x &= ((tmp = -1699133906.2361684, tmp)>>(tmp = 2658633814, tmp)));
+ assertEquals(489206848, x |= x);
+ assertEquals(1910559006, x -= (tmp = -1421352158, tmp));
+ assertEquals(1, x >>= x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= (x^(tmp = 2745376003.2927403, tmp)));
+ assertEquals(0, x %= (((tmp = 3199743302.1063356, tmp)^((-1905944176)&(x>>>(187247029.5209098))))<<((x*((-1394648387)*(1252234289)))-(3140049815))));
+ assertEquals(0, x <<= (-2567872355));
+ assertEquals(0, x %= (tmp = 1057707555.8604916, tmp));
+ assertEquals(0, x %= ((tmp = -1877857405.0228279, tmp)>>>(((tmp = 423831184, tmp)*((tmp = -2106757468.324615, tmp)%(tmp = -1197717524.6540637, tmp)))>>(tmp = -93746263.46774769, tmp))));
+ assertEquals(0, x |= x);
+ assertEquals(-0, x *= ((tmp = 1317609776.6323466, tmp)*(tmp = -26959885.89325118, tmp)));
+ assertEquals(0, x >>= (-1288116122.0091262));
+ assertEquals(0, x &= ((370818172.92511404)%((tmp = -528319853.54781747, tmp)*(x/((tmp = -2839758076, tmp)^(x+(((-1258213460.041857)<<(tmp = 302017800.72064054, tmp))|((((tmp = -624254210, tmp)^((-338165065.97507)|((623392964)-x)))>>>x)%(tmp = 2767629843.0643625, tmp)))))))));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x |= ((-2001549164.1988192)*x));
+ assertEquals(0, x -= x);
+ assertEquals(0, x *= (((((165836842.14390492)*(tmp = -3220002961, tmp))|(-2840620221.747431))%((x/(tmp = 3153915610, tmp))>>>(tmp = 2018941558, tmp)))>>>x));
+ assertEquals(-0, x *= (-231994402.93764925));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x %= (tmp = 2702385056.1149964, tmp));
+ assertEquals(0, x <<= (tmp = 378459323, tmp));
+ assertEquals(0, x >>>= ((x&(x&(((-1014963013)<<(x&((tmp = -3110294840, tmp)|(x+(x<<(1129643420))))))+(1093795819.1853619))))+((((tmp = -2295103369.697398, tmp)&(((370501313.43019223)>>>(2465439579))/x))-x)>>x)));
+ assertEquals(0, x /= ((tmp = 1779625847, tmp)+(tmp = -662459654.6908865, tmp)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= ((tmp = 2723291421, tmp)|(277246502.4027958)));
+ assertEquals(0, x ^= (((-2936270162)>>>((((tmp = -2019015609.1648235, tmp)|(47218153))*(-823685284))+x))&(x<<(x*(x|(((tmp = -941955398, tmp)^(tmp = -2365238993.5300865, tmp))-(778674685)))))));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= (-175235975.8858137));
+ assertEquals(-2684493800.1062117, x += (tmp = -2684493800.1062117, tmp));
+ assertEquals(-1290806265.6063132, x -= (-1393687534.4998984));
+ assertEquals(-1290806265, x >>= (((x>>(tmp = -1710112056.4935386, tmp))*(586227650.2860553))<<(tmp = -2918251533.6052856, tmp)));
+ assertEquals(23470008, x >>>= x);
+ assertEquals(1668734969, x |= ((-295560682.9663689)^(x|((((tmp = -1183847364, tmp)&(3135327694))+(1679127747.1406744))-((-1895825528)%((tmp = -3180115006, tmp)+((tmp = 2373812187, tmp)|x)))))));
+ assertEquals(1744306169, x |= (1188503928.5009093));
+ assertEquals(1744306169, x %= (tmp = -2723982401.4997177, tmp));
+ assertEquals(3488612338, x += x);
+ assertEquals(3488612337, x += (((x/(-325849204))>>x)|(-1820624550.9149108)));
+ assertEquals(-1511119305, x ^= (tmp = 1778506182.2952862, tmp));
+ assertEquals(-12211415, x %= (x^(tmp = -54943035, tmp)));
+ assertEquals(-12211415, x %= ((-1267051884)%(-643566443.0122576)));
+ assertEquals(-30.84976063258681, x /= (((1052047194)>>>x)&(1495698235.5117269)));
+ assertEquals(-61.69952126517362, x += x);
+ assertEquals(-244, x <<= (x^(x+(tmp = -2822258210.076373, tmp))));
+ assertEquals(-6652, x &= ((tmp = 2593685093, tmp)>>((((2047688852.4609032)<<((x*(-611076291))*x))^(-2665364024.817528))>>>(165267874))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x /= (2454186758));
+ assertEquals(0, x &= (tmp = -2226895206, tmp));
+ assertEquals(0, x += x);
+ assertEquals(-21390701, x += ((-1369004846.0816503)>>(tmp = -2661552634.039692, tmp)));
+ assertEquals(-0.012568536912921919, x /= (1701924507.856429));
+ assertEquals(7.09517966608176e-11, x /= (tmp = -177141911.8955555, tmp));
+ assertEquals(0, x >>= (tmp = 231535697, tmp));
+ assertEquals(1383687797, x ^= (tmp = -2911279499.568808, tmp));
+ assertEquals(1383687797, x %= (tmp = -2258636646.5294995, tmp));
+ assertEquals(1319, x >>= ((tmp = -2549411892.8426056, tmp)/(((((1532476676)^(153720871.82640445))+x)/(((2988190456.3206205)&(tmp = -2920873674, tmp))-(((((tmp = -1044518167.0581458, tmp)>>x)-((((tmp = -194701879.13505793, tmp)&(498352051))&((tmp = -2167339635.6529818, tmp)^(((x>>(tmp = 700159851, tmp))*(tmp = 2874921158, tmp))/x)))-((2856128689)|((-1876321441)>>>(2110732915)))))^((((tmp = -193379494.18825436, tmp)/(-3055182489.533142))<<x)+((tmp = -2286109605, tmp)>>(tmp = 698475484.3987849, tmp))))^(3182231653.500364))))|(((tmp = -194670835, tmp)>>>((786780139)%(((2114171416.2305853)^(1703145352.8143656))/x)))>>>((tmp = -3029462067, tmp)>>((67647572.02624655)&(x*(-2394283060))))))));
+ assertEquals(13903855, x |= ((tmp = -2515306586, tmp)>>>x));
+ assertEquals(54311, x >>>= ((-2413722658)-((tmp = -2159787584, tmp)^(tmp = 949937622.9744623, tmp))));
+ assertEquals(108622, x += x);
+ assertEquals(1250717187, x ^= ((tmp = 842692148, tmp)+(((2649331689.694273)<<x)-(tmp = -2992181273, tmp))));
+ assertEquals(4536777, x %= (tmp = 73304730, tmp));
+ assertEquals(0, x -= x);
+ assertEquals(-580081499, x ^= ((tmp = -580081499.0170684, tmp)^(x%(tmp = -1542730817.88261, tmp))));
+ assertEquals(-1382738784, x <<= x);
+ assertEquals(-1382738784, x <<= x);
+ assertEquals(2912228512, x >>>= (x*(x>>>x)));
+ assertEquals(-1076374105, x |= (2589443367));
+ assertEquals(-0.2818750938197037, x /= (((tmp = -1559525732.9603848, tmp)|(-477068917.5483327))>>>((-688616257)*((((tmp = -1192490153.1226473, tmp)*(-502280624.0265591))<<(-442688727.4881985))%(x+(((((tmp = -2948836853.831935, tmp)-(tmp = -2850398330.910424, tmp))>>>(x>>>(-1947835558)))^x)+(x*x)))))));
+ assertEquals(2032826546, x |= (tmp = 2032826546.819327, tmp));
+ assertEquals(3408404827.14316, x += (tmp = 1375578281.1431599, tmp));
+ assertEquals(258183922.14315987, x %= (tmp = 350024545, tmp));
+ assertEquals(479694848, x <<= (tmp = -481187157, tmp));
+ assertEquals(-2147483648, x <<= (((tmp = -2956588045.472398, tmp)>>>(((tmp = -1838455399.1775856, tmp)&(((((tmp = -637547, tmp)/x)&(x^((-44876328.1767962)+(((-2059598286)-(1071496688))%(tmp = -1492254402, tmp)))))-(x%x))*(x|x)))>>(1226250760)))<<x));
+ assertEquals(-2288163338.9020815, x -= (140679690.9020816));
+ assertEquals(4954833118513997000, x *= (-2165419327.4906025));
+ assertEquals(1578331238, x ^= (-2410854298.2270393));
+ assertEquals(-810627292, x += (-2388958530));
+ assertEquals(-810627292, x ^= ((1495296640.4087524)/(tmp = 1561790291, tmp)));
+ assertEquals(657116606535253200, x *= x);
+ assertEquals(0.675840332689047, x %= (((-1816548473)^(((tmp = -151918689.19451094, tmp)|(1819911186.535233))/((((((1514297447)+(tmp = 856485190.9684253, tmp))&(((1809369464.4363992)<<(493538496))*x))+((x*(x>>(x&(tmp = 222293461, tmp))))>>>(((784519621)|x)^((-580766922)>>(tmp = -947264116, tmp)))))>>>((((2794210354.22964)>>>(((2896952532.0183973)*((x+(tmp = -1813175940, tmp))<<(tmp = -1302618293, tmp)))&x))>>(x-(((x|((1456466890.1952953)*x))^(-169979758.19158387))-(x-x))))>>x))&(tmp = 2671604078.3026733, tmp))))/(-1701675745)));
+ assertEquals(0.675840332689047, x %= ((tmp = 2421871143, tmp)^x));
+ assertEquals(NaN, x %= ((((tmp = 1175526323.433271, tmp)+(tmp = 2813009575.952405, tmp))%((tmp = -3112133516.3303423, tmp)&x))&((((((-424329392)^(tmp = 1430146361, tmp))+x)-(1533557337.268306))%((tmp = -3117619446, tmp)-(-3127129232)))>>>x)));
+ assertEquals(NaN, x += x);
+ assertEquals(0, x >>>= ((1710641057.7325037)%(104961723.56541145)));
+ assertEquals(0, x <<= (tmp = -970072906, tmp));
+ assertEquals(0, x *= (87768668));
+ assertEquals(-1464968122, x ^= (tmp = -1464968122, tmp));
+ assertEquals(-1467983895, x ^= ((tmp = -1204896021, tmp)>>>(((91792661)&(x>>>(((-2364345606)>>>x)*x)))+x)));
+ assertEquals(2.991581508270506, x /= (-490704963.5591147));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>= ((tmp = 639854873, tmp)%(tmp = 743486160.3597239, tmp)));
+ assertEquals(0, x <<= (tmp = 1045577245.3403939, tmp));
+ assertEquals(0, x >>= ((tmp = -1932462290, tmp)|(tmp = 1629217987, tmp)));
+ assertEquals(517617438, x ^= ((tmp = 2737789043, tmp)%(tmp = -2220171604.135681, tmp)));
+ assertEquals(126371, x >>>= ((tmp = 205210223.69909227, tmp)-(tmp = 598118404, tmp)));
+ assertEquals(918548455, x |= ((918228734.8363427)+(x+x)));
+ assertEquals(918548455, x |= ((tmp = 599828198, tmp)>>((tmp = -851081330, tmp)|(tmp = -1152596996.8443217, tmp))));
+ assertEquals(918548443.7739062, x -= ((tmp = 1497642976.2260938, tmp)%(x>>(tmp = -548469702.5849569, tmp))));
+ assertEquals(0.7739062309265137, x %= (x&x));
+ assertEquals(2317939163.8239403, x *= (tmp = 2995116296, tmp));
+ assertEquals(1014415360, x <<= (-279972114));
+ assertEquals(0, x &= ((296810932)/(x*(tmp = -2750499950, tmp))));
+ assertEquals(0, x *= (x%((126285451.05086231)>>>(x*(tmp = -2789790532, tmp)))));
+ assertEquals(0, x >>>= ((975695102.5771483)%(x-((-1011726540)-((tmp = 2223194882, tmp)/x)))));
+ assertEquals(-1747794584, x |= (-1747794584.3839395));
+ assertEquals(-543544679, x %= (tmp = -1204249905, tmp));
+ assertEquals(-543544679, x %= (-881024001));
+ assertEquals(1, x /= x);
+ assertEquals(-1879376393, x |= ((tmp = 161643764, tmp)|(tmp = 2281346499.9084272, tmp)));
+ assertEquals(1.321124264431369, x /= (-1422558379.7061746));
+ assertEquals(1, x >>>= (x&(tmp = -963118950.4710281, tmp)));
+ assertEquals(3, x ^= ((x+x)/x));
+ assertEquals(1, x /= x);
+ assertEquals(1, x &= (2090796073));
+ assertEquals(-1284301873, x ^= (((-11041168.146357536)+(tmp = -1273260707.8134556, tmp))+x));
+ assertEquals(292559045, x &= (x&((-2401110739)^((tmp = 630802904, tmp)^(((1012634447.0346229)+x)%((tmp = -1240091095, tmp)%(x/(-1483936527))))))));
+ assertEquals(0, x %= x);
+ assertEquals(0, x /= (tmp = 613145428.3653506, tmp));
+ assertEquals(0, x /= ((x-(tmp = 3116638456, tmp))*(-973300716)));
+ assertEquals(0, x %= (tmp = -1794741286.0464535, tmp));
+ assertEquals(0, x &= x);
+ assertEquals(0, x >>= (-551370105.0746605));
+ assertEquals(-1471996874, x ^= ((2822970422.2331414)-x));
+ assertEquals(-277914313, x |= (tmp = -818980601.2544096, tmp));
+ assertEquals(-34, x >>= x);
+ assertEquals(305422768, x -= (-305422802));
+ assertEquals(-2406146240, x += (tmp = -2711569008, tmp));
+ assertEquals(1073745408, x &= (tmp = -3046625618, tmp));
+ assertEquals(1073745408, x <<= ((-1234108306.7646303)<<((-233519302)|x)));
+ assertEquals(1073745408, x %= (tmp = 1898831268, tmp));
+ assertEquals(1073745408, x <<= (((tmp = 3089406038, tmp)/x)&(-2960027680)));
+ assertEquals(65536, x >>>= (2858188366));
+ assertEquals(128, x >>>= ((-2640257239.857275)%((tmp = -3185405235.3177376, tmp)*x)));
+ assertEquals(128, x >>>= x);
+ assertEquals(128, x -= (x&(x-(tmp = -247588018, tmp))));
+ assertEquals(81616906825.07776, x *= (tmp = 637632084.57092, tmp));
+ assertEquals(78860097686.07776, x -= (((1507215684)^((709254783)+(((x<<x)*((-2890828152.667641)%(2537817529.2041526)))^x)))+(3114024487)));
+ assertEquals(-2920545695.721283, x += (((tmp = -2555437435, tmp)>>>x)-((2920546109.72129)+x)));
+ assertEquals(-2879412281.721283, x += ((-1662428756)>>>(tmp = -1928491386.6926208, tmp)));
+ assertEquals(67403845, x &= (tmp = 2921644117, tmp));
+ assertEquals(16850961, x >>>= (((-1039328365)>>>(tmp = -768615112, tmp))<<((1037261855)*(tmp = -2906902831.4797926, tmp))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x *= ((-2729056530)/((-1776175111)%(1493002300.4604707))));
+ assertEquals(0, x *= (tmp = 370696035.22912216, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x |= ((((((tmp = -1541196993, tmp)^x)/(854730380.1799632))/(2879117705.492209))+((((-2892068577)^(-2460614446.1044483))>>>((743413943)<<(-1285280084.4220598)))/(tmp = -1719994579.5141463, tmp)))%(((((tmp = 2522797851.088227, tmp)<<(tmp = 2257160597.1538725, tmp))/(-680406007))&((x>>>(tmp = -260350730, tmp))^(tmp = 1920522110.852598, tmp)))>>(-697620442))));
+ assertEquals(0, x &= x);
+ assertEquals(-591399642.958673, x += (x-(tmp = 591399642.958673, tmp)));
+ assertEquals(27, x >>>= (tmp = -726721317.2109983, tmp));
+ assertEquals(-2043736843, x -= (2043736870));
+ assertEquals(-3991674, x >>= (tmp = 1098126089, tmp));
+ assertEquals(-997919, x >>= ((x%(((x*(((-1497329257.1781685)%(2334511329.2690516))/(-3072526140.6635056)))+(-1843998852))-(tmp = 240300314.34070587, tmp)))+(714080860.6032693)));
+ assertEquals(-0, x %= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x >>= (tmp = 538348328.5363884, tmp));
+ assertEquals(0, x *= (800317515));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= (984205514));
+ assertEquals(857282491, x += (tmp = 857282491, tmp));
+ assertEquals(587792897, x &= (tmp = 2951307845.164059, tmp));
+ assertEquals(595301269, x |= (tmp = 24285588.90314555, tmp));
+ assertEquals(1190602538, x += x);
+ assertEquals(0, x -= x);
+ assertEquals(-442423060, x |= ((x^((x-(tmp = 2342497475.637024, tmp))%(-1900074414.7678084)))|((tmp = 1932380130, tmp)%(x%(2291727569.817062)))));
+ assertEquals(-442423060, x %= (((tmp = 703479475.545413, tmp)>>(x-x))<<(2435723056.753845)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= x);
+ assertEquals(-1265317851, x |= (tmp = -1265317851, tmp));
+ assertEquals(-2, x >>= (-2015895906.8256726));
+ assertEquals(-0, x %= x);
+ assertEquals(-0, x %= (((1219237746)+(284683029))*(((tmp = 2288119628, tmp)|(-404658161.2563329))*(-265228691.74142504))));
+ assertEquals(1039509109, x -= (-1039509109));
+ assertEquals(2079018218, x += x);
+ assertEquals(-1979.9362673719077, x /= ((3219723500)>>x));
+ assertEquals(-62, x >>= ((x/(326466691))*(tmp = -607654070, tmp)));
+ assertEquals(-45, x |= (tmp = -2954888429.549882, tmp));
+ assertEquals(-1180929712, x &= (3114037588.570232));
+ assertEquals(815550480, x &= (-2302684143.3378315));
+ assertEquals(815550480, x %= (-2177479570));
+ assertEquals(815550480, x %= (tmp = 2895822167, tmp));
+ assertEquals(815550480, x %= (-1247621230.5438688));
+ assertEquals(283929811, x -= ((tmp = 251831053.17096448, tmp)|((tmp = 1140463506.004994, tmp)+(tmp = -743224673.546309, tmp))));
+ assertEquals(1825767424, x <<= (((tmp = 1732353599, tmp)^(tmp = 658726044, tmp))>>>((-2827889370.932477)%(tmp = 1950139204.3291233, tmp))));
+ assertEquals(1828450414, x |= (tmp = 1618538606, tmp));
+ assertEquals(0, x <<= (-2411670689.045702));
+ assertEquals(0, x <<= (-27744888.428537607));
+ assertEquals(-0, x /= (tmp = -1597552450, tmp));
+ assertEquals(0, x >>>= (((2165722776.7220936)>>>(tmp = 1233069931, tmp))>>>(-1120420811)));
+ assertEquals(-0, x *= ((tmp = -1505252656, tmp)>>((((3035637099.6156535)&((467761577.7669761)>>(-361034537)))^(tmp = -2347994840.6541123, tmp))*(tmp = -2191739821, tmp))));
+ assertEquals(0, x &= (795727404.0738752));
+ assertEquals(-0, x *= (tmp = -3125944685.3991394, tmp));
+ assertEquals(-0, x *= (x&x));
+ assertEquals(0, x >>= ((tmp = -2045709233, tmp)^x));
+ assertEquals(NaN, x /= (x>>(x/(3102894071))));
+ assertEquals(NaN, x += ((tmp = 2149079756.8941655, tmp)-(tmp = 810121645.305179, tmp)));
+ assertEquals(0, x >>>= (-859842989));
+ assertEquals(0, x >>>= (tmp = 2530531143.9369526, tmp));
+ assertEquals(0, x >>= (((-932981419.6254237)|(tmp = 1591591715, tmp))>>>(x+((3149795006)>>>(tmp = 613352154, tmp)))));
+ assertEquals(-4294967295, x -= ((((-2289331668)%(-282648480.0078714))>>(-1373720705.5142756))>>>((tmp = 15511563.517014384, tmp)/(360279080))));
+ assertEquals(1, x &= x);
+ assertEquals(0, x >>= (x^(-2791872557.5190563)));
+ assertEquals(0, x &= ((tmp = 336466956.7847167, tmp)>>((1235728252.053619)|(x<<((1828176636.13488)%x)))));
+ assertEquals(-0, x *= (-364042830.8894656));
+ assertEquals(0, x >>>= x);
+ assertEquals(-1675298680, x |= ((2323049541.321387)+(296619075)));
+ assertEquals(-0, x %= x);
+ assertEquals(-1583048579.4420977, x += (-1583048579.4420977));
+ assertEquals(0, x -= x);
+ assertEquals(-2, x ^= ((603171992.0545617)/(((-271888695.718297)%(tmp = -400159585, tmp))^((((tmp = 1536123971, tmp)-(tmp = -2310418666.6243773, tmp))|((tmp = 2242779597.1219435, tmp)<<(tmp = 1758127684.4745512, tmp)))/x))));
+ assertEquals(-2, x &= (x&x));
+ assertEquals(0, x &= ((tmp = -1098806007.4049063, tmp)/(((2862384059.3229523)/((((tmp = -92960842, tmp)-(x>>(tmp = 1244068344.2269042, tmp)))&x)*(tmp = -1919148313, tmp)))<<(-2486665929))));
+ assertEquals(0, x &= x);
+ assertEquals(-1441272634.582818, x -= (1441272634.582818));
+ assertEquals(-3, x >>= (tmp = 3186393693.7727594, tmp));
+ assertEquals(-1206855850, x ^= (((tmp = 607979495.303539, tmp)-(tmp = -2480131951, tmp))^(x*((tmp = 1324153477, tmp)/((1248126288)+(x|(1917331780.0741704)))))));
+ assertEquals(-1206855853, x ^= (x>>>(653288765.1749961)));
+ assertEquals(-1206857725, x &= (3149461539.6019173));
+ assertEquals(3088109571, x >>>= (x*(x<<(tmp = 1543540084, tmp))));
+ assertEquals(536903680, x &= (tmp = 644851760, tmp));
+ assertEquals(536903674.312194, x += (((-3183290076)-((tmp = 40738191.12097299, tmp)-x))/((x>>>(3151371851.9408646))^(tmp = 472698205.22445416, tmp))));
+ assertEquals(2127424750.0506563, x -= (tmp = -1590521075.7384624, tmp));
+ assertEquals(2127424750.0506563, x %= (tmp = 3027273433.361373, tmp));
+ assertEquals(0, x >>= (x>>(1445204441.702043)));
+ assertEquals(NaN, x %= (x<<x));
+ assertEquals(0, x ^= ((tmp = -2903841152.136344, tmp)-(x%(2938662860))));
+ assertEquals(0, x <<= (x<<x));
+ assertEquals(0, x >>>= (tmp = -979481631.33442, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x &= (((x%((((((tmp = 1657446354.6820035, tmp)>>(-1916527001.2992697))/x)>>(tmp = 1450467955, tmp))&(277676820))+(x/(-945587805))))/((tmp = -690095354, tmp)^x))+(tmp = -2651195021, tmp)));
+ assertEquals(0, x <<= (752343428.2934296));
+ assertEquals(0, x /= (tmp = 3022310299, tmp));
+ assertEquals(0, x >>= (x%((388245402)>>>x)));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x %= ((tmp = 1205123529.8649468, tmp)>>>(-2848300932)));
+ assertEquals(0, x >>= ((x>>>x)<<(tmp = 487841938, tmp)));
+ assertEquals(0, x *= (((273436000.9463471)|(tmp = 141134074.27978027, tmp))^(tmp = 1220326800.7885802, tmp)));
+ assertEquals(1525600768, x |= (((x^(-2674777396))-(tmp = 1966360716.3434916, tmp))<<(794782595.9340223)));
+ assertEquals(761927595, x %= (tmp = -763673173, tmp));
+ assertEquals(1.1353588586934338, x /= ((x&((-1897159300.4789193)*(-348338328.0939896)))&(978680905.6470605)));
+ assertEquals(8.631173314966319e-10, x /= (1315416592));
+ assertEquals(0, x >>= ((tmp = -2581239435, tmp)-((-628818404.1122074)<<x)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x *= (2925158236));
+ assertEquals(0, x /= (x+(tmp = 1405531594.0181243, tmp)));
+ assertEquals(0, x *= (2712022631.230831));
+ assertEquals(0, x >>= (tmp = 80518779.81608999, tmp));
+ assertEquals(1953477932.8046472, x += (tmp = 1953477932.8046472, tmp));
+ assertEquals(1953477932, x >>= (tmp = 3025539936, tmp));
+ assertEquals(1953477932, x -= ((-2675119685.8812313)>>(x/(-1808264410.9754841))));
+ assertEquals(1292620430, x += ((-660857502)%((((tmp = -698782819, tmp)%(tmp = 2847304199, tmp))<<(-2423443217.1315413))+x)));
+ assertEquals(78895, x >>>= x);
+ assertEquals(2, x >>= x);
+ assertEquals(2, x <<= (tmp = 1313641888.8301702, tmp));
+ assertEquals(1857416935.2532766, x += (tmp = 1857416933.2532766, tmp));
+ assertEquals(-1677721600, x <<= (tmp = -2482476902, tmp));
+ assertEquals(309226853.62854385, x -= (tmp = -1986948453.6285439, tmp));
+ assertEquals(33965156, x &= (2409088742));
+ assertEquals(Infinity, x /= (x-(x<<((x/(tmp = -3106546671.536726, tmp))/((tmp = 2695710176, tmp)-((((-2102442864)&(857636911.7079853))/x)%(-65640292)))))));
+ assertEquals(1270005091, x |= (tmp = 1270005091.0081215, tmp));
+ assertEquals(1270005091, x %= (tmp = -1833876598.2761571, tmp));
+ assertEquals(158750636, x >>>= x);
+ assertEquals(-1000809106.0879555, x -= (tmp = 1159559742.0879555, tmp));
+ assertEquals(72400936, x &= ((2448271389.3097963)%(tmp = 1517733861, tmp)));
+ assertEquals(282816, x >>= x);
+ assertEquals(282816, x %= (tmp = 3192677386, tmp));
+ assertEquals(0.00021521351827207216, x /= (1314118194.2040696));
+ assertEquals(Infinity, x /= (((tmp = 2822091386.1977024, tmp)&x)%(tmp = -3155658210, tmp)));
+ assertEquals(NaN, x %= (-359319199));
+ assertEquals(0, x >>>= (((tmp = -2651558483, tmp)-(x<<(tmp = 2537675226.941645, tmp)))<<(tmp = 667468049.0240343, tmp)));
+ assertEquals(-0, x *= (tmp = -2827980482.12998, tmp));
+ assertEquals(-0, x %= (((tmp = -689972329.3533998, tmp)>>>x)|(tmp = -7488144, tmp)));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x |= x);
+ assertEquals(-2410373675.2262926, x -= (2410373675.2262926));
+ assertEquals(1840423, x >>= ((-1081642113)^x));
+ assertEquals(-4829451429403412, x *= (-2624098606.35485));
+ assertEquals(-94552231, x %= (tmp = -97015883, tmp));
+ assertEquals(-94433287, x ^= (((tmp = -2297735280, tmp)&(((tmp = 2261074987.7072973, tmp)%((((2565078998)^(-2573247878))|x)|(((tmp = -2120919004.7239416, tmp)>>(tmp = -579224101, tmp))>>>(1905808441))))*(x|(3149383322))))>>(542664972)));
+ assertEquals(0, x ^= (x<<(tmp = -3112569312, tmp)));
+ assertEquals(0, x <<= (-2141934818.7052917));
+ assertEquals(0, x >>= (tmp = -2539525922, tmp));
+ assertEquals(-434467613, x ^= (tmp = -434467613, tmp));
+ assertEquals(-274792709, x |= (1233452601.462551));
+ assertEquals(-274726917, x |= (-2130333750));
+ assertEquals(-272629761, x |= (-1516071602.5622227));
+ assertEquals(-272629761, x |= ((tmp = 3012131694, tmp)&((tmp = -2595342375.8674774, tmp)-((tmp = -2710765792, tmp)>>>((x-(tmp = 2397845540, tmp))+(2496667307))))));
+ assertEquals(-4194305, x |= (1343705633.165825));
+ assertEquals(4190207, x >>>= ((tmp = 276587830, tmp)*((tmp = -1517753936, tmp)>>x)));
+ assertEquals(0, x >>= (x|((2247486919)-((-1664642412.4710495)*((((tmp = -358185292.17083216, tmp)-(tmp = -1472193444, tmp))*(tmp = 2699733752, tmp))&((x|(x<<(1137610148.1318119)))>>(((375089690.8764564)*x)&(tmp = 859788933.9560187, tmp))))))));
+ assertEquals(0, x %= (3080673960));
+ assertEquals(0, x >>>= (1328846190.1963305));
+ assertEquals(1249447579, x |= (-3045519717.580775));
+ assertEquals(-0.8743931060971377, x /= (-1428931187));
+ assertEquals(1, x |= ((tmp = -1756877535.7557893, tmp)/((-142900015.93200803)<<(1414557031.347334))));
+ assertEquals(759627265, x ^= (759627264.0514802));
+ assertEquals(741823, x >>= (1106391210));
+ assertEquals(610451, x &= ((x>>>((919849416)+((tmp = -427708986, tmp)^((x%x)|(tmp = -2853100288.932063, tmp)))))*x));
+ assertEquals(372650423401, x *= x);
+ assertEquals(410404493, x >>>= ((((-1425086765)>>>x)>>((2813118707.914771)>>(-424850240)))^x));
+ assertEquals(120511585729013, x *= ((tmp = -1889454669, tmp)>>>x));
+ assertEquals(120513295294304.22, x -= (tmp = -1709565291.2115698, tmp));
+ assertEquals(6164, x >>>= ((2244715719.397763)^(tmp = -741235818.6903033, tmp)));
+ assertEquals(937572790.468221, x -= (tmp = -937566626.468221, tmp));
+ assertEquals(937572790, x |= ((2129102867.156146)*(x%x)));
+ assertEquals(32, x &= ((2700124055.3712993)>>>((1977241506)>>>(-2915605511))));
+ assertEquals(32, x %= (tmp = -2513825862, tmp));
+ assertEquals(0, x <<= (-1379604802));
+ assertEquals(0, x >>>= (tmp = -1033248759, tmp));
+ assertEquals(-1151517050, x ^= (3143450246));
+ assertEquals(-180577, x |= ((738373819.4081701)^(-357134176)));
+ assertEquals(-0, x %= x);
+ assertEquals(-2086887759, x |= (tmp = 2208079537, tmp));
+ assertEquals(-2, x >>= (1460216478.7305799));
+ assertEquals(-2, x %= ((-1979700249.0593133)^(-3156454032.4790583)));
+ assertEquals(-256, x <<= ((1810316926)>>>(tmp = 414362256, tmp)));
+ assertEquals(-1, x >>= (((((((-1616428585.595561)*((tmp = 2574896242.9045777, tmp)|(86659152.37838173)))>>(((tmp = 2476869361, tmp)&((x+((tmp = -2445847462.1974697, tmp)>>(tmp = -1960643509.5255682, tmp)))+(x|(((((2231574372.778028)|(tmp = 1824767560, tmp))>>>((1108035230.2692142)|(tmp = 2354035815, tmp)))/((tmp = -2602922032, tmp)>>(-925080304.7681987)))-x))))-(x>>x)))>>>((tmp = 751425805.8402164, tmp)|(tmp = 1165240270.3437088, tmp)))-x)*(2870745939))-(x>>>((tmp = 2986532631.405425, tmp)>>>(((tmp = 2547448699, tmp)+(((((x<<(((((-2756908638.4197435)>>>(3134770084))-(-1147872642.3756688))%(x*(tmp = -282198341.6600039, tmp)))+(-770969864.2055655)))+((-2725270341)^x))/(-3093925722))>>(x&x))>>((tmp = -2705768192, tmp)>>>(((tmp = 577253091.6042917, tmp)/(((x&(((((x+x)>>>(-1000588972))/(x&(717414336)))^(tmp = 428782104.21504414, tmp))>>>(1084724288.953223)))%(tmp = -2130932217.4562194, tmp))&x))-(-286367389)))))+((x>>(tmp = 2001277117, tmp))>>((tmp = 1028512592, tmp)^((tmp = 2055148650, tmp)+((tmp = 1490798399, tmp)/(tmp = -2077566434.2678986, tmp))))))))));
+ assertEquals(-1, x |= (tmp = 1542129482, tmp));
+ assertEquals(-671816743, x &= (tmp = -671816743.9111726, tmp));
+ assertEquals(-1840333080, x -= (1168516337));
+ assertEquals(-1755382023, x |= ((((tmp = 2625163636.0142937, tmp)>>>((tmp = 1534304735, tmp)^x))-(tmp = -1959666777.9995313, tmp))%x));
+ assertEquals(-1750421896, x += (x>>>(tmp = -1364828055.1003118, tmp)));
+ assertEquals(-72864007, x %= (tmp = 239651127, tmp));
+ assertEquals(-72863956, x -= (((tmp = -1103261657.626319, tmp)*((tmp = 2789506613, tmp)+((tmp = 2294239314, tmp)>>>(2588428607.5454817))))>>x));
+ assertEquals(-170337477, x -= (tmp = 97473521, tmp));
+ assertEquals(-170337477, x |= (((tmp = 246292300.58998203, tmp)/(((tmp = -2664407492, tmp)|((-2416228818)^(tmp = 909802077, tmp)))%(tmp = 532643021.68109465, tmp)))/(tmp = 1015597843.8295637, tmp)));
+ assertEquals(1, x >>>= (((tmp = -2247554641.7422867, tmp)/(1186555294))%(tmp = -785511772.3124621, tmp)));
+ assertEquals(1188939891.668705, x -= (tmp = -1188939890.668705, tmp));
+ assertEquals(1188939891, x &= x);
+ assertEquals(1188413555, x &= (((tmp = -372965330.5709038, tmp)%(((tmp = 3108909487, tmp)|(x^(-1056955571.9951684)))^(-1549217484.009048)))/(x>>>(1403428437.9368362))));
+ assertEquals(-0.7343692094664643, x /= (-1618278026.4758227));
+ assertEquals(0, x -= x);
+ assertEquals(0, x &= (-2701762139.7500515));
+ assertEquals(0, x >>>= (((-1692761485.2299166)^x)+(tmp = -1221349575.938864, tmp)));
+ assertEquals(0, x <<= ((2148160230)<<x));
+ assertEquals(0, x <<= (((x<<(-740907931.38363))&(tmp = -930960051.6095045, tmp))>>(x/((tmp = -1921545150.1239789, tmp)/(-3015379806)))));
+ assertEquals(0, x <<= x);
+ assertEquals(NaN, x /= (x|x));
+ assertEquals(0, x >>= (tmp = -2265988773, tmp));
+ assertEquals(-0, x *= (((x<<(-928153614))<<(-989694208))^(2544757713.481016)));
+ assertEquals(0, x >>= ((tmp = 578009959.5299993, tmp)>>x));
+ assertEquals(0, x /= ((((tmp = 412689800.0431709, tmp)&(1630886276))*(tmp = 2028783080.7296097, tmp))/x));
+ assertEquals(0, x |= ((((x*(-2197198786))>>((2719887264.761987)<<(tmp = 2253246512, tmp)))-(tmp = -150703768.07045603, tmp))/(((-3160098146)%(((((1486098047.843547)>>(((tmp = -593773744.1144242, tmp)&(x<<(2651087978)))|((-680492758.930413)>>(tmp = 88363052.13662052, tmp))))<<x)<<(tmp = 2232672341, tmp))/((x<<x)&(((((348589117.64135563)<<(-1010050456.3097556))^(x/(tmp = -2282328795, tmp)))-(tmp = 1653716293, tmp))-((3157124731)/((tmp = 3007369535.341745, tmp)%(tmp = -2246556917, tmp)))))))+x)));
+ assertEquals(0, x >>= ((1935211663.5568764)>>(x-(tmp = 2116580032, tmp))));
+ assertEquals(-1725272693, x ^= (tmp = -1725272693, tmp));
+ assertEquals(313683, x >>>= (-1782632531.2877684));
+ assertEquals(0.009772287443565642, x /= (tmp = 32099240, tmp));
+ assertEquals(-647945916.9902277, x += (-647945917));
+ assertEquals(3647021380, x >>>= ((((((((2470411371.688199)<<x)>>x)-(x>>>((tmp = 1750747780, tmp)/x)))-x)<<(tmp = -2666186351.695101, tmp))^(((tmp = 2749205312.6666174, tmp)%x)&(2069802830.360536)))<<(tmp = 6051917.9244532585, tmp)));
+ assertEquals(-647939220, x |= ((x>>>((tmp = -2980404582.794245, tmp)>>>(-996846982)))^x));
+ assertEquals(-572178450, x |= ((-800571300.3277931)+(tmp = 2084365671, tmp)));
+ assertEquals(1172311208, x &= (x&((tmp = -1207487657.8953774, tmp)^x)));
+ assertEquals(12176516458994, x += ((((tmp = -1534997221, tmp)%(412142731))*((tmp = 2958726303, tmp)>>(1489169839)))+(((-574726407.2051775)>>>(((1772885017)<<(947804536.9958035))>>(-2406844737)))>>x)));
+ assertEquals(-1480065024, x <<= x);
+ assertEquals(-1736999042.227129, x += (tmp = -256934018.22712898, tmp));
+ assertEquals(-1338699394, x ^= ((((((x%(((tmp = -2551168455.222048, tmp)|(3213507293.930222))/((-1559278033)>>((tmp = 3107774495.3698573, tmp)-(2456375180.8660913)))))*((x*(tmp = 1088820004.8562922, tmp))+((tmp = 1850986704.9836102, tmp)%(tmp = -1226590364, tmp))))*(1786192008))&(((2193303940.310299)%(tmp = 1041726867.0602217, tmp))|((2210722848)/((-1293401295.6714435)&((tmp = 3052430315, tmp)|x)))))>>>(tmp = -2028014470.1524236, tmp))+(((1695818039.0383925)<<((1669068145)*(-2746592133.899276)))<<(tmp = 519092169, tmp))));
+ assertEquals(-334674849, x >>= (1170377794));
+ assertEquals(-10214, x >>= ((tmp = 1074704264.3712895, tmp)>>>((tmp = -1200860192, tmp)^((tmp = 539325023.4101218, tmp)*((tmp = -588989295, tmp)|x)))));
+ assertEquals(1384169472, x &= (1384171140));
+ assertEquals(1384169472, x >>>= ((tmp = -2161405973.830981, tmp)*(tmp = 2054628644, tmp)));
+ assertEquals(1610140972, x |= (527961388));
+ assertEquals(1073273198, x += ((tmp = -259650225.71344328, tmp)&(tmp = -344359694, tmp)));
+ assertEquals(65507, x >>= ((x<<((tmp = 2925070713.5245204, tmp)%(x+((tmp = -1229447799, tmp)/(((x/(x|(((-2337139694)|((((((2996268529.7965417)&x)%(((tmp = -1088587413, tmp)>>(-1384104418.90339))>>((tmp = -1643984822.3946526, tmp)+x)))%(((1118125268.4540217)-((((-1975051668.6652594)-(-704573232))+((tmp = 1674952373, tmp)/(tmp = 1321895696.0062659, tmp)))*(tmp = 1820002533.2021284, tmp)))>>>(tmp = -583960746.9993203, tmp)))|((tmp = -2577675508.550925, tmp)&x))/(tmp = 1459790066, tmp)))/(((((1051712301.7804044)&(tmp = -2726396354, tmp))^(tmp = 263937254.18934345, tmp))+(((x^x)*(((tmp = -2289491571, tmp)+x)%(-2239181148)))&x))>>(tmp = -1743418186.3030887, tmp)))))/(tmp = 1475718622, tmp))<<x)))))|(x&((((tmp = -2934707420, tmp)<<x)/x)^(1022527598.7386684)))));
+ assertEquals(2047, x >>= (x-(tmp = 2300626270, tmp)));
+ assertEquals(8384512, x <<= (tmp = -1917680820, tmp));
+ assertEquals(0, x <<= (2393691134));
+ assertEquals(0, x >>= x);
+ assertEquals(649995936.5853252, x -= (tmp = -649995936.5853252, tmp));
+ assertEquals(649995936, x &= x);
+ assertEquals(-0.33672017582945424, x /= (tmp = -1930374188, tmp));
+ assertEquals(-0.33672017582945424, x += (x&((1208055031)^(-2761287670.968586))));
+ assertEquals(0, x |= x);
+ assertEquals(0, x <<= ((-2038368978)/x));
+ assertEquals(0, x >>= (x&((tmp = 2481378057.738218, tmp)&(x+(1172701643)))));
+ assertEquals(0, x <<= ((x*(((((((tmp = 70690601.3046323, tmp)&(((((((((((x+(x+(x^(3118107461))))<<(264682213.41888392))&(tmp = -709415381.8623683, tmp))%(((((-1840054964)>>>(tmp = -405893120.89603686, tmp))|((-625507229)^(3128979265)))>>(x>>((tmp = -2480442390, tmp)*((x>>(tmp = -421414980.88330936, tmp))>>>((tmp = 1850868592, tmp)&(-2948543832.879225))))))|((2986545185)&((tmp = -1947550706, tmp)%(((tmp = 2590238422.1414256, tmp)/(((tmp = -361038812, tmp)>>x)|(((tmp = 1798444068, tmp)|((x&((tmp = -3104542069, tmp)-x))*((tmp = -1158658918, tmp)+((tmp = 2777031040.5552707, tmp)<<(-2816019335.9008327)))))<<x)))/(((2287795988.231702)/x)/(((-2588712925)>>>(2521189250))*((tmp = -2533527920, tmp)+(tmp = 1762281307.2162101, tmp)))))))))/x)/(tmp = 1047121955.5357032, tmp))|(((-121292251)<<(x^(x-(tmp = 1420006180, tmp))))%((-2278606219)>>>(((tmp = -1412487726, tmp)&(((((tmp = 253596554.16016424, tmp)/(tmp = 2083376247.0079951, tmp))^(x^((1549116789.8449988)>>>((((-1844170084)^(tmp = 1886066422, tmp))&x)<<(34918329)))))^(tmp = -440805555.3369155, tmp))-x))%(-1936512969)))))+(2911511178.4035435))|(1012059391))|(x>>>(tmp = -2551794626.158037, tmp)))+((2926596072.210515)/(tmp = -280299595.0450909, tmp))))&((tmp = 1501086971, tmp)^(tmp = 2114076983, tmp)))-((-1679390574.1466925)-(941349044)))-((x>>x)>>((-2600539474.2033434)+(tmp = 2567056503.9079475, tmp))))*(tmp = 1285896052, tmp))%(((tmp = 1191465410.7595167, tmp)>>((tmp = -2857472754, tmp)%x))>>>(((tmp = 1960819627.6552541, tmp)&(-2651207221.127376))*((((-687312743)+((x>>x)<<x))|((((((1549588195)*((tmp = 2733091019, tmp)^((527322540)<<(x>>x))))%(tmp = -2063962943, tmp))*x)*(734060600))&(-3049417708)))+(((((1084267726)+((x|x)^((tmp = -1917070472.4858549, tmp)%((690016078.9375831)*x))))%((((((tmp = -2091172769, tmp)%(2532365378))>>>(-871354260))/(tmp = 254167019.07825458, tmp))&(1330216175.9871218))>>(tmp = 1931099207, tmp)))^(-1116448185.2618852))>>((961660080.8135855)/x)))))))>>>(-1486048007.7053368)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x %= (tmp = -1202200444.6506357, tmp));
+ assertEquals(-0, x *= (-527500796.4145117));
+ assertEquals(0, x >>= (tmp = -2082822707, tmp));
+ assertEquals(0, x *= ((-1882398459.290778)>>>x));
+ assertEquals(0, x &= (x/(tmp = -1569332286.392817, tmp)));
+ assertEquals(-390169607, x |= (-390169607.11600184));
+ assertEquals(-780339214, x += x);
+ assertEquals(-780339214, x %= (2765959073));
+ assertEquals(-5954, x >>= (tmp = -1900007055, tmp));
+ assertEquals(743563420, x &= ((((-1520146483.5367205)|(-2075330284.3762321))-(tmp = -2263151872, tmp))%(-1264641939.957402)));
+ assertEquals(1487126840, x += (x>>>(((x+((tmp = -1263274491, tmp)>>>x))&(470419048.0490037))%(tmp = -2642587112, tmp))));
+ assertEquals(Infinity, x /= (x^x));
+ assertEquals(0, x ^= ((tmp = -1436368543, tmp)+(x/(tmp = -1125415374.3297129, tmp))));
+ assertEquals(0, x += x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x &= (tmp = 3101147204.2905564, tmp));
+ assertEquals(0, x &= (tmp = 2914487586.606511, tmp));
+ assertEquals(0, x += x);
+ assertEquals(0, x -= (((-1738542908.6138556)&(((x+x)-(tmp = -2801153969, tmp))%(tmp = -1206684064.1477358, tmp)))>>((-2575546469.271897)|(tmp = -2573119106, tmp))));
+ assertEquals(-1468808707, x ^= (tmp = -1468808707, tmp));
+ assertEquals(1357349882, x <<= (tmp = -2808501087.7003627, tmp));
+ assertEquals(-572025862, x |= ((((tmp = -2415486246.573399, tmp)/((tmp = -707895732.4593301, tmp)&x))%((-1960091005.0425267)*(972618070.9166157)))-(1649962343)));
+ assertEquals(327213586796843100, x *= (x%(1337884626)));
+ assertEquals(42991616, x &= (-2905576654.1280055));
+ assertEquals(-26049289585042860, x *= (-605915571.6557121));
+ assertEquals(597809748, x >>= ((362850791.077795)/(tmp = 1222777657.4401796, tmp)));
+ assertEquals(597809748, x |= x);
+ assertEquals(770065246, x -= ((-711227660)|(tmp = -508554506, tmp)));
+ assertEquals(593000483097040500, x *= x);
+ assertEquals(0, x %= x);
+ assertEquals(0, x <<= (317862995.456813));
+ assertEquals(0, x >>= ((tmp = 2518385735, tmp)+((-2973864605.267604)/(-930953312.718833))));
+ assertEquals(1227822411, x ^= (x^(1227822411.8553264)));
+ assertEquals(1090520320, x &= (x+((((-2100097959)>>(x/(tmp = -2002285068, tmp)))/(-364207954.9242482))-((tmp = 2771293106.7927113, tmp)-(tmp = -847237774, tmp)))));
+ assertEquals(1090520320, x >>= (((((2439492849)<<((-2932672756.2578926)*((743648426.7224461)+((2942284935)<<((x/(((tmp = 886289462.6565771, tmp)+(-459458622.7475352))>>(tmp = -785521448.4979162, tmp)))|(tmp = -11630282.877367258, tmp))))))-(tmp = -647511106.9602091, tmp))^x)&x));
+ assertEquals(115944291.48829031, x %= (243644007.12792742));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>>= ((tmp = -819782567, tmp)%(tmp = 2774793208.1994505, tmp)));
+ assertEquals(0, x >>= (tmp = 721096000.2409859, tmp));
+ assertEquals(0, x &= ((x%x)%x));
+ assertEquals(-0, x *= ((-1670466344)<<x));
+ assertEquals(0, x >>= (-677240844.904707));
+ assertEquals(NaN, x %= (((((-1575993236.6126876)/(-2846264078.9581823))^((((-2220459664)-(((-1809496020)>>>(tmp = -3015964803.4566207, tmp))&x))/(tmp = -3081895596.0486784, tmp))>>>(x&x)))%(x^(-1338943139)))^(x-((((2074140963.2841332)^(tmp = 1878485274, tmp))%(((x/(-2568856967.6491556))^x)<<((x+x)^((((2139002721)|(x<<(-1356174045.840464)))>>x)-(tmp = 2305062176, tmp)))))>>>(((((x<<(tmp = -1663280319.078543, tmp))-((1498355849.4158854)-((-1321681257)>>>(tmp = -1321415088.6152222, tmp))))^(-2266278142.1584673))+(858538943))&((((x-((x|(((tmp = -1576599651, tmp)+((tmp = 1595319586, tmp)&(-2736785205.9203863)))>>((x+((-1856237826)+x))<<(tmp = -1590561854.3540869, tmp))))^(((-41283672.55606127)&(tmp = 2971132248, tmp))+x)))/(-849371349.1667476))%(x*((-1705070934.6892798)>>>x)))<<((2418200640)*x)))))));
+ assertEquals(0, x >>>= (tmp = 664214199.5283061, tmp));
+ assertEquals(0, x <<= ((-2827299151)<<(1815817649)));
+ assertEquals(1405772596, x |= (tmp = 1405772596, tmp));
+ assertEquals(-1483422104, x <<= (-2791499935.6822596));
+ assertEquals(-45271, x >>= (1740128943.4254808));
+ assertEquals(-45271, x <<= ((2072269957)-((tmp = -2553664811.4472017, tmp)*(tmp = -2502730352, tmp))));
+ assertEquals(1192951471.6745887, x -= (-1192996742.6745887));
+ assertEquals(-353370112, x <<= (tmp = -1410280844, tmp));
+ assertEquals(0, x ^= (x%((2754092728)*(-1017564599.1094015))));
+ assertEquals(-2662096003.2397957, x -= (tmp = 2662096003.2397957, tmp));
+ assertEquals(-2587094028.50764, x -= (tmp = -75001974.7321558, tmp));
+ assertEquals(6693055512339889000, x *= x);
+ assertEquals(897526784, x %= (x-((tmp = 897526813, tmp)%(-1525574090))));
+ assertEquals(7011928, x >>= ((-440899641.344357)%x));
+ assertEquals(8382047686388683, x += (x*(1195398423.8538609)));
+ assertEquals(16764095372777366, x += x);
+ assertEquals(16764096859576696, x -= (tmp = -1486799329.7207344, tmp));
+ assertEquals(16764099774187724, x += (2914611029));
+ assertEquals(16764102926624664, x -= (-3152436939.724612));
+ assertEquals(-538220648, x |= x);
+ assertEquals(269110324, x /= (((-2114698894.6014318)/(tmp = 767687453, tmp))>>(623601568.1558858)));
+ assertEquals(256, x >>= x);
+ assertEquals(-293446891, x += (x+(-293447403)));
+ assertEquals(119, x >>>= ((1759400753)>>(2481263470.4489403)));
+ assertEquals(14, x >>= (762849027.89693));
+ assertEquals(16, x += (x&(x>>(1104537666.1510491))));
+ assertEquals(-12499808227.980995, x *= (tmp = -781238014.2488122, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(1, x &= x);
+ assertEquals(0, x >>>= ((tmp = 1513381008, tmp)|(tmp = 1593208075.7259543, tmp)));
+ assertEquals(0, x &= (-788154636.2843091));
+ assertEquals(-0, x /= (tmp = -2124830879, tmp));
+ assertEquals(0, x &= (934237436));
+ assertEquals(0, x |= x);
+ assertEquals(-79370942.97651315, x += (-79370942.97651315));
+ assertEquals(-79370942.97651315, x %= ((tmp = -2683255523, tmp)<<(tmp = 2323123280.287587, tmp)));
+ assertEquals(-79370942, x |= x);
+ assertEquals(0.05861647801688159, x /= (-1354072177.061561));
+ assertEquals(0, x <<= (((((((tmp = 1989257036, tmp)&(tmp = 1565496213.6578887, tmp))&x)&(tmp = -2798643735.905287, tmp))&(2354854813.43784))%(tmp = 1118124748, tmp))<<((tmp = 2453617740, tmp)*(((tmp = 1762604500.492329, tmp)<<(-2865619363))%(((2474193854.640994)|((tmp = 1425847419.6256948, tmp)|(((-1271669386)%((x|((tmp = -2059795445.3607287, tmp)+x))*(x*x)))>>>(tmp = -2997360849.0750895, tmp))))/(tmp = 2326894252, tmp))))));
+ assertEquals(0, x >>>= ((-671325215)/((-727408755.8793397)>>(tmp = 315457854, tmp))));
+ assertEquals(0, x >>= (x&x));
+ assertEquals(0, x <<= ((x/x)>>>(((((x&x)-((x*(((tmp = -2689062497.0087833, tmp)^x)/((-1465906334.9701924)<<(tmp = -349000262, tmp))))*x))%(1630399442.5429945))*x)+((tmp = 605234630, tmp)%(tmp = 2325750892.5065155, tmp)))));
+ assertEquals(0, x |= (x%((x>>(((((tmp = 1622100459, tmp)<<x)&((((((tmp = 2411490075, tmp)<<x)|x)>>((x<<x)-(-2133780459)))/x)&(x+x)))%(x/((((tmp = 580125125.5035453, tmp)>>>(-470336002.1246581))|((tmp = 871348531, tmp)*x))>>(2866448831.23781))))-((2352334552)-(-562797641.6467373))))-(x^(tmp = -681731388, tmp)))));
+ assertEquals(0, x <<= (tmp = -1358347010.3729038, tmp));
+ assertEquals(-260967814, x |= ((tmp = -260967814.45976686, tmp)%(tmp = 1126020255.1772437, tmp)));
+ assertEquals(NaN, x %= ((((tmp = 3176388281, tmp)<<(tmp = 611228283.2600244, tmp))>>>((tmp = 3068009824, tmp)+(tmp = 2482705111, tmp)))>>>((tmp = -750778285.2580311, tmp)>>>x)));
+ assertEquals(0, x <<= (x>>>x));
+ assertEquals(0, x /= (1238919162));
+ assertEquals(0, x >>= (x^x));
+ assertEquals(0, x &= (-2137844801));
+ assertEquals(0, x >>>= (x^(x*(-1774217252))));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x |= x);
+ assertEquals(0, x &= (x<<(tmp = 2791377560, tmp)));
+ assertEquals(-1330674638.8117397, x += (tmp = -1330674638.8117397, tmp));
+ assertEquals(353, x >>>= (-212202857.4320326));
+ assertEquals(353, x ^= ((((x+(tmp = 1448262278, tmp))-(-3141272537))>>(tmp = 1116596587.7832575, tmp))>>>((x-(((tmp = 303953098, tmp)>>>((tmp = 691514425, tmp)/((176223098)*(((2876180016)%(-1805235275.892374))|x))))<<(((tmp = 528736141.838547, tmp)^(2556817082))*(2898381286.2846575))))|((-1445518239)&(tmp = 389789481.9604758, tmp)))));
+ assertEquals(0, x >>>= (-227376461.14343977));
+ assertEquals(0, x <<= (tmp = -2575967504, tmp));
+ assertEquals(0, x <<= (x^((-2668391896)>>((x+(tmp = 598697235.9205595, tmp))+((((-2105306785)|((-1174912319.794015)>>>(x-((148979923)%((((tmp = -2459140558.4436393, tmp)|(1265905916.494016))^(tmp = 1213922357.2230597, tmp))|(1028030636))))))%x)+(((tmp = 1393280827.0135512, tmp)^((tmp = 1210906638, tmp)+(-1572777641.1396031)))<<x))))));
+ assertEquals(0, x *= (tmp = 2134187165, tmp));
+ assertEquals(-1084549964, x -= (tmp = 1084549964, tmp));
+ assertEquals(-2045706240, x &= ((tmp = -1250758905.7889671, tmp)*(x+(((x<<(x/(tmp = -738983664.845448, tmp)))>>>x)&(tmp = 2197525295, tmp)))));
+ assertEquals(-2045706240, x ^= (((522049712.14743733)>>(tmp = -2695628092, tmp))>>>(tmp = -2603972068, tmp)));
+ assertEquals(2249261056, x >>>= x);
+ assertEquals(-33291, x |= ((((1891467762)<<(184547486.213719))-((458875403.50689447)^(((x&(x*x))|x)%(-3127945140))))|(-100765232)));
+ assertEquals(-33291, x %= (1460486884.1367688));
+ assertEquals(-1, x >>= (tmp = -2667341441, tmp));
+ assertEquals(-3.6289151568259606e-10, x /= (tmp = 2755644474.4072013, tmp));
+ assertEquals(-3.6289151568259606e-10, x %= (tmp = 1186700893.0751028, tmp));
+ assertEquals(0, x <<= (tmp = -1199872107.9612694, tmp));
+ assertEquals(371216449, x ^= ((tmp = 371324611.1357789, tmp)&(x-(x|((tmp = -518410357, tmp)>>((tmp = 687379733, tmp)/x))))));
+ assertEquals(0.3561383159088311, x /= (((((x%(((((-2293101242)%((((495316779)/x)-((-3198854939.8857965)>>>((tmp = -288916023, tmp)-(x^(tmp = -2504080119.431858, tmp)))))^(-1201674989)))-((2965433901)*(405932927)))/((1974547923)|(tmp = 534069372, tmp)))-(x-((x+(-1258297330))%x))))<<(((-2648166176.4947824)^(-3043930615))&(1550481610)))<<(tmp = -3118264986.743822, tmp))<<x)|x));
+ assertEquals(-46272499.15029934, x -= (tmp = 46272499.50643766, tmp));
+ assertEquals(-6, x >>= ((tmp = -731454087.0621192, tmp)>>>x));
+ assertEquals(-2.7207928474520667e-9, x /= (((x<<(x|((tmp = -1650731700.9540024, tmp)/(tmp = -677823292, tmp))))^((((((1972576122.928667)>>x)%(2952412902.115453))<<((-2888879343)+(tmp = -425663504, tmp)))>>>(((((tmp = 1089969932, tmp)>>>(x|((-2088509661)/(1131470551))))>>>x)+x)|(tmp = 955695979.7982506, tmp)))|(((((tmp = 826954002.6188571, tmp)^(2016485728))|((x/(((x<<(tmp = 2493217141, tmp))/(-2259979800.997408))-(tmp = -427592173.41389966, tmp)))%(((-471172918)/x)>>>((383234436.16425097)&(tmp = 1664411146.5308032, tmp)))))*(tmp = 1863669754.7545495, tmp))*(x>>(2062197604)))))>>>((x-(2624545856))*(tmp = 1025803102, tmp))));
+ assertEquals(0, x >>= ((tmp = 1068702028, tmp)*(296106770)));
+ assertEquals(0, x ^= (x/x));
+ assertEquals(85359536, x ^= (((x|(((tmp = 740629227, tmp)<<(-1107397366))%((tmp = 2315368172, tmp)>>(((-2269513683)|(-2698795048))+(-396757976)))))*(929482738.803125))^(((-1415213955.4198723)-(tmp = -2885808324, tmp))>>>((tmp = -472842353.85736656, tmp)&(tmp = 1684231312.4497018, tmp)))));
+ assertEquals(2075131904, x <<= x);
+ assertEquals(123, x >>>= (x>>>(tmp = 754093009, tmp)));
+ assertEquals(0, x >>= ((-2690948145)/((1988638799)+x)));
+ assertEquals(0, x >>>= (tmp = -798849903.2467625, tmp));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x *= (2431863540.4609756));
+ assertEquals(484934656, x |= ((-2322193663)*(tmp = -2754666771, tmp)));
+ assertEquals(-82505091404694530, x *= (tmp = -170136513, tmp));
+ assertEquals(-82505090515370620, x += ((-148762237)&(tmp = 889417717, tmp)));
+ assertEquals(-908221124, x %= (tmp = -2346393300, tmp));
+ assertEquals(-1242515799, x ^= (2083328917));
+ assertEquals(-1126056310271520600, x *= ((((tmp = -3065605442, tmp)<<(-3012703413))|x)^(-2081329316.4781387)));
+ assertEquals(-1126056309941068000, x += ((((tmp = 1886925157, tmp)&((tmp = -163003119.31722307, tmp)/((tmp = 2094816076, tmp)>>((tmp = -706947027, tmp)^x))))^((1819889650.5261197)<<(-1641091933)))>>x));
+ assertEquals(-1864360191, x |= (((x/x)|x)|x));
+ assertEquals(-1864360191, x &= x);
+ assertEquals(-3728720382, x += x);
+ assertEquals(1042663165, x ^= (535165183.4230335));
+ assertEquals(2644530017.8833704, x += (1601866852.8833704));
+ assertEquals(-574949401, x |= ((tmp = 943193254.5210983, tmp)^((x%(tmp = -2645213497, tmp))*(-1904818769))));
+ assertEquals(1763223578, x ^= ((x^(tmp = -2244359016, tmp))^(tmp = 320955522, tmp)));
+ assertEquals(-1.9640961474334235, x /= (tmp = -897727731.0502782, tmp));
+ assertEquals(1, x >>>= (x-(-3183031393.8967886)));
+ assertEquals(1, x &= (tmp = 1732572051.4196641, tmp));
+ assertEquals(1, x >>= (-1642797568));
+ assertEquals(-2339115203.3140306, x += (-2339115204.3140306));
+ assertEquals(1955852093, x ^= (((((-1469402389)/(-2648643333.1454573))>>>x)<<(x/x))>>x));
+ assertEquals(-965322519, x ^= (3001399252));
+ assertEquals(-2139727840, x &= (tmp = 2298411812.964484, tmp));
+ assertEquals(2103328, x &= (tmp = -2488723009, tmp));
+ assertEquals(1799011007, x |= (tmp = -2498057537.226923, tmp));
+ assertEquals(1799011007, x |= ((-308193085)>>>x));
+ assertEquals(1799011007, x |= x);
+ assertEquals(818879107, x ^= (1542823996.423564));
+ assertEquals(-2601416919234843600, x *= ((-2357923057.076759)-x));
+ assertEquals(-2601416920481796600, x -= (x|(tmp = -3048039765, tmp)));
+ assertEquals(-33690112, x <<= x);
+ assertEquals(1039491072, x &= (tmp = 1039491474.3389125, tmp));
+ assertEquals(126891, x >>= (-3079837011.6151257));
+ assertEquals(-163191923097543, x *= (((tmp = -2847221258.4048786, tmp)*(x-(tmp = 1527622853.5925639, tmp)))^x));
+ assertEquals(753616551, x ^= (-946895202));
+ assertEquals(-347691264, x <<= (tmp = -433184408.33790135, tmp));
+ assertEquals(0, x <<= (x|(tmp = -1911731462.6835637, tmp)));
+ assertEquals(-0, x *= (tmp = -2616154415.1662617, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x *= (2272504250.501526));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x >>>= (2475346113));
+ assertEquals(NaN, x /= (((x+(-2646140897))&(((tmp = 1039073714.142481, tmp)-x)*x))|(x*(((-1277822905.773948)>>(tmp = 2035512354.2400663, tmp))*(77938193.80013895)))));
+ assertEquals(0, x ^= (x<<(tmp = 2491934268, tmp)));
+ assertEquals(0, x &= (tmp = 569878335.4607931, tmp));
+ assertEquals(-88575883, x ^= ((453890820.8012209)-((1569189876)%((-1280613677.7083852)^(-1902514249.29567)))));
+ assertEquals(-88575883, x %= (tmp = 257947563.19206762, tmp));
+ assertEquals(-88575881.7863678, x -= ((tmp = 1257547359.029678, tmp)/(x^(tmp = 948265672.821815, tmp))));
+ assertEquals(-169, x >>= (tmp = -2530523309.6703596, tmp));
+ assertEquals(-1, x >>= x);
+ assertEquals(-1, x |= x);
+ assertEquals(131071, x >>>= (-673590289));
+}
+f();
diff --git a/src/3rdparty/v8/test/mjsunit/numops-fuzz-part3.js b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part3.js
new file mode 100644
index 0000000..7813f91
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part3.js
@@ -0,0 +1,1178 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function f() {
+ var x = 131071;
+ var tmp = 0;
+ assertEquals(1117196836, x -= (-1117065765));
+ assertEquals(3092236000.7125187, x -= (-1975039164.7125185));
+ assertEquals(1, x /= x);
+ assertEquals(-1599945863, x ^= (tmp = 2695021432.453696, tmp));
+ assertEquals(940543782, x ^= (tmp = 2561494111, tmp));
+ assertEquals(891400321673221800, x *= (tmp = 947749949.2662871, tmp));
+ assertEquals(-1509927296, x >>= ((tmp = 1113290009, tmp)-x));
+ assertEquals(-23, x >>= (tmp = 3216989626.7370152, tmp));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x <<= (431687857.15246475));
+ assertEquals(-0, x /= (tmp = -1924652745.081665, tmp));
+ assertEquals(0, x <<= (1312950547.2179976));
+ assertEquals(0, x %= ((tmp = 2110842937.8580878, tmp)|(x<<x)));
+ assertEquals(0, x >>>= ((((-386879000)-((tmp = -2334036143.9396124, tmp)/((tmp = 965101904.2841234, tmp)<<(((3029227182.8426695)<<((tmp = -464466927, tmp)>>((((((tmp = 849594477.4111787, tmp)^(x&((513950657.6663146)%(x>>>x))))-((2898589263)|x))+(tmp = 2842171258.621288, tmp))>>>(tmp = -3158746843, tmp))<<(tmp = -2891369879, tmp))))-(x-(x&(tmp = -1707413686.2706504, tmp)))))))-(-2860419051))*(-1708418923)));
+ assertEquals(-328055783, x += ((((2857010474.8010874)|((tmp = -1415997622.320347, tmp)-(-1706423374)))%(tmp = 824357977.1339042, tmp))^(x>>(x|x))));
+ assertEquals(-168539902503779140, x *= ((tmp = -1057687018, tmp)<<((1408752963)-(2030056734))));
+ assertEquals(-Infinity, x /= ((x-(2232683614.320658))*(((tmp = 195551174, tmp)*((((739595970)>>>(tmp = -2218890946.8788786, tmp))>>>(((tmp = -240716255.22407627, tmp)&(((((1598029916.3478878)|((tmp = -881749732, tmp)+(x>>x)))^(4443059))<<(((tmp = 2453020763, tmp)+((x>>>(tmp = -1904203813, tmp))&(-355424604.49235344)))<<(tmp = 2814696070, tmp)))%((tmp = -250266444, tmp)>>>(((((2710614972)&(((tmp = 910572052.6994087, tmp)^(tmp = -1028443184.3220406, tmp))/((-2718010521)^(tmp = 676361106, tmp))))|x)^(-1326539884))>>(-1573782639.7129154)))))/(tmp = 1923172768, tmp)))>>>(tmp = -2858780232.4886074, tmp)))/((((((-2060319376.353397)%x)>>(tmp = -3122570085.9065285, tmp))/(tmp = -1499018723.8064275, tmp))*((-655257391)<<x))>>x))));
+ assertEquals(NaN, x += ((3059633304)%((((tmp = 2538190083, tmp)*((tmp = -2386800763.356364, tmp)/x))&(1341370996))%(-2929765076.078223))));
+ assertEquals(NaN, x %= ((x&(347774821))>>>(462318570.2578629)));
+ assertEquals(NaN, x *= ((2829810152.071517)*(tmp = 768565684.6892327, tmp)));
+ assertEquals(NaN, x -= x);
+ assertEquals(0, x >>>= (x&(tmp = 1786182552, tmp)));
+ assertEquals(973967377, x ^= ((tmp = 2115869489.836838, tmp)&(994956497)));
+ assertEquals(985246427.4230617, x += (11279050.423061728));
+ assertEquals(985246427, x &= x);
+ assertEquals(0, x >>= ((tmp = 1090502660.1867907, tmp)>>((-1599370623.5747645)-(tmp = -1321550958, tmp))));
+ assertEquals(0, x %= (tmp = -2386531950.018572, tmp));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x >>>= (tmp = -1535987507.682257, tmp));
+ assertEquals(-0, x /= (-2570639987));
+ assertEquals(-542895632, x |= (tmp = -542895632, tmp));
+ assertEquals(-33930977, x >>= (tmp = -861198108.1147206, tmp));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x ^= (x*(-608154714.1872904)));
+ assertEquals(-140011520, x |= ((tmp = 377418995, tmp)<<((1989575902)>>(tmp = -2558458031.066773, tmp))));
+ assertEquals(-140026048, x -= ((((tmp = 1465272774.7540011, tmp)<<((2164701398)<<(tmp = -818119264, tmp)))>>((tmp = -1490486001, tmp)>>(664410099.6412607)))>>(x>>>(((tmp = -2438272073.2205153, tmp)%(tmp = 2142162105.4572072, tmp))-(tmp = 2259040711.6543813, tmp)))));
+ assertEquals(39214588236996610, x *= (x<<(-401696127.06632423)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x %= x);
+ assertEquals(0, x *= ((tmp = -1709874807.176726, tmp)&(-2786424611)));
+ assertEquals(-1320474063.3408537, x += (tmp = -1320474063.3408537, tmp));
+ assertEquals(88, x >>>= (tmp = -3179247911.7094674, tmp));
+ assertEquals(1606348131, x += ((tmp = 1555621121.5726175, tmp)|(-3026277110.9493155)));
+ assertEquals(200793516, x >>>= x);
+ assertEquals(-2952688672.1074514, x -= (tmp = 3153482188.1074514, tmp));
+ assertEquals(1342278624, x >>>= ((x>>>((tmp = 1264475713, tmp)-(-913041544)))>>>((tmp = 2008379930, tmp)%(tmp = 3105129336, tmp))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x /= (tmp = 788363717, tmp));
+ assertEquals(430466213, x -= (tmp = -430466213, tmp));
+ assertEquals(164757385222499550, x *= (tmp = 382741735, tmp));
+ assertEquals(164757385222499550, x %= (((tmp = 1974063648, tmp)%((806015603)>>>x))*((tmp = 2836795324, tmp)<<(tmp = -1785878767, tmp))));
+ assertEquals(-190957725.86956096, x /= (x^((-2939333300.066044)-(x|(-2085991826)))));
+ assertEquals(-190957725.86956096, x %= (tmp = -948386352, tmp));
+ assertEquals(0.6457336106922105, x /= (-295722141));
+ assertEquals(0, x |= ((415991250)&((x>>(tmp = -3188277823, tmp))<<(511898664.1008285))));
+ assertEquals(0, x &= ((793238922)|x));
+ assertEquals(-1576701979, x ^= (2718265317));
+ assertEquals(-49271937, x >>= x);
+ assertEquals(-49271937, x |= x);
+ assertEquals(-49271937, x &= x);
+ assertEquals(775316382, x -= (-824588319));
+ assertEquals(912498176, x <<= (tmp = -2223542776.836312, tmp));
+ assertEquals(0, x -= (x&((tmp = 1999412385.1074471, tmp)/(-1628205254))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= (-768730139.7749677));
+ assertEquals(-1861304245, x |= (((5128483)^(((tmp = -1768372004, tmp)/(x^(tmp = 1310002444.757094, tmp)))*((tmp = 188242683.09898067, tmp)^(tmp = -2263757432, tmp))))^((tmp = 2223246327, tmp)*((tmp = -2360528979, tmp)-((tmp = 2442334308, tmp)>>(458302081))))));
+ assertEquals(1, x /= x);
+ assertEquals(2, x += x);
+ assertEquals(1, x /= x);
+ assertEquals(0, x ^= x);
+ assertEquals(-0, x *= (-1852374359.3930533));
+ assertEquals(0, x <<= (tmp = 1223645195.148961, tmp));
+ assertEquals(1789655087, x |= ((-2505312209.770559)>>x));
+ assertEquals(-65568768, x <<= x);
+ assertEquals(4229398528, x >>>= x);
+ assertEquals(-8408187, x |= (-3029781627));
+ assertEquals(-8408187, x |= (((2322165037)-((tmp = -1424506897.362995, tmp)%x))&x));
+ assertEquals(-7884926, x += (x>>>(x|(2738095820))));
+ assertEquals(-7884926, x %= (576507013));
+ assertEquals(751801768, x ^= (tmp = -750241238, tmp));
+ assertEquals(-1986010067668600800, x *= (tmp = -2641667195, tmp));
+ assertEquals(1921196240, x ^= (x%(-1954178308)));
+ assertEquals(847388880, x ^= ((tmp = 1632856124, tmp)&((tmp = -1536309755, tmp)<<(tmp = -3158362800, tmp))));
+ assertEquals(-469662000.6651099, x += (tmp = -1317050880.6651099, tmp));
+ assertEquals(-812358332, x ^= ((-2832480471)>>>(2016495937)));
+ assertEquals(21, x ^= (((tmp = 1815603134.2513008, tmp)/((tmp = 147415927, tmp)%(-1059701742)))+x));
+ assertEquals(-2844409139.792712, x += (tmp = -2844409160.792712, tmp));
+ assertEquals(177070, x >>>= x);
+ assertEquals(0, x %= x);
+ assertEquals(0, x >>= x);
+ assertEquals(1459126376, x ^= (tmp = -2835840920, tmp));
+ assertEquals(1459126376, x %= (-1462864282));
+ assertEquals(0, x >>>= (tmp = 2922724319, tmp));
+ assertEquals(338995506, x ^= (338995506.6411549));
+ assertEquals(336896258, x &= (2635904967));
+ assertEquals(336634112, x -= (x&(tmp = 1659656287, tmp)));
+ assertEquals(NaN, x %= (x-x));
+ assertEquals(NaN, x /= (tmp = -674606200, tmp));
+ assertEquals(NaN, x %= ((x|(2788108542))/(x+(tmp = 600941473, tmp))));
+ assertEquals(0, x >>>= ((-1858251597.3970242)>>>x));
+ assertEquals(1951294747, x |= (tmp = 1951294747, tmp));
+ assertEquals(1951294747, x &= x);
+ assertEquals(-153190625, x |= (-1500095737));
+ assertEquals(23467367587890624, x *= x);
+ assertEquals(346531290.1813514, x /= (((((-513617734.11148167)|x)/((tmp = -2042982150.1170752, tmp)%((x%((x%x)>>>(((-1369980151)&(((922678983)%(x&(tmp = -855337708, tmp)))-((tmp = -2717183760, tmp)>>>((1939904985.4701347)%(((tmp = -2473316858, tmp)&((tmp = -599556221.9046664, tmp)>>((tmp = -6352213, tmp)/x)))&x)))))%x)))/((tmp = -1842773812.8648412, tmp)>>>(((x>>>(tmp = 499774063, tmp))<<(((tmp = -1353532660.5755146, tmp)*(-3070956509))>>(((-905883994.0188017)>>(tmp = -16637173, tmp))<<((tmp = 471668537, tmp)*((tmp = -232036004.26637793, tmp)/x)))))&(tmp = 85227224, tmp))))))>>>(x|(-2528471983)))-((tmp = 1531574803, tmp)+((x>>>x)-(2889291290.158888)))));
+ assertEquals(-94.42225749399837, x /= (((tmp = 2381634642.1432824, tmp)>>(tmp = -2637618935, tmp))|(2307200473)));
+ assertEquals(-47, x >>= (1524333345.141235));
+ assertEquals(-2.8699253616435082e-8, x /= (1637673252));
+ assertEquals(0, x |= x);
+ assertEquals(1083427040, x += ((-2012055268)<<(tmp = -2192382589.6911573, tmp)));
+ assertEquals(1083427040, x %= (x*x));
+ assertEquals(2694039776, x += ((((-1740065704.9004602)<<(-736392934))%(2781638048.424092))>>>(x&x)));
+ assertEquals(-1600927520, x |= ((tmp = 2904430054.869525, tmp)*(((1054051883.4751332)*x)*((-939020743)-(tmp = 1636935481.1834455, tmp)))));
+ assertEquals(-1600927520, x -= (x%x));
+ assertEquals(3037584978216498700, x *= (tmp = -1897390694, tmp));
+ assertEquals(372598954.1823988, x %= (tmp = 1553763703.5082102, tmp));
+ assertEquals(-1476395008, x <<= ((x>>((tmp = 282496335.49494267, tmp)^((-1948623419.6947453)|((((((tmp = -1203306995, tmp)-(-5554612.355098486))>>>(1867254951.4836824))>>x)|(-695777865))/((-59122652.19377303)<<(-609999229.7448442))))))>>(x/(tmp = -1207010654.9993455, tmp))));
+ assertEquals(-2.2540185787941605, x /= (((tmp = 1364159859.9199843, tmp)*x)>>x));
+ assertEquals(-2, x |= x);
+ assertEquals(2241824008, x *= ((3174055292.962967)>>(((-2379151623.602476)>>(tmp = -1423760236, tmp))>>(tmp = -522536019.2225733, tmp))));
+ assertEquals(-2138158385, x ^= ((x>>((((1316131966.9180691)-((x*x)>>x))>>>x)>>((-2712430284)|(((((x<<(-616185937.6090865))-(((x-(tmp = 2957048661, tmp))<<(tmp = 617564839.888214, tmp))/(x%((tmp = -447175647.9393475, tmp)<<(2203298493.460617)))))-((x&((x<<(914944265))^(((-1294901094)*((tmp = 2512344795, tmp)+((((tmp = -1227572518, tmp)%(1831277766.4920158))*((x|x)^(tmp = 2515415182.6718826, tmp)))*x)))-(961485129))))>>>(tmp = 2079018304, tmp)))>>(tmp = 734028202, tmp))^(554858721.6149715)))))-((tmp = 1312985279.5114603, tmp)^(tmp = 2450817476.179955, tmp))));
+ assertEquals(2.759030298237921, x /= (x|(tmp = -775901745.3688724, tmp)));
+ assertEquals(8, x <<= x);
+ assertEquals(NaN, x %= (((x&((1792031228.831834)>>(-1174912501)))%(((-2351757750)+(tmp = -2610099430, tmp))*(-2811655968)))*(x&(tmp = -1881632878, tmp))));
+ assertEquals(0, x &= ((x*(616116645.7508612))^(2789436828.536846)));
+ assertEquals(0, x *= x);
+ assertEquals(35097452, x ^= ((tmp = 1023684579, tmp)%(((x|((tmp = -757953041, tmp)+(772988909)))+(tmp = -2934577578, tmp))>>>((tmp = -1973224283, tmp)>>>((x*(2244818063.270375))|(x-(-716709285)))))));
+ assertEquals(0.015207441433418992, x /= (2307913014.4056892));
+ assertEquals(-5865042.942076175, x -= (5865042.957283616));
+ assertEquals(-67719.94207617454, x %= (((1464126615.2493973)+(398302030.0108756))>>>x));
+ assertEquals(4294899577, x >>>= (x<<x));
+ assertEquals(-1, x >>= (tmp = 607447902, tmp));
+ assertEquals(-1, x >>= (3081219749.9119744));
+ assertEquals(6.53694303504065e-10, x /= (tmp = -1529767040.4034374, tmp));
+ assertEquals(6.53694303504065e-10, x %= ((tmp = 899070650.7190754, tmp)&(tmp = -1101166301, tmp)));
+ assertEquals(6.53694303504065e-10, x %= (tmp = -2207346460, tmp));
+ assertEquals(NaN, x %= (((x&x)>>x)%(((-10980184)+x)&(tmp = -1473044870.4729445, tmp))));
+ assertEquals(NaN, x -= x);
+ assertEquals(-1755985426, x ^= (tmp = 2538981870, tmp));
+ assertEquals(-13842, x %= ((((-2258237411.3816605)+(-1325704332.0531585))<<((tmp = -877665450.1877053, tmp)>>(((((2420989037)+(2084279990.6278818))*(-327869571.9348242))+x)^x)))>>>x));
+ assertEquals(1, x /= x);
+ assertEquals(1, x >>= ((2241312290)^(2859250114)));
+ assertEquals(0, x >>= x);
+ assertEquals(-1615631756, x |= (-1615631756.1469975));
+ assertEquals(-1615631756, x |= x);
+ assertEquals(-627245056, x <<= ((x*(tmp = -1308330685.5971081, tmp))|(tmp = 1479586158, tmp)));
+ assertEquals(-627245056, x |= x);
+ assertEquals(1786953888, x ^= (-1340096352.1839824));
+ assertEquals(1668014353, x -= (tmp = 118939535, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(-645681, x ^= ((-1322356629)>>(tmp = 1829870283, tmp)));
+ assertEquals(-1322354688, x <<= (-794779253));
+ assertEquals(-4310084378.672725, x += (-2987729690.6727247));
+ assertEquals(-8620168757.34545, x += x);
+ assertEquals(-8720421, x |= (tmp = -748107877.6417065, tmp));
+ assertEquals(-1508858270, x ^= (1500137913));
+ assertEquals(-0.825735756765112, x /= (1827289490.1767085));
+ assertEquals(1253449509.1742642, x += (((tmp = 1253449509.9576545, tmp)-(((tmp = 2860243975, tmp)+(367947569.85976696))>>(((((530960315)>>>((((x%(tmp = -2203199228, tmp))<<(x*(((tmp = -117302283, tmp)/(x-((2579576936)%(-1225024012))))&(tmp = -2857767500.1967726, tmp))))/((x/((tmp = -166066119, tmp)<<x))|x))>>>x))|(((2771852372)>>(((tmp = -3103692094.1463976, tmp)-(tmp = 2867208546.069278, tmp))>>>(702718610.1963737)))|(tmp = 2680447361, tmp)))>>x)>>(-2006613979.051014))))^((-1665626277.9339101)/(x<<(tmp = 342268763, tmp)))));
+ assertEquals(1693336701.1742642, x += (tmp = 439887192, tmp));
+ assertEquals(0.8479581831275719, x /= ((1171383583)+(((x&x)>>>(51482548.618915915))-(tmp = -825572595.1031849, tmp))));
+ assertEquals(28, x |= ((tmp = -2355932919.6737213, tmp)>>(tmp = -2395605638, tmp)));
+ assertEquals(0, x %= x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x <<= (x^((tmp = 2793423893.484949, tmp)*(1585074754.3250475))));
+ assertEquals(0, x >>= (x/(x-((957719861.9175875)&(1288527195)))));
+ assertEquals(0, x >>>= ((-1429196921.4432657)/x));
+ assertEquals(-852424225.734199, x -= (tmp = 852424225.734199, tmp));
+ assertEquals(-46674433, x |= ((tmp = -2335242963, tmp)*((2135206646.2614377)>>(tmp = 505649511.8292929, tmp))));
+ assertEquals(2944662357, x += (tmp = 2991336790, tmp));
+ assertEquals(1404, x >>>= (849155189.1503456));
+ assertEquals(-846755170, x ^= (tmp = -846753822.4471285, tmp));
+ assertEquals(52615, x >>>= ((-517068110)+x));
+ assertEquals(1475021859.9916897, x += (tmp = 1474969244.9916897, tmp));
+ assertEquals(0, x %= x);
+ assertEquals(0, x %= ((539583595.8244679)*(tmp = 1469751690.9193692, tmp)));
+ assertEquals(0, x &= (807524227.2057163));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x -= (x^((tmp = -362481588, tmp)%(2611296227))));
+ assertEquals(NaN, x *= x);
+ assertEquals(0, x >>= ((-2519875630.999908)<<x));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x += (((tmp = 2485209575, tmp)>>(tmp = 2326979823, tmp))%(x-(((-1296334640.7476478)&x)<<x))));
+ assertEquals(0, x >>= (((tmp = 1370704131, tmp)^((((tmp = 793217372.7587746, tmp)>>(((-1455696484.109328)|(((((-2186284424.5379324)<<(tmp = 3052914152.254852, tmp))-(x>>(tmp = 3121403408, tmp)))+((778194280)-(((((tmp = 2398957652, tmp)-(x+(((-2592019996.937958)>>((tmp = 1648537981, tmp)>>x))<<(-677436594))))<<(39366669.09012544))|((tmp = 3133808408.9582872, tmp)-(-2987527245.010673)))*x)))|((tmp = -2178662629, tmp)<<x)))^(((tmp = 909652440.3570575, tmp)%(-2572839902.6852217))%(-1879408081))))*(tmp = -2910988598, tmp))&(((x^x)>>(2822040993))|((x*x)^(((1072489842.6785052)|(x-(((464054192.7390214)^x)<<(tmp = -2754448095, tmp))))*((tmp = -1544182396, tmp)/(tmp = -3198554481, tmp)))))))^(tmp = 1946162396.9841106, tmp)));
+ assertEquals(371272192, x |= (((x^((x-(x/x))&(tmp = 2370429394, tmp)))-(tmp = -403692829, tmp))*(tmp = 2808636109, tmp)));
+ assertEquals(929786482, x |= ((729966239.8987448)^(x-((tmp = 120127779, tmp)^((tmp = -3088531385, tmp)>>>((x+((tmp = 2364833601, tmp)>>>(((599149090.6666714)>>(tmp = 2838821032, tmp))%(tmp = -662846011, tmp))))-(tmp = 1168491221.1813436, tmp)))))));
+ assertEquals(-681121542, x += ((-1610909505.998718)^((tmp = -957338882, tmp)>>>(tmp = 1935594133.6531684, tmp))));
+ assertEquals(-2147483648, x <<= ((tmp = 15161708, tmp)|(2453975670)));
+ assertEquals(-2147483648, x >>= x);
+ assertEquals(0, x <<= (2080486058));
+ assertEquals(0, x &= (((x&(tmp = -767821326, tmp))/((tmp = 1877040536, tmp)>>>(tmp = 2378603217.75597, tmp)))*(-1601799835)));
+ assertEquals(0, x %= (-1820240383));
+ assertEquals(1621233920, x ^= ((tmp = 820230232, tmp)*(1727283900)));
+ assertEquals(1621233920, x |= (x>>>x));
+ assertEquals(1621233931, x += ((tmp = 794966194.9011587, tmp)>>(tmp = -597737830.5450518, tmp)));
+ assertEquals(1621276543, x |= (((x^((2354444886)+(tmp = 685142845.4708651, tmp)))-(tmp = 790204976.9120214, tmp))>>>((((tmp = -2792921939, tmp)/(((((tmp = -80705524, tmp)<<x)-(((((((tmp = 1951577216.379527, tmp)>>>x)%((-529882150)>>>(tmp = -1682409624, tmp)))<<((-42043756.29025769)-(-1803729173.6855814)))/(2937202170.118023))*(tmp = -1998098798.5722106, tmp))*(tmp = -2996229463.904228, tmp)))&x)>>>(-301330643)))/(-2858859382.0050273))-(tmp = 1571854256.0740635, tmp))));
+ assertEquals(810638271, x >>>= (x/(1553632833)));
+ assertEquals(810638271, x <<= (tmp = -1467397440, tmp));
+ assertEquals(-2147483648, x <<= x);
+ assertEquals(871068871, x ^= (tmp = 3018552519, tmp));
+ assertEquals(-1073743881, x |= ((tmp = 2294122324.020989, tmp)|(tmp = -1799706842.4493146, tmp)));
+ assertEquals(-77816868, x += (((-2225296403)&x)>>(tmp = -2667103424.445239, tmp)));
+ assertEquals(-1215889, x >>= (tmp = 1876107590.8391647, tmp));
+ assertEquals(-2431778, x += x);
+ assertEquals(4292535518, x >>>= (((x>>(-1825580683))/x)%x));
+ assertEquals(4292802560, x -= (x|(1492864090)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x %= (tmp = 2173121205, tmp));
+ assertEquals(0, x *= (x>>x));
+ assertEquals(1565261471, x |= ((1565261471.323931)>>>x));
+ assertEquals(0, x -= x);
+ assertEquals(-86980804, x |= (-86980804));
+ assertEquals(-698956484, x -= (((((2754713793.1746016)*(((((-1514587465.0698888)>>(tmp = -1307050817, tmp))/(tmp = 2368054667.438519, tmp))*(-1908125943.5714772))<<(x>>>(-357164827.4932244))))+(1257487617))<<(2954979945))&(612330472)));
+ assertEquals(-1073741824, x <<= x);
+ assertEquals(54497747, x ^= (-1019244077.098908));
+ assertEquals(54501375, x |= (((tmp = 1944912427, tmp)>>>x)%x));
+ assertEquals(0, x -= x);
+ assertEquals(0, x -= x);
+ assertEquals(-0, x *= (-1748215388));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>>= (((tmp = 988769112, tmp)%(tmp = -3133658477, tmp))<<x));
+ assertEquals(0, x %= (1685221089.2950323));
+ assertEquals(0, x >>>= (x+((793467168)-(tmp = 135877882, tmp))));
+ assertEquals(0, x %= ((tmp = -2406801984, tmp)%(tmp = -987618172, tmp)));
+ assertEquals(0, x *= ((-2943444887.953456)|(tmp = -2327469738.4544783, tmp)));
+ assertEquals(0, x >>= x);
+ assertEquals(-145484729.70167828, x += (tmp = -145484729.70167828, tmp));
+ assertEquals(1140855872, x &= (x^(tmp = 3151437967.965556, tmp)));
+ assertEquals(1486808408, x += (tmp = 345952536, tmp));
+ assertEquals(107846582.36594129, x %= (-1378961825.6340587));
+ assertEquals(-642031616, x <<= (x+x));
+ assertEquals(151747770.95108718, x *= (x/(tmp = 2716379907, tmp)));
+ assertEquals(192723456, x <<= (tmp = -1731167384, tmp));
+ assertEquals(2151208003, x -= ((-2151208003)+x));
+ assertEquals(1, x /= x);
+ assertEquals(1, x |= x);
+ assertEquals(1996766603, x |= (1996766602));
+ assertEquals(895606123, x ^= (tmp = 1113972960.966081, tmp));
+ assertEquals(-1500036886, x ^= (tmp = 2482412929, tmp));
+ assertEquals(-1542644247, x ^= (x>>>((tmp = 51449105, tmp)>>>(((-2057313176)*x)/(-1768119916)))));
+ assertEquals(-1496074063273093600, x *= ((tmp = 786152274, tmp)^(387292498)));
+ assertEquals(-794329073, x %= (((tmp = -2314637675.617696, tmp)*((((x*(411053423.29070306))-(2889448433.4240828))/((-970630131)/(tmp = -2886607600.7423067, tmp)))<<(tmp = 1263617112.9362245, tmp)))|(2816980223.8209996)));
+ assertEquals(2468008436047106600, x *= (tmp = -3107035257.725115, tmp));
+ assertEquals(3040956928, x >>>= ((tmp = 1514372119.1787262, tmp)*(3169809008)));
+ assertEquals(-19, x >>= (tmp = -266966022.10604453, tmp));
+ assertEquals(-1.6505580654964654e-8, x /= ((-3143841480)>>(x-x)));
+ assertEquals(-2.2420284729165577e-7, x *= (x*((((703414102.2523813)%(tmp = 2989948152, tmp))-((-1583401827.2949386)^((tmp = -1916731338, tmp)%((331500653.3566053)|(((tmp = 29865940, tmp)+((tmp = -2294889418.6764183, tmp)<<(tmp = -1558629267.255229, tmp)))>>>(x*(x+x)))))))|((988977957)&(-2986790281)))));
+ assertEquals(0, x ^= (x/(tmp = 781117823.345541, tmp)));
+ assertEquals(NaN, x *= (((x^((((tmp = -2969290335, tmp)+(((((tmp = -175387021, tmp)&(tmp = -1080807973, tmp))<<(tmp = -2395571076.6876855, tmp))|((tmp = -1775289899.4106793, tmp)^x))|(-2963463918)))*(tmp = -1761443911, tmp))^(tmp = 847135725, tmp)))<<((146689636)<<x))%x));
+ assertEquals(0, x ^= x);
+ assertEquals(1720182184, x -= (((tmp = 3184020508, tmp)|((-489485703)+(tmp = -2644503573, tmp)))&(tmp = 2575055579.6375213, tmp)));
+ assertEquals(1720182184, x >>= (x<<(-45408034)));
+ assertEquals(5.759243187540471e+27, x *= (((x&(1456298805))+(x<<(106573181)))*((566861317.2877743)+(2262937360.3733215))));
+ assertEquals(5.759243187540471e+27, x -= (tmp = -1365873935, tmp));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>= (1960073319.3465362));
+ assertEquals(0, x <<= x);
+ assertEquals(560463904, x += ((tmp = 1844076589.9286406, tmp)&((((((-691675777.5800121)|(-745631201))|x)+(tmp = 1504458593.2843904, tmp))-x)<<x)));
+ assertEquals(-513210271, x -= (x|(1052702623.7761713)));
+ assertEquals(3781757025, x >>>= ((-1346666404.362477)*(tmp = 2798191459, tmp)));
+ assertEquals(1080100929, x &= (1122097879.882534));
+ assertEquals(1276833905.8093092, x *= ((1276833905.8093092)/x));
+ assertEquals(1276833905.8093092, x %= (1796226525.7152414));
+ assertEquals(1276833905, x <<= (((tmp = -491205007.83412814, tmp)*(tmp = 1496201476.496839, tmp))>>(x+((tmp = -854043282.114594, tmp)-((x|(tmp = -807842056, tmp))*x)))));
+ assertEquals(1276833905, x %= (((-1870099318)>>>(((tmp = -2689717222, tmp)/(248095232))/(tmp = 1036728800.5566598, tmp)))&(((((857866837)>>(tmp = 3034825801.740485, tmp))|(-1676371984))>>>(x<<x))%((-3035366571.0221004)*(1578324367.8819473)))));
+ assertEquals(1, x /= x);
+ assertEquals(2819223656.189109, x += (2819223655.189109));
+ assertEquals(-1475743640, x >>= (((tmp = 2586723314.38089, tmp)/(x&(tmp = -697978283.9961061, tmp)))<<(x%((-1167534676)>>(x^((tmp = -284763535, tmp)*((x%x)&((((tmp = 2916973220.726839, tmp)%x)/(tmp = -1338421209.0621986, tmp))|((tmp = -834710536.803335, tmp)%x)))))))));
+ assertEquals(-3267683406, x -= (tmp = 1791939766, tmp));
+ assertEquals(-2090420900700614100, x *= (639725653));
+ assertEquals(-1540353536, x %= ((-1800269105)<<((((x&(((tmp = 1135087416.3945065, tmp)^(613708290))>>x))>>(tmp = -1234604858.7683473, tmp))^(2404822882.7666225))>>>((tmp = -287205516, tmp)-((1648853730.1462333)^((x+(x%((tmp = 359176339, tmp)%((2856479172)<<(tmp = -1995209313, tmp)))))^(((tmp = 2857919171.839304, tmp)>>>(tmp = 2779498870, tmp))>>x)))))));
+ assertEquals(-2093767030, x ^= (654554250.498078));
+ assertEquals(1, x >>>= ((tmp = -166296226.12181997, tmp)^(x/x)));
+ assertEquals(-1487427474, x -= ((x<<x)|(1487427475.4063978)));
+ assertEquals(-1487427470.562726, x += ((-1226399959.8267038)/((tmp = 2172365551, tmp)<<x)));
+ assertEquals(-3457859227618939400, x *= (tmp = 2324724597.3686075, tmp));
+ assertEquals(396221312, x >>= (-1354035390));
+ assertEquals(0, x %= x);
+ assertEquals(0, x &= (tmp = 2733387603, tmp));
+ assertEquals(1485905453, x |= ((((tmp = -1321532329.304437, tmp)&((((tmp = 1817382709.4180388, tmp)%(((tmp = 2089156555.7749293, tmp)-(-1555460267))|(tmp = 717392475.9986715, tmp)))%(tmp = 1976713214, tmp))^x))>>>x)+(tmp = -2812404197.002721, tmp)));
+ assertEquals(1485905453, x |= x);
+ assertEquals(-997658264, x <<= (-1409757949.6038744));
+ assertEquals(-997657290, x -= ((-2041106361)>>(tmp = -2014750507, tmp)));
+ assertEquals(-2138512124, x &= (tmp = 2565597060, tmp));
+ assertEquals(8422400, x &= ((-2819342693.5172367)*(tmp = 1441722560, tmp)));
+ assertEquals(111816531.81703067, x -= (-103394131.81703067));
+ assertEquals(59606682.673836395, x *= ((tmp = -1451690098, tmp)/(x-(2835050651.717734))));
+ assertEquals(-119213365.34767279, x *= (x|((-2656365050)/((-66180492)+(tmp = 284225706.32323086, tmp)))));
+ assertEquals(-232839, x >>= (1694344809.435083));
+ assertEquals(-1, x >>= x);
+ assertEquals(1, x *= x);
+ assertEquals(1, x |= x);
+ assertEquals(0, x >>= (tmp = 397239268, tmp));
+ assertEquals(-1525784563, x -= (tmp = 1525784563, tmp));
+ assertEquals(-153.62740888512675, x /= (((tmp = -2040622579.5354173, tmp)*(tmp = -1149025861.549324, tmp))%(((tmp = 2981701364.0073133, tmp)*(tmp = 2993366361, tmp))|(x|(tmp = 1800299979, tmp)))));
+ assertEquals(-1671795135, x &= (-1671795135.6173766));
+ assertEquals(-4253, x |= ((((x*((1533721762.8796673)<<((tmp = 1026164775.0081646, tmp)<<x)))<<(((x-((((x>>((((((tmp = -481536070.7067797, tmp)&(tmp = 1663121016, tmp))>>>(-2974733313.5449667))+(tmp = -493019653, tmp))>>x)&(tmp = 879307404.8600142, tmp)))>>>x)%(x-(tmp = -1806412445.788453, tmp)))%x))<<(x<<(x+x)))+x))>>((tmp = -332473688.28477216, tmp)<<((tmp = 1701065928, tmp)+(((((tmp = -2407330783, tmp)+x)-((tmp = 584100783, tmp)%(tmp = -3077106506, tmp)))^x)>>x))))<<x));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>>= (1578470476.6074834));
+ assertEquals(0, x >>>= (974609751));
+ assertEquals(-120, x += (x-((tmp = -245718438.0842378, tmp)>>>(tmp = -1870354951, tmp))));
+ assertEquals(-6.134465505515781e-8, x /= (1956160645));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x *= (tmp = -399718472.70049024, tmp));
+ assertEquals(-1803198769.8413258, x += (-1803198769.8413258));
+ assertEquals(988624943, x ^= ((((tmp = 320776739.5608537, tmp)*(((tmp = -983452570.3150327, tmp)^x)&(tmp = -3181597938, tmp)))-(tmp = -1367913740.9036021, tmp))/(((tmp = -535854933.2943456, tmp)-(717666905.8122432))>>>(((((x^(tmp = 380453258.60062766, tmp))^(tmp = -1242333929, tmp))/((tmp = 1072416261, tmp)+(((2090466933)*(x*(tmp = -386283072, tmp)))|((tmp = 789259942, tmp)<<(tmp = -1475723636.1901488, tmp)))))>>>x)%((x>>(tmp = -1243048658.3818703, tmp))|((((((tmp = -619553509, tmp)|x)/(878117279.285609))|((x<<(x>>>(tmp = -749568437.7390883, tmp)))*x))/(tmp = 1674804407, tmp))-(x*(tmp = 1528620873, tmp))))))));
+ assertEquals(988625135, x |= (x>>>(tmp = 2402222006, tmp)));
+ assertEquals(988625135, x %= (-2691094165.990094));
+ assertEquals(0, x %= x);
+ assertEquals(-0, x *= (tmp = -1409904262, tmp));
+ assertEquals(-0, x /= ((1176483512.8626208)<<x));
+ assertEquals(0, x &= ((((1677892713.6240005)^(tmp = 2575724881, tmp))^(tmp = -2935655281.208194, tmp))*(216675668)));
+ assertEquals(0, x >>= (tmp = -1296960457, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x <<= (x>>(-3127984289.9112387)));
+ assertEquals(0, x %= ((tmp = 190018725.45957255, tmp)<<((x>>>x)/x)));
+ assertEquals(0, x /= (1185681972));
+ assertEquals(0, x &= ((tmp = -1285574617, tmp)>>x));
+ assertEquals(0, x >>>= ((tmp = 2498246277.2054763, tmp)+(((tmp = 924534435, tmp)&x)>>(tmp = 1379755429, tmp))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x /= (3093439341));
+ assertEquals(0, x *= (x>>>x));
+ assertEquals(0, x &= (tmp = 551328367, tmp));
+ assertEquals(-0, x /= (-3153411714.834353));
+ assertEquals(1217585288, x ^= (tmp = -3077382008.637764, tmp));
+ assertEquals(-639702017, x |= ((tmp = -640922633, tmp)%(tmp = -879654762, tmp)));
+ assertEquals(-1645297680, x <<= (tmp = 1418982820.8182912, tmp));
+ assertEquals(-1.4059558868398736, x /= (1170234212.4674253));
+ assertEquals(-2650856935.66554, x *= (1885448157));
+ assertEquals(1326259953.26931, x *= (((x>>(x|(-496195134.78045774)))+((2029515886)%(tmp = 1148955580, tmp)))/(tmp = -1760016519, tmp)));
+ assertEquals(0, x &= (((((-273334205)+(tmp = 797224093.682485, tmp))/x)>>>((((tmp = -887577414, tmp)/x)+x)%(tmp = 720417467, tmp)))^(((x-(tmp = -309071035, tmp))>>(-3123114729.33889))/x)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= ((tmp = -2243857462, tmp)/((((((2642220700.6673346)&x)*(tmp = 1454878837, tmp))|((-25825087.30002737)%(851535616.3479034)))<<(tmp = -697581582, tmp))%(tmp = 2248990486, tmp))));
+ assertEquals(0, x >>= (((x|(((tmp = -220437911, tmp)&((((255690498)*(((2993252642)>>>(tmp = 300426048.0338713, tmp))>>x))&((-364232989)+(x<<(-1824069275))))%(x+(tmp = 2696406059.026349, tmp))))+((tmp = 2911683270, tmp)/(tmp = 2718991915, tmp))))*(x/(((tmp = -982851060.0744538, tmp)^((-2903383954)<<((-85365803.80553412)^x)))%(1489258330.5730634))))>>>x));
+ assertEquals(0.7805921633088815, x += (((-1886920875)/(-2417294156.5304217))%(tmp = -1176793645.8923106, tmp)));
+ assertEquals(0, x <<= x);
+ assertEquals(-2215008905, x -= (2215008905));
+ assertEquals(1931542900, x &= (-215923724.72133207));
+ assertEquals(907191462, x ^= (-3133954606.357727));
+ assertEquals(453595731, x >>>= (((tmp = 2726241550, tmp)/(tmp = -332682163, tmp))*((((tmp = 2500467531, tmp)>>>(((x<<(tmp = -1847200310.4863105, tmp))/x)^x))+x)<<(191688342.22953415))));
+ assertEquals(-0.21671182880645923, x /= ((((-1169180683.1316955)%x)>>>(1650525418))^((2198033206.797462)&((-6913973.910871983)%(1758398541.8440342)))));
+ assertEquals(-375102237.1603561, x += (tmp = -375102236.9436443, tmp));
+ assertEquals(1, x &= (((84374105.89811504)|((tmp = -2480295008.926951, tmp)>>((605043461)>>(tmp = -2495122811, tmp))))>>(-2129266088)));
+ assertEquals(1, x |= x);
+ assertEquals(0.0000024171579540208214, x /= (((-2600416098)>>(-2076954196))^x));
+ assertEquals(0.0000024171579540208214, x %= (tmp = -2632420148.815531, tmp));
+ assertEquals(1809220936.0126908, x -= (-1809220936.0126884));
+ assertEquals(1682452118.2686126, x += (((2358977542)<<(x/(tmp = -2862107929, tmp)))+(x+(x%((-3101674407)/(((x*((x>>(tmp = 630458691.3736696, tmp))>>>(tmp = -852137742, tmp)))/x)-((-1875892391.1022017)&(tmp = -1027359748.9533749, tmp))))))));
+ assertEquals(1682452118, x <<= (((tmp = -80832958.07816291, tmp)>>x)%(x-((x^(x<<(tmp = -156565345, tmp)))|((tmp = -1208807363.727137, tmp)/(tmp = 2614737513.304538, tmp))))));
+ assertEquals(6572078, x >>= (-1573364824));
+ assertEquals(13144156, x += x);
+ assertEquals(1731678184, x ^= ((tmp = 593370804.9985657, tmp)|(-3124896848.53273)));
+ assertEquals(845545, x >>>= (tmp = -605637621.2299933, tmp));
+ assertEquals(-1383361088, x ^= (tmp = -1383632087, tmp));
+ assertEquals(-82545896480031520, x += ((x+(1023183845.7316296))*((((tmp = 576673669, tmp)>>(((-584800080.1625061)/(2388147521.9174623))+((((x>>>(-905032341.5830328))^(tmp = -2170356357, tmp))-x)+((136459319)+(-1799824119.689473)))))|x)&(tmp = -2688743506.0257063, tmp))));
+ assertEquals(-895206176, x |= x);
+ assertEquals(-0, x %= x);
+ assertEquals(1791306023, x ^= ((tmp = -3219480856, tmp)+(tmp = 715819582.0181161, tmp)));
+ assertEquals(1791306023, x &= x);
+ assertEquals(2725167636753240600, x *= (1521330025));
+ assertEquals(-281190679, x |= (tmp = -1422045975.798171, tmp));
+ assertEquals(-281190679, x += (x%x));
+ assertEquals(-2342097426.906673, x -= (tmp = 2060906747.906673, tmp));
+ assertEquals(-4651462701.906673, x -= (2309365275));
+ assertEquals(1878, x >>>= (2544974549.345834));
+ assertEquals(1964, x += (x&((1067649861)>>(182139255.7513579))));
+ assertEquals(2209, x += (x>>(tmp = -1775039165, tmp)));
+ assertEquals(0, x -= x);
+ assertEquals(-0, x /= (tmp = -1634697185, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x >>>= ((tmp = 3075747652, tmp)&(tmp = 819236484, tmp)));
+ assertEquals(0, x /= ((1276203810.476657)%(-2434960500.784484)));
+ assertEquals(0, x >>>= (tmp = -503633649, tmp));
+ assertEquals(-982731931, x |= (-982731931));
+ assertEquals(-1965463862, x += x);
+ assertEquals(-0.221469672913716, x %= ((tmp = -1742292120, tmp)/x));
+ assertEquals(-0.221469672913716, x %= (-2021391941.1839576));
+ assertEquals(0, x <<= (((((tmp = -2802447851, tmp)>>((2534456072.6518855)&x))%(tmp = 2841162496.610816, tmp))<<((89341820)/(2565367990.0552235)))>>(tmp = 2700250984.4830647, tmp)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>= ((tmp = -636189745, tmp)>>>(x/(((tmp = 2634252476, tmp)%(2026595795))>>(tmp = -2048078394.743723, tmp)))));
+ assertEquals(NaN, x %= ((x%((((x%((tmp = -2583207106, tmp)&x))|(190357769))<<(tmp = 595856931.2599536, tmp))%x))*((-2433186614.6715775)<<((2856869562.1088696)^(tmp = 1112328003, tmp)))));
+ assertEquals(1621713910, x |= (tmp = 1621713910.0282416, tmp));
+ assertEquals(3243427820, x += x);
+ assertEquals(0, x *= (x&(x-x)));
+ assertEquals(0, x >>>= (((2871235439)<<((x+((tmp = -1319445828.9659343, tmp)+(tmp = 1595655077.959171, tmp)))>>(tmp = -86333903, tmp)))-(x/(2907174373.268768))));
+ assertEquals(0, x >>= (-1091774077.2173789));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x *= (tmp = 1976023677.7015994, tmp));
+ assertEquals(NaN, x -= (-3013707698));
+ assertEquals(NaN, x += ((x+(((tmp = -3119865782.9691515, tmp)<<(1327383504.0158405))^(((-143382411.7239611)>>>((-2157016781)+(((-335815848)/x)<<(tmp = 1953515427, tmp))))&(-2715729178))))/(413738158.2334299)));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x += (-845480493));
+ assertEquals(-789816013, x |= (tmp = -789816013.129916, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= (3032573320));
+ assertEquals(47630, x ^= ((1086705488)%((x^(tmp = -1610832418, tmp))>>>(tmp = 1136352558, tmp))));
+ assertEquals(47630, x >>= (tmp = 1035320352.4269229, tmp));
+ assertEquals(47630, x >>= ((((x^x)<<(x*((((x&((-1657468419)*((tmp = -674435523, tmp)&((tmp = 2992300334, tmp)|x))))*((tmp = -489509378.31950426, tmp)*(tmp = 2276316053, tmp)))>>>x)<<x)))%(tmp = -1209988989, tmp))/(tmp = -2080515253.3541622, tmp)));
+ assertEquals(3192518951.8129544, x += (3192471321.8129544));
+ assertEquals(648116457.8129544, x %= (-2544402494));
+ assertEquals(0, x -= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x *= (tmp = 30051865, tmp));
+ assertEquals(0, x ^= ((x&(((x&x)>>>(((((((x+(2319551861.0414495))>>>(tmp = -3099624461, tmp))^((((tmp = 1574312763, tmp)|x)>>>((-2723797246)&(tmp = -1993956152, tmp)))|(-1830179045)))|(((((((-2545698704.3662167)>>>x)-(((-79478653)|x)%(x+(x>>((tmp = 2386405508.2180576, tmp)/x)))))>>((((-1947911815.2808042)*((x+(368522081.2884482))-(tmp = 2452991210, tmp)))>>(343556643.1123545))>>((((tmp = 1869261547.537739, tmp)>>(3193214755))|x)&(x*(2027025120)))))<<((-1149196187)>>>(814378291.8374172)))+((((((((-160721403)/(2079201480.2186408))+((x|((((tmp = -299595483.16805863, tmp)>>>((x|((x+x)/(-2359032023.9366207)))<<(tmp = -3095108545, tmp)))>>((tmp = -1547963617.9087071, tmp)*(x>>x)))&((tmp = -1568186648.7499216, tmp)+(((2646528453)^(-2004832723.0506048))>>>(tmp = -3188715603.921877, tmp)))))+(tmp = 1578824724, tmp)))^x)^x)/(tmp = -985331362, tmp))|(tmp = 445135036, tmp))<<(tmp = -73386074.43413758, tmp)))+(((-1674995105.9837937)-(tmp = 1392915573, tmp))>>x)))%(tmp = 1215953864, tmp))&((tmp = -439264643.5238693, tmp)>>>x))+(((tmp = 2311895902, tmp)|(1604405793.6399229))&(tmp = -565192829, tmp))))-x))>>(-2455985321)));
+ assertEquals(0, x %= ((1177798817)>>(tmp = 2081394163.5420477, tmp)));
+ assertEquals(0, x >>>= ((x^(tmp = -41947528.33954811, tmp))>>(x>>>((tmp = 1367644771, tmp)+x))));
+ assertEquals(0, x %= ((x+((tmp = 163275724, tmp)<<((tmp = -514460883.3040788, tmp)+x)))|(tmp = -287112073.2482593, tmp)));
+ assertEquals(0, x &= (3067975906));
+ assertEquals(201342051, x |= (tmp = 201342051, tmp));
+ assertEquals(0, x %= (((((-2580351108.8990865)<<(tmp = 2675329316, tmp))&((1338398946)%((-1548041558)+((x>>(-1568233868.7366815))|((x>>((tmp = -1064582207, tmp)/(-1062237014)))>>(tmp = 854123209, tmp))))))<<(((989032887)*(1842748656))%(tmp = -1566983130, tmp)))-x));
+ assertEquals(-0, x /= (tmp = -828519512.617768, tmp));
+ assertEquals(0, x &= ((((1449608518)+(-1829731972))*(1828894311))*(((tmp = -1121326205.614264, tmp)^(-2057547855))<<(tmp = -2758835896, tmp))));
+ assertEquals(NaN, x %= ((tmp = -2138671333, tmp)%x));
+ assertEquals(0, x &= x);
+ assertEquals(665568613.0328879, x += (665568613.0328879));
+ assertEquals(317, x >>= (2627267349.735873));
+ assertEquals(0, x -= x);
+ assertEquals(0, x &= (((tmp = 3030611035, tmp)*(((tmp = 476143340.933007, tmp)>>(x-(2238302130.2331467)))|(x|x)))%(tmp = 320526262, tmp)));
+ assertEquals(0, x <<= (tmp = 729401206, tmp));
+ assertEquals(0, x >>>= (1721412276));
+ assertEquals(217629949.3530736, x += ((tmp = 217629949.3530736, tmp)%((-931931100.601475)%(x^(tmp = -2149340123.548764, tmp)))));
+ assertEquals(217629949.3530736, x %= (tmp = 2275384959.4243402, tmp));
+ assertEquals(0, x >>>= (1112677437.5524077));
+ assertEquals(0, x *= (500256656.7476063));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x &= (-1076968794));
+ assertEquals(0, x /= (tmp = 1774420931.0082943, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x %= (-2978890122.943079));
+ assertEquals(-0, x /= (tmp = -2954608787, tmp));
+ assertEquals(-800048201, x ^= ((tmp = -800048201.7227018, tmp)>>>((-2016227566.1480863)/(tmp = -2263395521, tmp))));
+ assertEquals(3333, x >>>= (-2038839052));
+ assertEquals(487957736.625432, x += (487954403.625432));
+ assertEquals(-1650983426, x |= (2643918270));
+ assertEquals(-1861867448, x &= (tmp = -251254199.12813115, tmp));
+ assertEquals(-7.934314690172143e-18, x %= ((((x^(-703896560.6519544))>>(tmp = -1853262409, tmp))/(tmp = -1168012152.177894, tmp))/(tmp = 837616075.1097361, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= (tmp = -2328150260.5399947, tmp));
+ assertEquals(-1954860020, x |= (tmp = 2340107276, tmp));
+ assertEquals(-1954860020, x >>= ((tmp = 159177341, tmp)*(x&(-705832619))));
+ assertEquals(-1954895727, x -= (x>>>((-1443742544.7183702)^((((tmp = 869581714.0137681, tmp)+x)^((x%(tmp = -1036566362.5189383, tmp))^(x%x)))>>x))));
+ assertEquals(1.0241361338078498, x /= (tmp = -1908824093.2692068, tmp));
+ assertEquals(16777216, x <<= (x*(((-1925197281)^(tmp = -1392300089.4750946, tmp))|x)));
+ assertEquals(-225882765524992, x *= (tmp = -13463662, tmp));
+ assertEquals(-1845493760, x |= x);
+ assertEquals(-1845493760, x %= (tmp = 3181618519.786825, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x /= (x>>>x));
+ assertEquals(NaN, x %= (((((tmp = -521176477, tmp)>>(((tmp = 370693623, tmp)/(((tmp = -1181033022.4136918, tmp)>>(x|(x*(2601660441))))+(tmp = -1696992780, tmp)))|(x|(-1197454193.198036))))>>>(((2512453418.3855605)+((((((tmp = 799501914, tmp)&(((1788580469.7069902)*(((((1476778529.5109258)<<(tmp = -1873387738.3541565, tmp))-((tmp = -521988584.7945764, tmp)*(-1598785351.3914914)))&(-1899161721.8061454))&((x/x)*(690506460))))>>>((tmp = 2255896398.840741, tmp)>>((tmp = -1331486014.6180065, tmp)+(-1159698058.534132)))))*((1112115365.2633948)&((x>>((x>>(-784426389.4693215))&(-492064338.97227573)))>>x)))^((x-((tmp = 2986028023, tmp)>>(tmp = 2347380320.00517, tmp)))*(tmp = -1463851121, tmp)))*(tmp = -1059437133, tmp))%(x-(tmp = 1238739493.7636225, tmp))))^(2029235174)))*(-1923899530))>>>x));
+ assertEquals(0, x >>>= (2848792983.510682));
+ assertEquals(0, x >>= (((tmp = 3042817032.705198, tmp)>>>x)&((((tmp = -829389221, tmp)-((2669682285.8576303)+(tmp = 1812236814.3082042, tmp)))^x)%((tmp = -2401726554, tmp)^((tmp = 2464685683, tmp)|(-2685039620.224061))))));
+ assertEquals(2069649722, x |= (2069649722.311271));
+ assertEquals(NaN, x %= (((((-68757739.39282179)&(-1382816369))/(3122326124))<<(x-(-507995800.3369653)))<<(((-1962768567.343907)+((tmp = 1357057125, tmp)/x))^(tmp = 1997617124, tmp))));
+ assertEquals(NaN, x += x);
+ assertEquals(0, x >>= (26895919));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x %= (tmp = 1092448030, tmp));
+ assertEquals(0, x <<= (tmp = -477672441.46258235, tmp));
+ assertEquals(0, x /= (2113701907));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x /= x);
+ assertEquals(1341078673, x |= (-2953888623));
+ assertEquals(1341078673, x &= x);
+ assertEquals(0, x %= x);
+ assertEquals(414817852.151006, x -= (-414817852.151006));
+ assertEquals(1006632960, x <<= ((((((126465614.8316778)+(x-(2511803375)))+(tmp = 1620717148.352402, tmp))*x)/(tmp = -3013745105.5275207, tmp))-((tmp = -418034061.6865432, tmp)/(-300492911))));
+ assertEquals(1055624813, x |= (tmp = 921407085, tmp));
+ assertEquals(-3, x |= ((((tmp = 1382397819.7507677, tmp)+(tmp = -111851147.7289567, tmp))+x)/((tmp = 247980405.7238742, tmp)^(tmp = -592156399.8577058, tmp))));
+ assertEquals(35161, x &= (((((((-2973570544.725141)*(tmp = -1244715638, tmp))+x)<<(x/((x>>>(-2143371615.073137))/(226072236))))%((x-(tmp = 1971392936, tmp))^(tmp = 2653103658, tmp)))%((tmp = 2828319571.7066674, tmp)>>((1528970502)^((tmp = -55869558, tmp)%x))))>>(889380585.6738582)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x *= (2749718750));
+ assertEquals(0, x >>>= ((((-1633495402.6252813)*(tmp = 2943656739.1108646, tmp))+(tmp = 977432165, tmp))&((tmp = -2338132019, tmp)*(408176349.8061733))));
+ assertEquals(-1778794752, x -= (((tmp = -1391412154.5199084, tmp)-((-3172342474)|x))&(1854366052)));
+ assertEquals(-1778794752, x %= (tmp = 2024807296.6901965, tmp));
+ assertEquals(-1114410.466337204, x %= ((tmp = -240344444.24487805, tmp)%(-47661164)));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>= (x>>x));
+ assertEquals(0, x *= x);
+ assertEquals(0, x /= ((-3134902611)|(tmp = -3131158951, tmp)));
+ assertEquals(-0, x /= (((tmp = 1430247610.634234, tmp)&x)+((tmp = -2047191110.8623483, tmp)-((((x%((((x/(tmp = -2599234213, tmp))|(tmp = 2650380060, tmp))|x)+x))>>>x)&(-1961373866))<<x))));
+ assertEquals(-718394682, x -= ((x|(tmp = 1764417670.8577194, tmp))%(1046022988)));
+ assertEquals(3576572614, x >>>= (((tmp = 2480472883.078992, tmp)<<x)>>((2035208402.8039393)&(tmp = 492980449, tmp))));
+ assertEquals(434034142, x %= (x&((x>>>(311110449.48751545))|(-243530647))));
+ assertEquals(524703439.3065736, x += (((tmp = 1392771723.3065736, tmp)%(x&x))%(tmp = -2199704930, tmp)));
+ assertEquals(373686272, x &= (x<<((tmp = 2103372351.9456532, tmp)%(tmp = -1367109519, tmp))));
+ assertEquals(373686272, x >>= x);
+ assertEquals(-0.12245430020241108, x /= (tmp = -3051638622.5907507, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(1, x %= (3095983855));
+ assertEquals(-1454736871, x ^= (x*(tmp = -1454736872, tmp)));
+ assertEquals(-1454736866, x ^= (((724989405.7338341)|(tmp = -2834298786.384371, tmp))>>>(tmp = -2029602148.1758833, tmp)));
+ assertEquals(-1454736866, x &= x);
+ assertEquals(-197394432, x <<= (tmp = -1562128975, tmp));
+ assertEquals(251658240, x <<= (tmp = 2126510950, tmp));
+ assertEquals(3295700610.703306, x -= (tmp = -3044042370.703306, tmp));
+ assertEquals(-51152917, x |= ((949179883.1784958)|(((tmp = -2046168220, tmp)>>(x/x))/(((835064313)*(tmp = 2197600689, tmp))^(((tmp = 2717104216, tmp)&x)<<(-1402661995.3845913))))));
+ assertEquals(-1549204421, x ^= ((((tmp = -481013711, tmp)>>>((tmp = 119589341.80209589, tmp)%(-995489985.2905662)))-(635717011))^(x+(x*x))));
+ assertEquals(-1078356672.3999934, x += (470847748.6000067));
+ assertEquals(1484987268.4638166, x += (tmp = 2563343940.86381, tmp));
+ assertEquals(277020804, x &= (tmp = 2532819117, tmp));
+ assertEquals(-2097118208, x <<= (x>>>x));
+ assertEquals(-2147483648, x <<= (tmp = 761285045, tmp));
+ assertEquals(2147483648, x >>>= x);
+ assertEquals(-935909870282997800, x *= ((-2583300643)|x));
+ assertEquals(-370753566.54721737, x %= (-1084543510.4524941));
+ assertEquals(-177, x >>= (-946264747.6588805));
+ assertEquals(-416077682, x ^= (tmp = 416077761, tmp));
+ assertEquals(NaN, x %= ((((tmp = 779607408, tmp)*(((tmp = -3007128117, tmp)*(851442866.6153773))+x))&(1283388806))/(-876363553)));
+ assertEquals(NaN, x %= (x/(tmp = -1668413939.652408, tmp)));
+ assertEquals(-1726405921, x ^= (tmp = -1726405921, tmp));
+ assertEquals(-1, x >>= ((3031008213.807012)>>x));
+ assertEquals(4294967295, x >>>= ((x>>>x)&(tmp = 2788082290, tmp)));
+ assertEquals(8544111670008449000, x *= (tmp = 1989331020.0417833, tmp));
+ assertEquals(268435456, x <<= (tmp = 3121736017.2098465, tmp));
+ assertEquals(-2.1011176170964474e+26, x -= (((tmp = 1392503299, tmp)*(tmp = 1446108825.1572113, tmp))*(x^(tmp = 372776014.213725, tmp))));
+ assertEquals(0, x |= x);
+ assertEquals(0, x >>= ((-112413907.70074797)*(-702798603)));
+ assertEquals(1829518838, x |= (tmp = -2465448458, tmp));
+ assertEquals(57172463, x >>= ((tmp = 2979642955.241792, tmp)%(tmp = -2464398693.291434, tmp)));
+ assertEquals(114344926, x += x);
+ assertEquals(113279134, x &= (2397742238.6877637));
+ assertEquals(54, x >>= (1908522709.6377516));
+ assertEquals(-2.966982919573829e-7, x /= (tmp = -182003070, tmp));
+ assertEquals(0, x <<= (-1078417156));
+ assertEquals(-147831390, x ^= (((-147831390)>>>x)+x));
+ assertEquals(0, x -= x);
+ assertEquals(-242221450.44696307, x -= (tmp = 242221450.44696307, tmp));
+ assertEquals(-484442900, x <<= (((tmp = -2033947265.088614, tmp)&x)/(x^(tmp = -2893953848, tmp))));
+ assertEquals(-3227648, x <<= (x<<((tmp = -193993010, tmp)*((983187830)|(3146465242.2783365)))));
+ assertEquals(-6455296, x += x);
+ assertEquals(-1771542585, x -= (x^(tmp = -1767335879, tmp)));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>>= ((((tmp = -1612864670.4532743, tmp)*(tmp = 786265765.210487, tmp))*((((tmp = -893735877.3250401, tmp)*((x^(tmp = -2804782464.233885, tmp))<<x))&(x-x))^x))<<x));
+ assertEquals(0, x -= (x>>>(-1648118674.380736)));
+ assertEquals(0, x >>= ((tmp = -2706058813.0028524, tmp)>>(2745047169)));
+ assertEquals(0, x += x);
+ assertEquals(0, x %= (-898267735.137356));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>= ((265527509)/((tmp = 2190845136.7048635, tmp)+((x>>x)>>>((x%(x-x))&((((-2080184609.8989801)&((-327231633)>>>((tmp = 864849136, tmp)%(((-524363239)*(((((tmp = 2245852565.3713694, tmp)&(1918365.8978698254))>>>(tmp = -2463081769, tmp))-(((2438244059.471446)|((((-135303645.38470244)*(-861663832.2253196))%(tmp = 1273185196.0261836, tmp))|((2261539338.832875)%((320267076.2363237)+x))))>>(tmp = -2731398821, tmp)))/(tmp = -1947938611, tmp)))^x))))>>(tmp = 833666235, tmp))|x))))));
+ assertEquals(-1116704570, x ^= (-1116704570));
+ assertEquals(1379561710, x ^= (tmp = -280362968.19654894, tmp));
+ assertEquals(-1673822208, x <<= x);
+ assertEquals(-1673822208, x |= (x<<(tmp = 1389479193.9038138, tmp)));
+ assertEquals(2559712, x >>>= (-2703763734.0354066));
+ assertEquals(2593499, x ^= (x>>>((tmp = 148668150.03291285, tmp)^(tmp = -1580360304, tmp))));
+ assertEquals(2070393855, x |= (tmp = -2227002907, tmp));
+ assertEquals(304197770, x &= (tmp = 2453257354, tmp));
+ assertEquals(304197770, x <<= ((-669331453.8814087)-(x^(x^(tmp = 33804899.98928583, tmp)))));
+ assertEquals(297068, x >>= x);
+ assertEquals(Infinity, x /= (x-x));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= ((tmp = 1723087085, tmp)%(2859382131.304421)));
+ assertEquals(0, x %= (((tmp = 2935439763, tmp)<<(-3163992768.637094))%(tmp = 67176733, tmp)));
+ assertEquals(0, x &= (tmp = 2480771277, tmp));
+ assertEquals(0, x >>>= (x+(tmp = -3168690063, tmp)));
+ assertEquals(0, x *= ((tmp = -1915275449.1806245, tmp)>>>((tmp = -1644482094.1822858, tmp)/(tmp = -432927173, tmp))));
+ assertEquals(0, x += (((2766509428.071809)/(x/((942453848.5423365)/(((tmp = -1284574492, tmp)&((tmp = 760186450.7301528, tmp)-(2464974117.358138)))/((x/(x|(672536969)))*(x>>(-1272232579)))))))>>(x*(-3175565978))));
+ assertEquals(-1277710521, x -= (1277710521));
+ assertEquals(-1277710521, x >>= (((tmp = -2349135858, tmp)-x)-x));
+ assertEquals(-1277710521, x >>= ((tmp = 2135645051, tmp)*(tmp = -2468555366, tmp)));
+ assertEquals(-155971, x >>= (-1294859507));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>>= (((861078292.6597499)|(-268063679))-(((((-221864206.9494424)-(-3186868203.2201176))&(tmp = 1287132927, tmp))<<(((tmp = 1964887915, tmp)<<((25908382)^(tmp = -688293519.875164, tmp)))*(2075946055)))&(x-((x>>x)&(1395338223.7954774))))));
+ assertEquals(788002218, x -= (-788002218));
+ assertEquals(716399906, x &= (-1145868506));
+ assertEquals(145776674, x &= (-1661931477.360386));
+ assertEquals(145776674, x |= x);
+ assertEquals(-0.05255700469257692, x /= (tmp = -2773686873, tmp));
+ assertEquals(-660918434, x |= (-660918434.2915542));
+ assertEquals(1223537346, x ^= (tmp = -1871274596, tmp));
+ assertEquals(305884336, x >>= (x&x));
+ assertEquals(-1.1123775647978218e-8, x *= ((tmp = -793393031.4229445, tmp)/((tmp = -503919284, tmp)*(((((tmp = 429810625, tmp)>>>x)-((2091544148.870375)<<(((((x^x)%x)|x)/(-260773261))<<((tmp = -1323834653, tmp)&x))))*((-1231800099.3724015)+x))*((x+((-559726167)^x))>>>((-549148877)<<((((tmp = 1196115201, tmp)/((tmp = -2654658968.390111, tmp)%(tmp = -1044419580, tmp)))*(((((x>>>(733571228))+(2919762692.511447))/(-2718451983.570547))^x)+((2891533060.1804514)^((tmp = -2514488663, tmp)&x))))<<(tmp = -2526139641.6733007, tmp))))))));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x *= x);
+ assertEquals(0, x |= x);
+ assertEquals(3076984066.336236, x -= ((tmp = -3076984066.336236, tmp)+((tmp = -446575828.5155368, tmp)&x)));
+ assertEquals(1, x /= x);
+ assertEquals(1513281647.839972, x *= (1513281647.839972));
+ assertEquals(1251138155, x ^= ((tmp = 2124481052, tmp)&(2431937351.4392214)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x &= (tmp = 627050040, tmp));
+ assertEquals(497153016, x ^= (497153016));
+ assertEquals(-1112801283, x |= (tmp = 2752196557, tmp));
+ assertEquals(0.5735447276296568, x /= ((((tmp = -500878794, tmp)%(tmp = -2559962372.2930336, tmp))%(2661010102))+(tmp = -1439338297, tmp)));
+ assertEquals(1.0244795995097235e-9, x /= (559840067));
+ assertEquals(0.43468811912309857, x *= (424301391));
+ assertEquals(-1972757928, x ^= (tmp = -1972757928.9227014, tmp));
+ assertEquals(-606757265, x ^= (tmp = -2923461577.264596, tmp));
+ assertEquals(-37, x >>= (((-2736561559.7474318)%(tmp = -27668972.662741184, tmp))*(2774711606)));
+ assertEquals(-1923785671, x += ((-1923785597)+x));
+ assertEquals(-3877639176, x += (tmp = -1953853505, tmp));
+ assertEquals(-4688259242, x -= ((810620066.4394455)>>(((-1474285107.459875)>>x)/(((((-570672326.4007359)>>(tmp = -3086802075, tmp))%x)>>>(((tmp = 286938819.28193486, tmp)>>>((1712478502)>>(tmp = 3045149117.796816, tmp)))<<(tmp = 750463263.292952, tmp)))&(tmp = 2055350255.5669963, tmp)))));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x <<= (1037856162.5105649));
+ assertEquals(0, x *= x);
+ assertEquals(0, x &= (997845077.4917375));
+ assertEquals(0, x *= x);
+ assertEquals(0, x *= x);
+ assertEquals(0, x <<= (((x<<x)&(57691805))>>(786927663)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x &= (-2131910624.1429484));
+ assertEquals(0, x >>>= (-43787814));
+ assertEquals(-2415062021, x += (tmp = -2415062021, tmp));
+ assertEquals(-4830124042, x += x);
+ assertEquals(-186683401, x |= (tmp = 1960135383, tmp));
+ assertEquals(NaN, x *= ((tmp = -1674740173.9864025, tmp)%(((((((-432895485.7261934)-x)^x)>>>(((-1627743078.3383338)>>(179992151))<<((tmp = 911484278.0555259, tmp)|(((tmp = -3042492703, tmp)>>(((-663866035.302746)>>(((x-((440661929.50030375)>>>(tmp = 263692082, tmp)))*x)+x))/((1546004407)^(((tmp = 2023662889.1594632, tmp)*(tmp = -2456602312, tmp))+(tmp = 755602286.1810379, tmp)))))%((tmp = -336449961, tmp)|(tmp = 206780145, tmp))))))/(1068005219.1508512))<<(tmp = -474008862.6864624, tmp))/(((((((1518711056.5437899)>>>(tmp = 287418286.63085747, tmp))<<(tmp = 2823048707, tmp))^(((x<<(x^(-1600970311)))&(x>>(((tmp = 157300110.7636031, tmp)*(tmp = -3047000529, tmp))&(1743024951.3535223))))>>x))-(tmp = -2895435807, tmp))*((tmp = -314120704, tmp)&(tmp = 1759205369, tmp)))>>(tmp = 1833555960.046526, tmp)))));
+ assertEquals(NaN, x -= (tmp = 694955369, tmp));
+ assertEquals(NaN, x *= (x%x));
+ assertEquals(0, x |= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= x);
+ assertEquals(NaN, x /= (x+x));
+ assertEquals(NaN, x %= ((tmp = -1595988845, tmp)*((1754043345)>>>(-601631332))));
+ assertEquals(0, x >>>= (tmp = 862768754.5445609, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x *= (tmp = -1774545519, tmp));
+ assertEquals(0, x >>>= (tmp = -2492937784, tmp));
+ assertEquals(0, x %= ((((x<<(-1657262788.2028513))&((x^(tmp = -671811451, tmp))<<(-2984124996)))^(1455422699.7504625))-((-340550620)>>x)));
+ assertEquals(918278025, x ^= ((tmp = -918278027, tmp)^((tmp = 2889422870, tmp)/(tmp = -657306935.7725658, tmp))));
+ assertEquals(918278025, x %= (2603186571.0582614));
+ assertEquals(107034679.32509923, x %= (tmp = -811243345.6749008, tmp));
+ assertEquals(53517339, x >>= (x%((((x*((tmp = -983766424, tmp)^(-1881545357.8686862)))|(tmp = -1429937087, tmp))>>((x<<x)>>((((tmp = -2347470476, tmp)&x)+((x&x)<<(396061331.6476157)))*(tmp = -3136296453.209073, tmp))))>>>(((tmp = 908427836, tmp)|(tmp = 207737064, tmp))|(((1253036041)-(tmp = 2705074182, tmp))+(-431215157.82083917))))));
+ assertEquals(53477378, x &= ((((-1128036654.165636)*x)+x)>>(x>>(3080099059))));
+ assertEquals(0, x >>= (-590692293));
+ assertEquals(0, x %= (-2395850570.9700127));
+ assertEquals(0, x *= ((tmp = 1377485272, tmp)&(1129370608)));
+ assertEquals(0, x += (x>>>(x%(((((tmp = -1746827236, tmp)+((tmp = -326913490, tmp)&((-58256967)&x)))*(tmp = -1176487022.001651, tmp))>>>(-2089147643))-x))));
+ assertEquals(0, x <<= (tmp = 1073298160.2914447, tmp));
+ assertEquals(-837811832, x ^= (-837811832));
+ assertEquals(102760448, x <<= (tmp = 2833582450.4544373, tmp));
+ assertEquals(0, x &= (((((((tmp = 2595641175, tmp)*x)+(tmp = -2049260172.1025927, tmp))%((2986747823)>>(tmp = -2120598518, tmp)))&((tmp = -2742408622, tmp)&x))>>x)*((1043474247.9601482)&(tmp = 1686365779.9885998, tmp))));
+ assertEquals(0, x >>= ((tmp = 1717862848, tmp)-(tmp = 1077024446.4160957, tmp)));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x /= (-1669429787.975099));
+ assertEquals(NaN, x -= (-2299895633.4807186));
+ assertEquals(138173970, x ^= (138173970.56627905));
+ assertEquals(-2084183776, x <<= (3073345316));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>= (-3080556066.068573));
+ assertEquals(0, x &= ((tmp = -2587514820, tmp)*(x-((x^(1995672257))*(1125326747.2339358)))));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x >>= (tmp = 2139186585, tmp));
+ assertEquals(-1904096640, x |= ((-602301360.1919911)*(-1270444810)));
+ assertEquals(1073741824, x <<= (tmp = -1069467849, tmp));
+ assertEquals(1073741824, x ^= (x-x));
+ assertEquals(536870912, x >>>= (-1579466367.160293));
+ assertEquals(512, x >>= (972402804.3890183));
+ assertEquals(512, x &= (tmp = 2664796831, tmp));
+ assertEquals(16777216, x <<= (-2738292561));
+ assertEquals(0, x >>>= ((((1397663615.3889246)|(1117420260.6730964))-(-1173734560))<<((tmp = 1007006104.0172879, tmp)<<((tmp = -623002097, tmp)%(tmp = -35829654.379403114, tmp)))));
+ assertEquals(1200191544, x ^= (tmp = -3094775752, tmp));
+ assertEquals(71, x >>>= x);
+ assertEquals(71, x |= x);
+ assertEquals(1394763772, x += (1394763701));
+ assertEquals(-1.492717171027427, x /= ((x&(tmp = 1243787435, tmp))-(2043911970.26752)));
+ assertEquals(-1.1002448961224718e-8, x /= ((((835185744)*(((tmp = 2165818437, tmp)^(tmp = 2567417009.1166553, tmp))/x))/x)/(((63485842.39971793)^(2668248282.597389))/x)));
+ assertEquals(0, x <<= (tmp = 1598238578.637568, tmp));
+ assertEquals(0, x |= (x&((tmp = -1812945547.5373957, tmp)>>>x)));
+ assertEquals(0, x >>>= (x+(-1969679729.7299538)));
+ assertEquals(1582033662, x += (tmp = 1582033662, tmp));
+ assertEquals(1, x >>>= x);
+ assertEquals(-550748739, x += ((tmp = -550748740, tmp)/(x&((2537822642.235506)^((-2167656297)%(tmp = 1161201210, tmp))))));
+ assertEquals(-268921, x >>= (tmp = 1916069547.7381654, tmp));
+ assertEquals(-0.00021776939364231114, x /= (tmp = 1234888868, tmp));
+ assertEquals(0, x <<= (-1036375023));
+ assertEquals(0, x &= ((((x/(2398886792.27443))&(x|((-1813057854.1797302)-x)))&(x/(((tmp = 3091133731.4967556, tmp)|(3013139691.823039))<<x)))>>>(2542784636.963599)));
+ assertEquals(0, x += ((x*x)/(tmp = 347079383, tmp)));
+ assertEquals(788347904, x |= ((1462257124.6374629)*((3180592147.4065146)-(x&(1922244678)))));
+ assertEquals(2130672735, x |= (tmp = -2846986145, tmp));
+ assertEquals(-1331327970, x ^= ((656251304)-(tmp = 1489152359, tmp)));
+ assertEquals(-0.14377179742889856, x %= (((2889747597.813753)-(1730428996))/(((tmp = -1378710998, tmp)&x)|x)));
+ assertEquals(-1754612583.143772, x += ((-1754725729)^((-2285838408)>>>(1434074349))));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x &= (tmp = -1031961332, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x /= (3059476325));
+ assertEquals(NaN, x *= ((x*((((tmp = 13529540.462185979, tmp)&x)^((x<<(-1312696238.1628869))&(-2029766712.3852897)))>>x))/x));
+ assertEquals(1657339940, x ^= ((tmp = -488956817.1491232, tmp)&(tmp = -2352413900.1983714, tmp)));
+ assertEquals(-530683621952432200, x *= (tmp = -320202035.2882054, tmp));
+ assertEquals(229226258, x ^= ((tmp = -1263410990.026416, tmp)+(((-808046349)&(tmp = -1294442506, tmp))&((tmp = 1147437219, tmp)<<((tmp = -820299900, tmp)-(tmp = -1947748943.3443851, tmp))))));
+ assertEquals(7163320, x >>= (-2631307131));
+ assertEquals(-68, x |= (((-1271721343)>>x)%x));
+ assertEquals(-39956523818.38862, x *= (587595938.505715));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>>= ((x^(x+x))<<(tmp = 265212367, tmp)));
+ assertEquals(0, x |= (((x>>((tmp = 2294761023, tmp)/(x>>(2125624288))))&((-2125650113)|(tmp = 1014409884, tmp)))%(tmp = -527324757, tmp)));
+ assertEquals(0, x >>= ((tmp = 2267075595, tmp)*(-1681569641.8304193)));
+ assertEquals(0, x >>>= x);
+ assertEquals(0.5738410949707031, x -= ((tmp = -1846572645.573841, tmp)%((((((x^(((-156613905.64173532)/x)<<x))+((x|((2405109060)>>>x))^x))/(570585894.8542807))+(x&(-2544708558)))^((((tmp = -2539082152.490635, tmp)+((((-657138283)/(2204743293))-((tmp = -1422552246.565012, tmp)+x))<<(x-x)))>>(x/(x>>>(tmp = -3027022305.484394, tmp))))<<x))&((-2066650303.3258202)/(tmp = -1666842593.0050385, tmp)))));
+ assertEquals(0, x >>>= ((((tmp = 2473451837.613817, tmp)>>((2526373359.1434193)>>(x<<x)))+((tmp = -579162065, tmp)+((tmp = -3115798169.551487, tmp)-(tmp = 933004398.9618305, tmp))))&(tmp = 131167062, tmp)));
+ assertEquals(-2067675316, x ^= (-2067675316.6300585));
+ assertEquals(543772, x >>>= x);
+ assertEquals(-1073741824, x <<= x);
+ assertEquals(3221225472, x >>>= ((x*(1478586441.081221))&(tmp = -3050416829.2279186, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x *= x);
+ assertEquals(-1017771903.0298333, x -= (1017771903.0298333));
+ assertEquals(0.6404112721149928, x /= ((tmp = -144667370, tmp)^(-2849599562)));
+ assertEquals(-2410517638773644000, x -= (((tmp = 1759631550, tmp)*x)*((((tmp = -2949481475, tmp)>>>x)*x)|(tmp = -2977983804, tmp))));
+ assertEquals(-0, x %= (x+((((tmp = -1307866327.7569134, tmp)<<((x&((tmp = -2380043169.8405933, tmp)|x))>>(472992789.7639668)))|(((((x<<(tmp = -1416427232.7298179, tmp))%(-1404989679.409946))*((x/(tmp = -992416608, tmp))/(tmp = 524646495, tmp)))-(tmp = 734405570, tmp))>>x))/(1079256317.7325506))));
+ assertEquals(0, x <<= (tmp = 2459834668, tmp));
+ assertEquals(-0, x /= (tmp = -1892164840.5719755, tmp));
+ assertEquals(0, x >>= (x|(((1299844244)>>>(((tmp = -2422924469.9824634, tmp)|x)-((((1914590293.2194016)+(-3033885853.8243046))-((tmp = -1720088308, tmp)%x))<<(tmp = 2210817619, tmp))))<<x)));
+ assertEquals(0, x <<= (((tmp = 3192483902.841396, tmp)>>>(((x^(2944537154))|(tmp = -1334426566, tmp))*(((((((-2705218389)&x)+(1987320749))+(tmp = -111851605, tmp))|(2894234323))-(265580345))&x)))%(((tmp = 1431928204.6987057, tmp)&(tmp = 914901046, tmp))&(x>>>x))));
+ assertEquals(0, x >>>= (tmp = 1941940941, tmp));
+ assertEquals(0, x %= (3089014384));
+ assertEquals(0, x += ((tmp = 2948646615, tmp)*x));
+ assertEquals(-0, x /= (tmp = -1480146895, tmp));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x %= (-2995257724.158043));
+ assertEquals(NaN, x %= (tmp = 2714835455, tmp));
+ assertEquals(NaN, x /= (tmp = -311440765.98078775, tmp));
+ assertEquals(NaN, x -= (-1600234513.697098));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x <<= (-1499045929));
+ assertEquals(-0, x *= (-2491783113));
+ assertEquals(0, x ^= (x%((x>>(((1234398704.3681123)>>>x)%(x+x)))>>(402257223.4673699))));
+ assertEquals(-643225204, x ^= (((-55960194.698637486)+((((721411198)-(((tmp = 1308676208.7953796, tmp)%(2242904895))-x))>>((((tmp = 332791012, tmp)&((tmp = -2094787948, tmp)/((x/(2427791092))^(2444944499.6414557))))%(((x+(1253986263.5049214))+(((((3135584075.248715)+((tmp = -2569819028.5414333, tmp)%(440908176.1619092)))>>>(x<<((3061615025)-x)))%x)%(x+((2369612016)*((((tmp = 1173615806, tmp)*(-1910894327))&(2428053015.077821))*(-55668334.70082307))))))<<(tmp = -2129259989.0307562, tmp)))+(1579400360)))%((-3053590451.8996153)>>x)))+(x>>(x%(x^((-1772493876)^x))))));
+ assertEquals(413738663060841600, x *= x);
+ assertEquals(1581062538.4501781, x %= ((tmp = -1298397672.0300272, tmp)-((2237197923)+(tmp = -1385478459, tmp))));
+ assertEquals(755644566.8709538, x %= (tmp = -825417971.5792243, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>>= ((89330582)%(-1012731642.4855506)));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x %= ((x>>>((x/(tmp = -1848848941.2352903, tmp))>>>(tmp = -71862893, tmp)))&(-2385996598.2015553)));
+ assertEquals(NaN, x += (-2292484503.318904));
+ assertEquals(NaN, x *= (2961064461));
+ assertEquals(NaN, x += (x<<((2076798243.6442)/((tmp = -81541044.75366282, tmp)^((3041366498.551101)+((2126874365)/(tmp = -177610359, tmp)))))));
+ assertEquals(NaN, x %= ((x/((x/x)+x))>>>x));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x += (1171761980.678));
+ assertEquals(NaN, x += ((2355675823)<<(-390497521)));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= (tmp = -658428225.56619, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= (1643310725.5713737));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x <<= (-397005335.3712895));
+ assertEquals(0, x >>>= (tmp = -2804713458.166788, tmp));
+ assertEquals(0, x <<= (((((((tmp = 1879988501, tmp)%(1528081313.9360204))+(1376936736))*((((x>>>((1736268617.339198)>>>(-2598735297.4277673)))<<((((((((-2742982036)/(231867353.4549594))-(875335564))<<x)|((2241386341.742653)<<((-22024910.828409433)&(x<<x))))*(-756987803.5693252))+x)^(tmp = 1084498737, tmp)))<<(1920373881.8464394))&(2370827451.82652)))&(x^(tmp = -891503574, tmp)))<<x)>>>((-1519588625.2332087)^(483024636.2600144))));
+ assertEquals(52193878.40997505, x -= ((tmp = -341753803.40997505, tmp)%(tmp = -96519975, tmp)));
+ assertEquals(-1665844168.938803, x -= (1718038047.348778));
+ assertEquals(3.6962232549405003e-19, x /= (((((-809583468.5507183)>>>((tmp = 286797763, tmp)%((1579183142.7321532)/(1853824036.001172))))<<x)>>(((x|x)^((tmp = -2641304815, tmp)<<(x<<x)))>>(((((268338128.8300134)&(-1778318362.8509881))*(751081373.346478))<<(((525066612)>>(-1139761212))*(2949167563.299916)))<<x)))+((tmp = 664905121, tmp)*((-2208280205)*(3069462420)))));
+ assertEquals(4710721795.110161, x += (((217604832)+((1307891481.781326)-x))+(tmp = 3185225481.328835, tmp)));
+ assertEquals(0, x %= x);
+ assertEquals(0, x -= (((x>>>(x/(tmp = 46977522.46204984, tmp)))>>(-2466993199.615269))&(tmp = 14524430.287991166, tmp)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x /= (tmp = 578120637, tmp));
+ assertEquals(-17267104, x -= (((tmp = 1515285919.495792, tmp)+(((tmp = -1364790286.7057304, tmp)+((954599071)>>((897770243.1509961)*x)))^x))>>>(566027942.1732262)));
+ assertEquals(-17267104, x &= x);
+ assertEquals(189138241, x ^= ((tmp = 1565742675.9503145, tmp)-((tmp = 1737806643, tmp)|((x*(tmp = -1382435297.5955122, tmp))*(-2820516692.153056)))));
+ assertEquals(189138241, x %= (x*(tmp = -1670678493, tmp)));
+ assertEquals(1693, x %= ((-2328713314)>>>(1623637325)));
+ assertEquals(1693, x %= ((-1019394014)*(x|x)));
+ assertEquals(3386, x += x);
+ assertEquals(9268970871604, x *= (2737439714));
+ assertEquals(-4720.120483643183, x /= (tmp = -1963714889, tmp));
+ assertEquals(-1, x >>= ((x^(((-2404688047.455056)|((1439590234.6203847)<<(tmp = -2496557617, tmp)))/((x<<((tmp = 1865549512.282249, tmp)/(((360384191.55661833)>>(tmp = -1225297117.344188, tmp))>>>(2703264010.4122753))))*(1521960888.0071676))))%(tmp = 2834001448.0508294, tmp)));
+ assertEquals(63, x >>>= (x&(-3079339174.6490154)));
+ assertEquals(0, x >>>= (1039770956.6196513));
+ assertEquals(0, x >>>= (-1074820214));
+ assertEquals(0, x >>>= (x/x));
+ assertEquals(0, x >>= ((tmp = -449117604.2811785, tmp)&x));
+ assertEquals(-0, x /= (tmp = -118266935.1241343, tmp));
+ assertEquals(2226140134, x += (tmp = 2226140134, tmp));
+ assertEquals(2068827161, x ^= ((tmp = -1950744808.846384, tmp)>>((2258661151)^((tmp = -1118176421.8650177, tmp)<<(2828634014)))));
+ assertEquals(123, x >>>= (-1779624840.0515127));
+ assertEquals(0, x >>>= (x|((tmp = -239082904, tmp)<<(tmp = 1404827607, tmp))));
+ assertEquals(0, x >>>= x);
+ assertEquals(1793109749, x ^= (tmp = -2501857547.710491, tmp));
+ assertEquals(855, x >>>= x);
+ assertEquals(0, x >>>= (-847289833));
+ assertEquals(0, x %= (-2271241045));
+ assertEquals(169648072, x ^= (((tmp = 169648072.66759944, tmp)^x)|x));
+ assertEquals(176025927479164930, x *= ((tmp = 1111997198.8803885, tmp)<<(tmp = 2913623691, tmp)));
+ assertEquals(176025926613281700, x += ((tmp = -865883245, tmp)<<(x+(-2624661650))));
+ assertEquals(3406506912, x >>>= ((x|(tmp = 2436016535, tmp))*(((tmp = -1222337225, tmp)<<((1765930268)&x))*(tmp = 1600702938, tmp))));
+ assertEquals(1.694694170868292, x %= (x/(-1597121830.794548)));
+ assertEquals(0, x >>= (tmp = -2443203089, tmp));
+ assertEquals(0, x >>>= (1323174858.2229874));
+ assertEquals(0, x &= ((tmp = 846556929.2764134, tmp)|(((1483000635.0020065)|(-3151225553))|(tmp = -229028309, tmp))));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>= ((((((-2677334787)>>>x)>>((tmp = 496077992, tmp)&((((x<<(x*(tmp = 1095163344.2352686, tmp)))+(-952017952))%((x<<((x*x)/(tmp = 2983152477, tmp)))^((tmp = -939521852.1514642, tmp)^(tmp = 143967625.83755958, tmp))))*((tmp = 551827709.8366535, tmp)>>>x))))^((-1552681253.69869)-(-1874069995)))>>>(x>>(x%(tmp = -2554673215, tmp))))|(tmp = -190693051.77664518, tmp)));
+ assertEquals(0, x /= (tmp = 427402761.37668264, tmp));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x |= (x>>>(((((-543326164.0673618)>>>(-2344090136.707964))>>>((((-563350246.6026886)/x)/(1525481037.3332934))&(tmp = -2917983401.88958, tmp)))^(-1094667845.1208413))^x)));
+ assertEquals(0, x &= (1080322749.897747));
+ assertEquals(0, x %= (tmp = -1572157280, tmp));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x %= ((377280936)|x));
+ assertEquals(708335912, x -= (tmp = -708335912, tmp));
+ assertEquals(2766937, x >>>= x);
+ assertEquals(547342779, x += (tmp = 544575842, tmp));
+ assertEquals(546273751, x -= ((x>>>(472833385.9560914))|((tmp = -1164832103.9970903, tmp)/(3147856452.1699758))));
+ assertEquals(546273751, x &= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>>= (tmp = -3181805175, tmp));
+ assertEquals(-375546685, x |= (-375546685.08261824));
+ assertEquals(1089992785780217200, x *= (tmp = -2902416209, tmp));
+ assertEquals(0, x %= x);
+ assertEquals(-1854981526, x -= ((x-x)-(-1854981526)));
+ assertEquals(-3709963052, x += x);
+ assertEquals(-316772482, x %= (tmp = -1696595285, tmp));
+ assertEquals(-316772482, x |= x);
+ assertEquals(1, x /= x);
+ assertEquals(0, x -= x);
+ assertEquals(-1418375842, x ^= (-1418375842));
+ assertEquals(-2, x >>= x);
+ assertEquals(-4, x += x);
+ assertEquals(-8388608, x &= (x<<(-350555339.30086184)));
+ assertEquals(-16777216, x += x);
+ assertEquals(-0, x %= x);
+ assertEquals(1083355129, x += (tmp = 1083355129, tmp));
+ assertEquals(0, x &= (((tmp = 389729053, tmp)-(tmp = 2944192190.0939536, tmp))/(x-(2081712461.2657034))));
+ assertEquals(0, x += x);
+ assertEquals(-3, x += ((3147270119.5831738)>>((2455837253.1801558)%((-2100649096)>>(((290236808.01408327)|(x&((2661741230.3235292)|((tmp = 1686874589.4690177, tmp)<<x))))*(x+(tmp = 2327674670, tmp)))))));
+ assertEquals(-3, x %= ((x>>(((-2962686431)%x)>>((((2438370783)-(tmp = 2667305770.4839745, tmp))>>>x)>>>x)))<<((x&(tmp = 1428498616, tmp))|((tmp = 2621728539.102742, tmp)/(-204559901)))));
+ assertEquals(2, x ^= (x|((((tmp = 1751230118.6865973, tmp)/(-867465831.207304))>>((-808143600.0912395)+(-2882191493.0506454)))^x)));
+ assertEquals(2, x %= (-2015954220.2250996));
+ assertEquals(0, x >>>= (tmp = 401373999, tmp));
+ assertEquals(0, x >>= (2371830723));
+ assertEquals(0, x >>>= ((((tmp = 2765919396, tmp)-x)-(530310269.7131671))|(tmp = -615761207.9006102, tmp)));
+ assertEquals(-145389011, x ^= (tmp = -145389011, tmp));
+ assertEquals(-145389011, x |= x);
+ assertEquals(1632929832, x &= (-2518898392));
+ assertEquals(4190540017.751949, x += (tmp = 2557610185.751949, tmp));
+ assertEquals(4980024282.153588, x += ((1841304364.1177452)%(tmp = 1051820099.7161053, tmp)));
+ assertEquals(0, x >>>= (((((1379314342.4233718)>>((-2782805860)^((x%(tmp = 1328845288, tmp))>>>(tmp = 901403219.858733, tmp))))+(x/((tmp = -3078904299, tmp)/x)))/x)|(x|(1399702815))));
+ assertEquals(-1820494882, x ^= (tmp = -1820494882.407127, tmp));
+ assertEquals(-305870376, x %= (tmp = -757312253, tmp));
+ assertEquals(-577530443, x += (x|(tmp = -1958083619.6653333, tmp)));
+ assertEquals(333541412591776260, x *= x);
+ assertEquals(-949341696, x >>= ((((1550069663)<<((x>>>(tmp = 2406565178.902887, tmp))>>>((1844746612.632984)/((tmp = 2233757197, tmp)*((-1524891464.1028347)>>(tmp = 2498623474.5616803, tmp))))))&x)<<(x&(tmp = -370379833.3884752, tmp))));
+ assertEquals(-277202090, x |= ((-762200848.8405354)-(tmp = 1749136282, tmp)));
+ assertEquals(0.13704539927239265, x /= (tmp = -2022702633.373563, tmp));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= ((132951580.19304836)-((427623236.27544415)-(1212242858))));
+ assertEquals(0, x &= ((449148576)&(-1609588210.249217)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x -= x);
+ assertEquals(-0, x /= (tmp = -1640777090.9694843, tmp));
+ assertEquals(0, x &= (((tmp = -1923412153, tmp)>>>((x>>(tmp = 3027958119.0651507, tmp))+(60243350)))>>(tmp = -2610106062, tmp)));
+ assertEquals(0, x ^= (((-186998676)/(tmp = 2697937056, tmp))-x));
+ assertEquals(-1147950080, x |= ((2425449461)*(tmp = -2525854833, tmp)));
+ assertEquals(457688198, x ^= (2698274950.660941));
+ assertEquals(8724, x %= ((1174351031)>>>((371599047.36048746)+(3025292010))));
+ assertEquals(0, x <<= (tmp = -710011617, tmp));
+ assertEquals(0, x >>>= (1693410026));
+ assertEquals(1443005362, x ^= ((tmp = -2851961934, tmp)+((((x%x)-(tmp = 547622400, tmp))<<(((tmp = 722396486.5553623, tmp)|x)>>>((((tmp = -542268973.5080287, tmp)<<(tmp = 1347854903.771954, tmp))>>>(tmp = -889664427.7115686, tmp))&((tmp = 1549560114, tmp)*(tmp = 964918035, tmp)))))&(-2422502602.920377))));
+ assertEquals(3986573462, x -= (-2543568100));
+ assertEquals(7973146924, x += x);
+ assertEquals(-1, x >>= (-75987297));
+ assertEquals(-12, x += ((2940824338.64834)>>(tmp = 3061467355, tmp)));
+ assertEquals(-3.8229398525977614e-8, x /= (313894554));
+ assertEquals(-2.890709270374084e-17, x /= (tmp = 1322491989, tmp));
+ assertEquals(0, x |= (x-x));
+ assertEquals(0, x >>>= (tmp = -1205300664, tmp));
+ assertEquals(-0, x /= (((2869505187.6914144)>>(tmp = 1541407065, tmp))/(((-571132581)>>>(x>>x))/((x^(170373762.8793683))>>>((((tmp = -363073421.05897164, tmp)|(((tmp = -1591421637, tmp)>>(1095719702.8838692))&(636687681.9145031)))^x)^(x|x))))));
+ assertEquals(-1487828433, x ^= (-1487828433.3462324));
+ assertEquals(-0, x %= x);
+ assertEquals(1716342498, x -= ((tmp = 2578624798, tmp)^x));
+ assertEquals(1636, x >>= ((264194540)>>>(-801900756)));
+ assertEquals(0, x >>>= ((tmp = 2502688876, tmp)+((x<<(x|((-628272226.0338528)|((x<<(-2083074091))>>>(tmp = 1692123246.8418589, tmp)))))>>(1594759826.990993))));
+ assertEquals(0, x <<= (tmp = -904399643, tmp));
+ assertEquals(NaN, x /= ((x^(x-x))%((tmp = 1744962024.4882128, tmp)%x)));
+ assertEquals(NaN, x /= (-1013142883.1845908));
+ assertEquals(NaN, x /= ((tmp = 793633198, tmp)^(-2993598490.8659954)));
+ assertEquals(0, x &= (x>>((tmp = 1200937851, tmp)<<(((tmp = -2807378465, tmp)&(tmp = -143778237, tmp))|(tmp = -1200772223, tmp)))));
+ assertEquals(0, x <<= x);
+ assertEquals(88144, x |= (((((tmp = 3002723937.8560686, tmp)*(tmp = -3171720774.2612267, tmp))%(((tmp = -2586705978.7271833, tmp)%((x+(-1553704278))&(2405085526.501994)))>>((-240842053)>>>(((((tmp = -1886367228.4794896, tmp)>>>x)^(tmp = 2604098316, tmp))^(tmp = 1362808529, tmp))<<((tmp = -1062263918, tmp)|((-172718753)%(tmp = -1910172365.4882073, tmp)))))))^((1444153362)>>((x&((-1205465523.2604182)^(tmp = -2062463383, tmp)))>>(tmp = 956712476, tmp))))>>((((-1004215312)^((((-1707378612.5424936)^(tmp = 2372161553, tmp))/((tmp = 1802586581, tmp)*((2082257.1896460056)&((tmp = -1270773477, tmp)^(tmp = 942517360.3447798, tmp)))))+x))%((((666494127)^(x^x))>>>(tmp = -2592829775, tmp))+((-1601528223)+((x+(tmp = -2417034771.7409983, tmp))>>>((tmp = -730673817, tmp)*x)))))>>x)));
+ assertEquals(-2603179111.7557006, x -= ((2603267255.755627)+(x/(1200979191.2823262))));
+ assertEquals(1691788185, x >>= (tmp = 3088840032, tmp));
+ assertEquals(-168382533, x |= (tmp = -780750941.4590135, tmp));
+ assertEquals(-168382533, x >>= (60741120.48285198));
+ assertEquals(-134287365, x |= (x*(tmp = 834637940.7151251, tmp)));
+ assertEquals(-1481917089, x -= (tmp = 1347629724, tmp));
+ assertEquals(1, x >>>= x);
+ assertEquals(262144, x <<= (2680216914));
+ assertEquals(1075132032, x ^= (x-((tmp = 3220359552.3398685, tmp)^(((-434474746.6039338)|((((((((tmp = 1945689314.9683735, tmp)>>(1300022273))>>>(333705550))&x)%(588357521))-(x+(x^(((tmp = -134560382, tmp)+x)-((((994246147.7195556)-(-1506599689.7383268))%(x<<x))>>((1256426985.5269494)+(tmp = 1860295952.8232574, tmp)))))))^(((tmp = 917333220.2226384, tmp)>>x)>>>(tmp = 865898066, tmp)))%((x|(x%((tmp = -2660580370, tmp)&(tmp = 2966426022, tmp))))*x)))/(((tmp = 682585452, tmp)&(-3219368609))+((tmp = -1330253964, tmp)+((x&(2857161427))/x)))))));
+ assertEquals(274944, x &= ((2606953028.1319966)-(-1707165702)));
+ assertEquals(266752, x &= ((x<<((x+(x+(x^(-1570175484))))^x))^(x+(x<<(tmp = 90330700.84649956, tmp)))));
+ assertEquals(266752, x &= ((((x*(tmp = 2033225408, tmp))-(x-((tmp = 1507658653, tmp)/(-3016036094))))>>>((1497480588)>>(2784070758)))|(tmp = -3025904401.93921, tmp)));
+ assertEquals(-1680442631, x |= ((x/(445284843))|((tmp = 2614520057.2723284, tmp)<<x)));
+ assertEquals(40851947, x >>>= (tmp = -1577031386.938616, tmp));
+ assertEquals(2493, x >>= ((3044630989.3662357)-(-2670572992.8580284)));
+ assertEquals(-0.0000017317105653562252, x /= (-1439617017.9207587));
+ assertEquals(0, x &= (2359806567));
+ assertEquals(623768541, x ^= (623768541));
+ assertEquals(1028567149.0716183, x += (((tmp = 1307794561, tmp)%(x>>x))-(-404798608.0716183)));
+ assertEquals(-1.2971762489811298, x /= (tmp = -792927830.6471529, tmp));
+ assertEquals(-1.2971762489811298, x %= ((-2426421701.2490773)/(-689566815.3393874)));
+ assertEquals(-2147483648, x <<= x);
+ assertEquals(-2147483648, x &= (tmp = -869991477, tmp));
+ assertEquals(-268435456, x >>= (1383186659));
+ assertEquals(0, x -= x);
+ assertEquals(-2009742037, x |= (-2009742037.5389993));
+ assertEquals(-1386630820, x ^= (627864695));
+ assertEquals(-1033479103975173600, x *= (tmp = 745316697.9046186, tmp));
+ assertEquals(-1628048487, x |= (2662654361));
+ assertEquals(325551, x >>>= (340874477));
+ assertEquals(-1235730537, x ^= (tmp = 3059533880.0725217, tmp));
+ assertEquals(-1235730537, x %= (2247137328));
+ assertEquals(-220200960, x <<= ((x>>x)-x));
+ assertEquals(0, x <<= ((tmp = 337220439.90653336, tmp)|(tmp = 2901619168.375105, tmp)));
+ assertEquals(0, x >>>= ((-2114406183)/x));
+ assertEquals(0, x %= ((1425828626.3896675)/x));
+ assertEquals(0, x >>>= ((3213757494)>>>(2595550834.3436537)));
+ assertEquals(0, x <<= x);
+ assertEquals(-0, x /= ((1544519069.5634403)/((tmp = -1332146306, tmp)&(-762835430.0022461))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= (x|((((x*((-786272700)+x))<<x)+((tmp = -1868484904, tmp)-(tmp = -1692200376, tmp)))+(-1010450257.6674457))));
+ assertEquals(0, x -= x);
+ assertEquals(0, x ^= (x>>>(706010741)));
+ assertEquals(-964928697, x |= (-964928697));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= ((((tmp = 1778003555.3780043, tmp)>>(x%((tmp = -766158535, tmp)^((-2681449292.8257303)%((x-(x|(tmp = 1966478387.2443752, tmp)))^(((tmp = -1848398085, tmp)&x)>>>(tmp = -2860470842, tmp)))))))%(tmp = 2315077030, tmp))^x));
+ assertEquals(0, x ^= x);
+ assertEquals(-288007757, x ^= ((tmp = 183607156.1803962, tmp)-(tmp = 471614914, tmp)));
+ assertEquals(-270573581, x |= (tmp = -849475741.9424644, tmp));
+ assertEquals(-2129929, x |= (((((1942852445)&(tmp = 1280372312, tmp))*(x*(tmp = -1601900291, tmp)))^((509080002.81080174)-(tmp = 2699498226.9164257, tmp)))>>(((-335361221)>>(tmp = 843134832, tmp))%(-35532542))));
+ assertEquals(-232622355, x ^= ((-3060885134.5375547)-(((tmp = 1965966723, tmp)-((tmp = 1248630129.6970558, tmp)<<(tmp = 1859637857.5027392, tmp)))*x)));
+ assertEquals(-52149658093200070, x *= (224181627.31264615));
+ assertEquals(-697122968, x ^= (x-(x+(tmp = 2747211186.407712, tmp))));
+ assertEquals(-2146269688, x &= ((tmp = -1466710519, tmp)^(x/(1419998975))));
+ assertEquals(-536567422, x >>= (((((tmp = -1760701688.999274, tmp)>>(-1821976334))/(((tmp = -1660849531, tmp)>>>x)-((x+((tmp = -2489545009.4327965, tmp)>>>((tmp = -267360771.39148235, tmp)^x)))*(((-1453528661)%x)>>>(((243967010.3118453)/((((((2977476024)>>>((-1630798246)<<x))&(591563895.2506002))*(((2668543723.9720144)>>>x)|(1600638279)))^x)>>(x<<(tmp = -152589389, tmp))))>>>(x|(2821305924.9225664)))))))+(618968002.8307843))%(tmp = -1005408074.368274, tmp)));
+ assertEquals(40962, x &= (114403906));
+ assertEquals(19741977727890, x *= ((-2367133915.963945)>>>(-3119344126)));
+ assertEquals(1313341440, x <<= x);
+ assertEquals(626, x >>>= ((((-333992843)%(tmp = -2742280618.6046286, tmp))>>>x)|x));
+ assertEquals(0, x <<= (2598188575));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x ^= (x%((2507288229.3233204)&(tmp = -1714553169.9276752, tmp))));
+ assertEquals(0, x /= ((633436914.3859445)>>>(tmp = 1579804050.6442273, tmp)));
+ assertEquals(0, x *= ((tmp = 1172218326, tmp)<<((tmp = -2491306095.8456626, tmp)*(((tmp = 1305371897.9753594, tmp)>>((x^(((3077992060)*x)<<(492815553.904796)))>>((652151523)|x)))%x))));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x %= (1118131711));
+ assertEquals(0, x &= ((tmp = 2734673884, tmp)|(x-((tmp = 2694578672.8975897, tmp)*(((x>>(2350811280.974167))*(1052548515))&(x^(x*(tmp = -1336287059.0982835, tmp))))))));
+ assertEquals(-2632782867.1256156, x += ((tmp = -2743992725.1256156, tmp)+(tmp = 111209858, tmp)));
+ assertEquals(-0, x %= x);
+ assertEquals(0, x >>>= (((tmp = -2050519887, tmp)^(106865302.74529803))>>(1642851915.2909596)));
+ assertEquals(-171964826, x |= (tmp = -171964826.6087358, tmp));
+ assertEquals(-2.113405951193522, x /= (tmp = 81368572.80206144, tmp));
+ assertEquals(3, x >>>= x);
+ assertEquals(0, x %= x);
+ assertEquals(-1717345907.837667, x += (-1717345907.837667));
+ assertEquals(-100964883, x |= (tmp = -109574931.80629134, tmp));
+ assertEquals(-33849857, x |= (-974111718.2433801));
+ assertEquals(1, x >>>= (tmp = -2556222849.005595, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>>= (-1796630999.4739401));
+ assertEquals(0, x >>>= x);
+ assertEquals(2031695758, x += (((x/(((tmp = -2364918403, tmp)%(x^((tmp = 277767803.6375599, tmp)>>((((tmp = 540036080, tmp)/(x|(2665298931)))/(x|((x>>(-2035456216.6165116))<<(2143184420.5651584))))^x))))&(tmp = 927798419.8784283, tmp)))-(-2031695758))>>>x));
+ assertEquals(2031695758, x |= x);
+ assertEquals(2031695758, x <<= (((x>>(x%x))|(tmp = -1164531232.7384055, tmp))*x));
+ assertEquals(124004, x >>>= x);
+ assertEquals(529846352, x += ((529722348)%((2417645298.865121)|(x>>(x>>>(x+x))))));
+ assertEquals(60067920, x &= (((tmp = -3166008541.8486233, tmp)-x)|(x%x)));
+ assertEquals(1415594240755200, x *= ((-2786707452.873729)>>(((tmp = -2369315809, tmp)*((1559868465)|(1011218835.1735028)))>>>x)));
+ assertEquals(1415595182259140, x += (941503939.9023957));
+ assertEquals(0, x <<= ((tmp = 2887184784.265529, tmp)/(-2575891671.0881453)));
+ assertEquals(0, x &= ((tmp = -1546339583, tmp)>>>(tmp = -587433830, tmp)));
+ assertEquals(0, x *= (((tmp = 1356991166.5990682, tmp)%(tmp = -284401292, tmp))*(1869973719.9757812)));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x ^= (((tmp = 92575404.43720293, tmp)>>>(263475358.17717505))%x));
+ assertEquals(0, x <<= (((561514358)*(tmp = -439584969, tmp))%((((-3005411368.7172136)+x)|(-2230472917))&x)));
+ assertEquals(0, x >>= ((x>>>x)-((x-(1630649280.510933))+x)));
+ assertEquals(0, x >>= (tmp = -1772403084.7012017, tmp));
+ assertEquals(0, x *= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x &= x);
+ assertEquals(0, x >>= (tmp = 1622680387, tmp));
+ assertEquals(1033887633558225200, x -= ((-510616337)*(tmp = 2024783695, tmp)));
+ assertEquals(-2.8073538539158063e+27, x *= (tmp = -2715337492, tmp));
+ assertEquals(-2.8073538539158063e+27, x -= ((tmp = -1664804757, tmp)&((tmp = -226616419, tmp)>>>(1006711498))));
+ assertEquals(1894539615, x |= (tmp = -2400427681.1831083, tmp));
+ assertEquals(7400545, x >>= (774629608.4463601));
+ assertEquals(456756268, x += (449355723));
+ assertEquals(285771784, x &= (-1316427366));
+ assertEquals(17, x >>= ((tmp = -220509931.20787525, tmp)*(((tmp = 2518859292, tmp)+(-1477543005.1586645))>>(tmp = 3172820250.687789, tmp))));
+ assertEquals(85924262443, x *= (x*((tmp = -2856669745.965829, tmp)&(((tmp = 401420695, tmp)^(tmp = 2355371132, tmp))|(tmp = 590645330.021911, tmp)))));
+ assertEquals(1703875715, x ^= ((-2576394029.7843904)-x));
+ assertEquals(1703875715, x %= (tmp = 2234144310, tmp));
+ assertEquals(271405807, x ^= (1973569132));
+ assertEquals(1060178, x >>>= (tmp = -84823096, tmp));
+ assertEquals(8, x >>>= (tmp = 2246120561.905554, tmp));
+ assertEquals(-2846791089, x += (-2846791097));
+ assertEquals(104933962, x &= (x-(-2969030955.99584)));
+ assertEquals(489215611.96215343, x -= (-384281649.96215343));
+ assertEquals(489215611, x |= x);
+ assertEquals(1186191360, x <<= ((tmp = 774407142.993727, tmp)%x));
+ assertEquals(1186191360, x %= (1555004022));
+ assertEquals(-1697134080, x ^= (tmp = -597421568, tmp));
+ assertEquals(-1102053376, x <<= ((-927370769.4059179)^((tmp = 1093490918, tmp)>>(((-2522227493.3821955)%x)+(-2657319903)))));
+ assertEquals(1086450058, x ^= (-23991926.187098265));
+ assertEquals(1086450058, x |= x);
+ assertEquals(-1.6554590588410778, x /= (x|(x<<(x+x))));
+ assertEquals(67108863, x >>>= ((-926530233)+x));
+ assertEquals(494553310, x ^= (tmp = 512079649, tmp));
+ assertEquals(207751168, x &= (2892146720.6261826));
+ assertEquals(207751168, x &= x);
+ assertEquals(207751168, x |= x);
+ assertEquals(6340, x >>>= (((((x<<(x-((-2819638321)*((x<<x)+x))))>>x)+(tmp = 2016170261, tmp))+(tmp = 2755496043.772017, tmp))+(-841368625.1402085)));
+ assertEquals(6340, x ^= ((x/(tmp = -192734784, tmp))>>>(((-140306239)&x)-x)));
+ assertEquals(1, x /= x);
+ assertEquals(0, x >>= x);
+ assertEquals(26786600, x ^= (tmp = 26786600, tmp));
+ assertEquals(-0.014657576899542954, x /= ((-1454855938.0338)+(-372635753.3681567)));
+ assertEquals(0, x &= ((tmp = 2480635933, tmp)&(-2986584704.9165974)));
+ assertEquals(-2108639122, x += ((tmp = 2108639123.8683565, tmp)^((-881296055)/(((x<<(2026200582))|(tmp = -862495245.138771, tmp))-(-1111596494.892467)))));
+ assertEquals(1893466112, x <<= (tmp = 607974481, tmp));
+ assertEquals(1893466112, x |= x);
+ assertEquals(1133122783.997418, x += ((tmp = -760343332, tmp)-((x-(tmp = -878561823.4218843, tmp))/(tmp = -693454632.596637, tmp))));
+ assertEquals(8, x >>>= (tmp = 700339003.3919828, tmp));
+ assertEquals(4.605305035175536e-9, x /= (1737127060.8343256));
+ assertEquals(4.605305035175536e-9, x -= ((x%(897221779))>>>x));
+ assertEquals(-1864423625.5704088, x += (tmp = -1864423625.5704088, tmp));
+ assertEquals(1132240092, x <<= (1304417186.1193643));
+ assertEquals(-2088985380, x ^= (x<<x));
+ assertEquals(-4, x >>= ((tmp = 1959823884.0935726, tmp)%(-1679792398.569136)));
+ assertEquals(-268435456, x <<= ((tmp = 2586838136, tmp)|((tmp = -481716750.718518, tmp)>>>((1485826674.882607)/(tmp = -2826294011, tmp)))));
+ assertEquals(-32768, x >>= (2060648973));
+ assertEquals(1, x /= x);
+ assertEquals(-2838976297, x -= (tmp = 2838976298, tmp));
+ assertEquals(-1382985298, x <<= ((tmp = -2104305023, tmp)&x));
+ assertEquals(10, x >>>= (x+x));
+ assertEquals(10, x -= (x>>>(361588901.70779836)));
+ assertEquals(854603510, x -= (-854603500));
+ assertEquals(-557842432, x <<= (tmp = 1212985813.6094751, tmp));
+ assertEquals(-459390188241943040, x *= (tmp = 823512450.6304014, tmp));
+ assertEquals(-232800033621957060, x /= ((((((686635689)/(tmp = 2013252543, tmp))*(tmp = -1591617746.8678951, tmp))|(((tmp = -1777454093.5611362, tmp)>>>((tmp = 2680809394, tmp)^(((x>>((((((tmp = -265022244, tmp)%((tmp = -3075004537, tmp)>>(((((1427784269.5686688)^((tmp = -1095171528.911587, tmp)^(-942424985.7979553)))>>(-1279441481.1987405))*((2493620394)>>(-2769016043)))/(x&((tmp = 2059033657, tmp)%(((tmp = 1948606940.1488457, tmp)-(tmp = -2645984114.13219, tmp))^x))))))^x)^x)%(x%((((tmp = 3209433446.4551353, tmp)%(tmp = 1364430104.0424738, tmp))/(tmp = -2103044578.349498, tmp))+(tmp = -2613222750, tmp))))*(2099218034)))&(((tmp = -378500985.49700975, tmp)>>(((x+x)|(x%(((-1841907486)<<(-1220613546.194021))<<(tmp = -1260884176, tmp))))^(tmp = 1858784116, tmp)))>>>((x%x)%((x>>>(tmp = -2540799113.7667685, tmp))|x))))/((((tmp = 642072894.6455215, tmp)-(-324951103.6679399))*(tmp = 1424524615, tmp))+((x<<(tmp = -904578863.5945344, tmp))*(tmp = 49233475.435349464, tmp))))))<<(tmp = 1680210257, tmp)))+((tmp = -1516431503, tmp)>>>(-1105406695.3068116)))/(-275019361.6764543)));
+ assertEquals(192359387.42913792, x /= (-1210234846));
+ assertEquals(192359387.42913792, x %= (-2920206625.0154076));
+ assertEquals(192359387.42913803, x -= (((((((tmp = -1263203016.3258834, tmp)-(2432034005.6011124))&x)<<(1479434294))>>((tmp = -1695856315.523002, tmp)>>>(tmp = 557391345, tmp)))/(tmp = -1280240246.2501266, tmp))%((tmp = -2196489823.034029, tmp)>>(((x&((912221637.1101809)+((tmp = -3003677979.652423, tmp)>>(tmp = -716129460.1668484, tmp))))-((x+(x-(-2780610859)))>>>(-2445608016)))<<((x*(x+(x+(((-2124412727.9007604)%(tmp = -593539041.5539455, tmp))&(tmp = 2404054468.768749, tmp)))))%(x>>(tmp = -2913066344.404591, tmp)))))));
+ assertEquals(11740, x >>= (688848398.7228824));
+ assertEquals(11740, x >>= ((1545765912)*(307650529.9764147)));
+ assertEquals(23480, x += x);
+ assertEquals(0, x >>>= ((tmp = 1313078391, tmp)|x));
+ assertEquals(1726251264, x -= ((1939413887)<<(1004888744.2840619)));
+ assertEquals(765324793.5278986, x %= (960926470.4721014));
+ assertEquals(747387, x >>= ((2483010044)-(tmp = -413698190, tmp)));
+ assertEquals(1, x /= x);
+ assertEquals(3016811624, x *= (3016811624));
+ assertEquals(17408, x &= (((tmp = -991624868, tmp)<<(((63107932)/(tmp = 2659939199, tmp))|(tmp = -1968768911.3575773, tmp)))>>(((-2876822038.9910746)|(tmp = 2550230179.243425, tmp))<<((x*(x<<((x<<((tmp = -1627718523.616604, tmp)|((2154120561.254636)-(x%(x<<(1484563622.1791654))))))<<((((x^(tmp = 3016524169, tmp))<<(((x+(tmp = 1887816698.2455955, tmp))+x)-x))-(-3023329069))-x))))+x))));
+ assertEquals(0, x <<= (((1247441062.177967)/(-1717276234))+x));
+ assertEquals(0, x |= ((x%((-1648299429.4520087)>>(-137511052)))>>(tmp = 221301016.4926411, tmp)));
+ assertEquals(0, x /= ((-2598501544.913707)>>>(-2177037696)));
+ assertEquals(NaN, x %= (x>>x));
+ assertEquals(0, x &= (tmp = 1852419158, tmp));
+ assertEquals(-829029120, x |= (((2122339180)*((((((tmp = 768748914, tmp)<<((1008490427)&((1937367899.957056)-(((635094486)>>(((tmp = -795046025, tmp)*(2665104134.4455256))^(tmp = 706594584.2462804, tmp)))/(504397522)))))/(-556057788))>>((x/(tmp = -2732280594, tmp))-x))+(-1989667473))+(tmp = 2766802447.789895, tmp)))<<(((tmp = -2969169096, tmp)-x)+(tmp = 2093593159.0942125, tmp))));
+ assertEquals(0.6451933462602606, x /= ((-1284931292)<<(x<<(tmp = 1294716764, tmp))));
+ assertEquals(1515416866.520901, x *= (2348779440));
+ assertEquals(-1620606242886682600, x *= ((-993898625.5357854)&(((tmp = -571100481, tmp)/x)*((2428590177.311031)%(tmp = -2671379453, tmp)))));
+ assertEquals(-1137472828, x %= (tmp = -1195183004, tmp));
+ assertEquals(-3096634005473250000, x *= (tmp = 2722380640, tmp));
+ assertEquals(-3096634003996758500, x -= (-1476491033.833419));
+ assertEquals(-3096634000805538000, x += (3191220521.978341));
+ assertEquals(-3096634000805468000, x += ((((tmp = -3024976741, tmp)&(952616360))|((x*(-1547952311))+(x*x)))>>>(tmp = 981373323, tmp)));
+ assertEquals(-3096633998655594000, x += (2149873927));
+ assertEquals(-118812224101.54297, x %= (((2641881276.9898443)*(((502159480)^x)<<x))%((tmp = -2840045365.547772, tmp)*(((((-2297661528)>>>(x>>(-229103883.94961858)))&(((-1285047374.6746495)<<((-360045084)>>>((x-(tmp = -956123411.1260898, tmp))%x)))>>((tmp = -2375660287.5213504, tmp)+((((tmp = -2753478891, tmp)>>>(((tmp = 101438098, tmp)>>(((tmp = -2736502951, tmp)<<((tmp = -3084561882.368902, tmp)&(tmp = 1491700884, tmp)))|x))&(tmp = 1627412882.6404104, tmp)))>>>(tmp = 1039002116.6784904, tmp))<<((tmp = -2840130800, tmp)-(tmp = -740035567, tmp))))))&(tmp = -416316142, tmp))>>x))));
+ assertEquals(86, x >>>= (tmp = -293489896.5572462, tmp));
+ assertEquals(172, x += (x%((((-2635082487.364155)|((-2361650420.634912)&(-2147095650.7451198)))<<((tmp = 2258905145.9231243, tmp)%((((tmp = -1365987098.5130103, tmp)*(((((((932437391)/x)/(289270413.0780891))%(x-x))+((((2194986374.917528)>>(((((tmp = -1553805025, tmp)|x)^(((x>>(-564400586.0780811))^(tmp = 1738428582.0238137, tmp))>>(tmp = 1717774140, tmp)))&(tmp = -2789427438, tmp))%(((tmp = -1386118057, tmp)*(-2333221237.7915535))*(x>>>(((((41346648.46438944)&x)%(-478973697.6792319))|(tmp = 2108106738, tmp))/x)))))-(tmp = -133437701.64136505, tmp))>>>x))+(tmp = -1567210003, tmp))*(x+((x&x)-(2942851671)))))>>>(tmp = -446377136, tmp))*((((((tmp = 1597203255, tmp)>>>(619157171))|(-2766246629.005985))>>((tmp = 3130227370, tmp)%x))*(tmp = 2072227901.6101904, tmp))|((tmp = 1369019520, tmp)^(759659487))))))>>>x)));
+ assertEquals(1996475731, x ^= ((1456327892.2281098)|(1728022827)));
+ assertEquals(0, x %= x);
+ assertEquals(0, x &= (1323847974));
+ assertEquals(3076829073.8848357, x += (3076829073.8848357));
+ assertEquals(9569842648396755000, x *= (3110293883.2782717));
+ assertEquals(9569842646260304000, x -= (2136450372.9038036));
+ assertEquals(9.158188827418242e+37, x *= x);
+ assertEquals(0, x <<= ((x&(tmp = -2241179286, tmp))+((tmp = 2553144081, tmp)&((tmp = -1914709694, tmp)^(tmp = -1469651409.0651562, tmp)))));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x /= (2177840666.276347));
+ assertEquals(0, x %= (-690827104));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x ^= x);
+ assertEquals(-0, x /= (tmp = -803415280, tmp));
+ assertEquals(-2355576914.316743, x += (-2355576914.316743));
+ assertEquals(-833671722514674000, x *= ((3053388806.692315)-(tmp = 2699474775.081724, tmp)));
+ assertEquals(1, x /= x);
+ assertEquals(1898147684, x += ((tmp = 1898147683, tmp)|(x<<x)));
+ assertEquals(2.192324660388075, x %= ((tmp = 2630187518, tmp)/((2868794982.790862)|(490860748))));
+ assertEquals(0, x >>>= ((2751021779)/(-952522559)));
+ assertEquals(321040461, x ^= ((321040461.153594)-x));
+ assertEquals(-2.3814602031636922, x /= ((tmp = -170472190, tmp)|x));
+ assertEquals(-1, x >>= (2200125174.177402));
+ assertEquals(-2964432647.9379396, x += (-2964432646.9379396));
+ assertEquals(-370116502.93793964, x %= (tmp = -518863229, tmp));
+ assertEquals(777927355.2283959, x -= (-1148043858.1663356));
+ assertEquals(0, x *= ((tmp = 1134913539, tmp)&(((x>>>((tmp = -989822787, tmp)>>>x))%x)&(tmp = 1078636160.7313156, tmp))));
+ assertEquals(-1089245637, x ^= (3205721659.3548856));
+ assertEquals(-1192493056, x <<= (-1173291054));
+ assertEquals(78013832, x += ((tmp = 2462999944, tmp)+x));
+ assertEquals(0, x %= x);
+ assertEquals(0, x >>>= (1794908927.7409873));
+ assertEquals(1708338504, x += ((-2586628792.3484306)<<x));
+ assertEquals(12, x >>= (-545794789.3827574));
+ assertEquals(0, x &= ((2753207225)<<(((-1776581207.557251)+((tmp = -2414140402, tmp)*x))+(x<<(x|(tmp = 772358560.3022032, tmp))))));
+ assertEquals(0, x <<= ((tmp = -2755724712.152605, tmp)/((x>>(-732875466))&x)));
+ assertEquals(NaN, x *= (((tmp = 2617815318.1134562, tmp)/x)%(x|((((((-851659337.194871)<<(tmp = 2072294700, tmp))%((x+(2193880878.5566335))^((tmp = 3005338026, tmp)-(2947963290))))/x)/(x+(2091745239.4210382)))-(x>>x)))));
+ assertEquals(NaN, x /= (tmp = -427684595.0278094, tmp));
+ assertEquals(NaN, x /= (tmp = -263945678, tmp));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x -= (((x>>((x&x)-(tmp = -673697315, tmp)))>>(((1575095242.2330558)/(x-(-1816886266)))%(-1580195729)))>>>x));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x >>= (-2815518206));
+ assertEquals(0, x -= (x/(1795634670.692437)));
+ assertEquals(-2753579891, x += (tmp = -2753579891, tmp));
+ assertEquals(2.7773776150171776, x /= (tmp = -991431585, tmp));
+ assertEquals(5.554755230034355, x += x);
+ assertEquals(3.362161997528237e-9, x /= (1652137890.4758453));
+ assertEquals(3.362161997528237e-9, x %= (tmp = -10848734.527020693, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(-2978012493, x -= (x+(2978012493)));
+ assertEquals(-5.158905851797543, x /= (((x+((tmp = -2548840164, tmp)>>x))<<(x^((tmp = -533281232.7294345, tmp)&x)))&(tmp = -1502692171, tmp)));
+ assertEquals(-5.158905851797543, x %= (-3009435255.5612025));
+ assertEquals(-20971520, x <<= ((tmp = -2728812464, tmp)%(2619809573.672677)));
+ assertEquals(-1900019712, x &= (2398099552));
+ assertEquals(-1991377, x %= ((tmp = 1562364373.7334614, tmp)>>>(((x-(-946283217))<<(-2044590694))^(((tmp = 1681238509, tmp)>>(-2801649769))-x))));
+ assertEquals(1, x /= x);
+ assertEquals(1, x %= (x/(x-x)));
+ assertEquals(1.3525631913093335e-9, x /= (739336991));
+ assertEquals(0, x &= ((x&(x|(-1530424204)))<<((((tmp = -295143065.9115021, tmp)>>x)+x)<<x)));
+ assertEquals(0, x <<= (-1311017801));
+ assertEquals(-0, x /= (-667133339.1918633));
+ assertEquals(1038307283, x += (1038307283));
+ assertEquals(506985, x >>>= ((tmp = 1550624472.9157984, tmp)^x));
+ assertEquals(506985, x >>>= ((254646626)<<(tmp = 1572845412.744642, tmp)));
+ assertEquals(32447040, x <<= (tmp = -2427326042, tmp));
+ assertEquals(0, x -= (x<<((x|x)>>>x)));
+ assertEquals(0, x &= x);
+ assertEquals(0, x &= ((-484420357)|((tmp = 807540590.6132902, tmp)/(x/x))));
+}
+f();
diff --git a/src/3rdparty/v8/test/mjsunit/numops-fuzz-part4.js b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part4.js
new file mode 100644
index 0000000..c4ea614
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/numops-fuzz-part4.js
@@ -0,0 +1,1177 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function f() {
+ var x = 0;
+ var tmp = 0;
+ assertEquals(-890607324, x ^= ((tmp = -890607324, tmp)>>((((-2876826295)>>x)<<((tmp = 2351495148.117994, tmp)>>(tmp = 1368611893.274765, tmp)))*(tmp = 1531795251, tmp))));
+ assertEquals(-729075363, x += (x+(tmp = 1052139285, tmp)));
+ assertEquals(531550884933581760, x *= x);
+ assertEquals(1980836332, x ^= ((-746269795.2320724)-((2400458512)>>((1290672548)>>>((((1536843439.5629003)&(3185059975.158061))*(tmp = -1339249276.2667086, tmp))&x)))));
+ assertEquals(941373096, x %= ((x+(-451098412))^(tmp = 1725497732, tmp)));
+ assertEquals(-1766019323, x += (tmp = -2707392419, tmp));
+ assertEquals(2528947973, x >>>= (x^(-896237435.3809054)));
+ assertEquals(-263192576, x <<= (-866361580));
+ assertEquals(-2008, x >>= (-2608071791));
+ assertEquals(-88, x %= (((-1076807218.4792447)&((tmp = 601044863, tmp)>>((tmp = 1228976729, tmp)+((((-2711426325)*x)|x)|(x%(-2700007330.3266068))))))&(tmp = 3147972836.778858, tmp)));
+ assertEquals(1762886843, x ^= (tmp = 2532080403, tmp));
+ assertEquals(1762886843, x %= ((((((tmp = -2059247788, tmp)>>x)/x)+(x<<x))^x)>>>(-1969283040.3683646)));
+ assertEquals(4812334726.587896, x += (tmp = 3049447883.587897, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(1, x *= x);
+ assertEquals(-2150507334, x -= ((tmp = 1578221999, tmp)+(tmp = 572285336, tmp)));
+ assertEquals(-4546475858941548500, x *= ((tmp = -931533139.5546813, tmp)^(tmp = 3061503275, tmp)));
+ assertEquals(-269064192, x |= ((207217276.91936445)<<(tmp = -957353678.4997551, tmp)));
+ assertEquals(1, x /= x);
+ assertEquals(1, x <<= (((1463856021.8616743)%(x*(tmp = -2286419102, tmp)))/(-2852887593)));
+ assertEquals(2223868564.8383617, x *= (tmp = 2223868564.8383617, tmp));
+ assertEquals(918797189.9033995, x -= ((1305071374.9349623)%(x+(2211992629))));
+ assertEquals(-2212004787.4668465, x -= (tmp = 3130801977.370246, tmp));
+ assertEquals(31783, x >>= (2951958960));
+ assertEquals(31783, x ^= ((((tmp = -2441511566, tmp)&((tmp = 91427553.90168321, tmp)+((tmp = 3001737720.327718, tmp)%x)))>>>(-2263859841))>>>((2109161329)>>(tmp = -2816295136.7443414, tmp))));
+ assertEquals(4068224, x <<= (x%((tmp = -682576250.4464607, tmp)*(x/(((x-x)>>>(x&((((x<<(x<<x))>>>((((2243036981.528562)/(((-1839328916.9411087)>>(-1907748022.162144))<<(x+x)))+((tmp = 2362574171, tmp)<<(tmp = 1987834539, tmp)))|(-444329240)))|(399451601.1717081))>>x)))&(968363335.6089249))))));
+ assertEquals(0.0030991932898194294, x /= ((tmp = 1067316540.5529796, tmp)^(-2388640366)));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>>= (tmp = -393433349.1636851, tmp));
+ assertEquals(0, x *= (((x^(((1806955787.471396)<<x)^((517668047.55566347)>>>(x%(x<<(tmp = -276586733.4844558, tmp))))))%(1661242196.1472542))|x));
+ assertEquals(0, x |= (x>>x));
+ assertEquals(-155236210, x |= (tmp = -155236210.19366312, tmp));
+ assertEquals(-606392, x >>= ((tmp = -1533446042.97781, tmp)^x));
+ assertEquals(-1, x >>= (936126810));
+ assertEquals(2325115611, x -= (-2325115612));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>= (tmp = -354826623, tmp));
+ assertEquals(-0, x *= (-1232528947.7321298));
+ assertEquals(0, x |= x);
+ assertEquals(0, x <<= (((tmp = 187758893.4254812, tmp)&(x-(tmp = 648201576, tmp)))&(385106597)));
+ assertEquals(0, x >>= (tmp = 2554891961, tmp));
+ assertEquals(-1311492611.2970417, x += (-1311492611.2970417));
+ assertEquals(-688179220.3221785, x += (623313390.9748632));
+ assertEquals(1416835528, x &= (tmp = 1953739224, tmp));
+ assertEquals(-11.04719252755072, x /= (-128252995));
+ assertEquals(-6.287413042114223e-9, x /= (tmp = 1757033052.1558928, tmp));
+ assertEquals(-4231171, x |= (((((2022730885.7773404)*((-2495777565.221855)|(tmp = 274627292, tmp)))<<(-3072596920.4902725))>>>((-2215057529)+(-1134713759.4247034)))^((tmp = -1888181788, tmp)/(572025985.2748461))));
+ assertEquals(-4194305, x |= ((tmp = 167328318.038759, tmp)>>>(153800904.34551537)));
+ assertEquals(-1316525687, x -= (1312331382));
+ assertEquals(1448723245.7863903, x += (2765248932.7863903));
+ assertEquals(1.7219707102205526, x /= (tmp = 841317008, tmp));
+ assertEquals(1872027792.5217001, x *= (x|(tmp = 1087142645.6665378, tmp)));
+ assertEquals(3504488055973669400, x *= x);
+ assertEquals(-1075254784, x |= x);
+ assertEquals(-5, x >>= (((844461331.8957539)-((x&x)<<((tmp = 1443904777, tmp)+(tmp = 736164505.3670597, tmp))))-(((tmp = 1348422110, tmp)>>((tmp = -2878252514, tmp)/(-1175443113)))|((-2138724317)%(2057081133)))));
+ assertEquals(-3.038875804165675e-9, x /= (1645345292.8698258));
+ assertEquals(1.25204541454491e-18, x /= (-2427129055.274914));
+ assertEquals(-1.7151576137235622e-9, x *= (-1369884505.6247284));
+ assertEquals(1590804618, x ^= (1590804618.4910607));
+ assertEquals(5061318665300252000, x *= (x+x));
+ assertEquals(5061318665300252000, x %= ((tmp = 1102144242, tmp)*x));
+ assertEquals(-7, x >>= (2772167516.624264));
+ assertEquals(16383, x >>>= (-2979259214.5855684));
+ assertEquals(47108415435, x *= ((2944456517.839616)>>>(1041288554.5330646)));
+ assertEquals(61, x >>>= (x^(((-1305163705)<<((948566605)-x))-x)));
+ assertEquals(0, x %= x);
+ assertEquals(0, x ^= (((tmp = 1918861879.3521824, tmp)/((x%(tmp = 945292773.7188392, tmp))%(x|x)))>>x));
+ assertEquals(-0, x *= ((((x|((2810775287)|(tmp = 1265530406, tmp)))^((tmp = 3198912504.175658, tmp)-(((tmp = 1422607729.281712, tmp)<<(tmp = 2969836271.8682737, tmp))&x)))<<((tmp = 844656612, tmp)*(((((tmp = -828311659, tmp)%(((-2083870654)>>>(x^(((((933133782)-(tmp = 1033670745, tmp))-(629026895.4391923))%((-605095673.8097742)*((((-227510375.38460112)*x)+x)&(((((tmp = 472873752.68609154, tmp)^(tmp = 2815407038.712165, tmp))+((x>>>((tmp = -1331030665.3510115, tmp)>>>(2281234581)))-(x>>>x)))&(tmp = -2160840573.325921, tmp))&x))))<<(tmp = 1411888595, tmp))))|(((tmp = -915703839.0444739, tmp)/((x+(418836101.8158506))%(-1112605325.4404268)))&((-3098311830.6721926)-x))))-((49446671.477988124)*(-2522433127)))+((tmp = 443068797, tmp)>>(tmp = 418030554.97275746, tmp)))*((tmp = 38931296.738208175, tmp)+(1842742215.3282685)))))-((tmp = 1325672181.205841, tmp)^(tmp = 669284428, tmp))));
+ assertEquals(-0, x *= (tmp = 93843030, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>>= (x%((((((tmp = -107458601, tmp)>>(x*((x|((tmp = 2117286494, tmp)>>((x^(tmp = 114214295.42048478, tmp))>>>(tmp = 1032826615, tmp))))&((x*x)&(-225386977.67686415)))))^((-780566702.5911419)+(-1113319771)))|(((x^x)<<(1288064444))>>(-2292704291.619477)))>>(365125945))-((tmp = -1986270727.235776, tmp)/x))));
+ assertEquals(-0, x *= (((-18925517.67125845)|((((-1975220517)+(tmp = -1250070128.296064, tmp))+(1085931410.5895243))<<(((x|(((x*(tmp = 160207581.50536323, tmp))|(tmp = 1798744469.7958293, tmp))-x))>>>(((x+((x%x)&((((x^x)<<((tmp = 2538012074.623554, tmp)^x))*x)&x)))/(x+(tmp = -2563837407, tmp)))/(tmp = 2189564730, tmp)))/(((-1703793330.5770798)<<((176432492)|x))<<(1347017755.345185)))))<<(((tmp = -577100582.7258489, tmp)&x)/(-31246973))));
+ assertEquals(0, x >>>= x);
+ assertEquals(NaN, x %= ((x*(tmp = 1167625971, tmp))&(((tmp = -770445060, tmp)>>((339248786)^((2058689781.2387645)-((-2381162024)*(660448066)))))&x)));
+ assertEquals(NaN, x += ((3088519732.515986)-(-267270786.06493092)));
+ assertEquals(0, x &= (tmp = 2748768426.3393354, tmp));
+ assertEquals(-1109969306, x ^= ((-1109969306)>>>x));
+ assertEquals(-1109969306, x %= (tmp = 1150376563.581773, tmp));
+ assertEquals(-2058145178, x &= (-2057586057));
+ assertEquals(-850185626, x |= ((x^(tmp = 1223093422, tmp))&((-589909669)<<(2299786170))));
+ assertEquals(1489215443, x += (2339401069));
+ assertEquals(-23592960, x <<= x);
+ assertEquals(2063937322, x ^= (-2053296342.2317986));
+ assertEquals(12922122, x %= (x^((-2259987830)>>(x*(((tmp = -799867804.7716949, tmp)&(tmp = -1068744142, tmp))*(((((1091932754.8596292)-((tmp = -1778727010, tmp)>>(((tmp = 1207737073.2689717, tmp)-(x-(tmp = -1191958946, tmp)))+(-631801383.7488799))))-(-618332177))>>>(-156558558))>>>(3032101547.6262517)))))));
+ assertEquals(12922122, x &= x);
+ assertEquals(Infinity, x /= (x%x));
+ assertEquals(0, x &= (x*(-227800722.62070823)));
+ assertEquals(-865648691, x ^= (-865648691));
+ assertEquals(1, x /= (x%(tmp = 1524739353.8907173, tmp)));
+ assertEquals(16, x <<= (x<<(2335214658.789205)));
+ assertEquals(0, x &= ((tmp = 570332368.1239192, tmp)^(-2278439501)));
+ assertEquals(1881145344, x -= (((-569715735.8853142)+(2093355159))<<(tmp = 2788920949, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x -= ((tmp = -1427789954, tmp)%((((((411038329.49866784)-x)-(x<<((-1330832247)+x)))/x)^((x*(845763550.2134092))>>(tmp = 1427987604.5938706, tmp)))>>>(1857667535))));
+ assertEquals(NaN, x /= (-313793473));
+ assertEquals(0, x >>>= (x/x));
+ assertEquals(1869358566, x -= (-1869358566));
+ assertEquals(-1901664519209545200, x += ((tmp = 944729941.3936644, tmp)*(-2012918653)));
+ assertEquals(-1901664519209545200, x += ((tmp = 1348246793, tmp)/(x&x)));
+ assertEquals(-1576791552, x &= (tmp = 2719250966.739456, tmp));
+ assertEquals(-305087899, x ^= (-2955630491.030272));
+ assertEquals(0, x ^= (x%(1575252839.559443)));
+ assertEquals(4184604407, x += ((((tmp = -244720076.17657042, tmp)|(2819320515))^((((tmp = 1222623743.9184055, tmp)*(-95662379.577173))/(x/(x+(((x-(tmp = -3024718107.6310973, tmp))^(-1494390781))&(tmp = 2284054218.8323536, tmp)))))>>>(tmp = 2090069761, tmp)))>>>(x%x)));
+ assertEquals(3148907440, x -= (((tmp = -332379100.7695112, tmp)-(-1145399547))^(((((((tmp = 3133792677.785844, tmp)+x)<<(2306999139.5799255))>>((tmp = -2051266106, tmp)*(((((x+(((-728654312.8954825)>>(x>>>(((x%x)&(-1587152364))|(((((-2114138294)&x)&(1547554688))^x)-(-1856094268)))))*(((-1135018784)&((x+(tmp = -1444020289, tmp))|x))+x)))>>x)&x)/(2449005489))<<((131073798.64314616)%(x>>>((-2592101383.2205048)^(tmp = -757096673.0381112, tmp)))))))^(2766467316.8307915))-(-2465892914.515834))-((((tmp = 234064056, tmp)^((x>>>(1622627548.7944543))+(-1750474146)))|(-1959662039.4687617))^((-1222880974)&(-2794536175.906498))))));
+ assertEquals(-1157627488, x &= (-1156639323));
+ assertEquals(-1342170624, x <<= ((x/((((1829945345.0613894)/(x*((tmp = 1278865203.0854595, tmp)/(((tmp = -2298274086.519347, tmp)+(tmp = -545203761, tmp))-(tmp = 2712195820, tmp)))))>>>((tmp = 240870798.9384452, tmp)-(tmp = -3188865300.4768195, tmp)))>>>(x%((648799266)>>>(tmp = 24460403.864815235, tmp)))))|((tmp = 232533924, tmp)|x)));
+ assertEquals(-2684341248, x += x);
+ assertEquals(1073755136, x &= (((-662718514.9245079)>>(tmp = -1915462105, tmp))+(tmp = 1478850441.8689613, tmp)));
+ assertEquals(-1073755136, x /= (x|((tmp = -1767915185, tmp)|((325827419.1430224)|(((-1343423676)|(tmp = -1929549501, tmp))|(-866933068.9585254))))));
+ assertEquals(-1073755136, x %= ((tmp = 547342356, tmp)-((tmp = 2213249646.7047653, tmp)-((((((-2463314705)^(tmp = -993331620, tmp))^(((x%x)>>(tmp = 1798026491.3658786, tmp))-(((1024072781)/(tmp = -2407354455, tmp))%(1973295010))))<<(-1966787233))^x)|(-1787730004)))));
+ assertEquals(-1073754452, x |= (tmp = 3099823788.077907, tmp));
+ assertEquals(-1540683096, x &= (-1540674632.7013893));
+ assertEquals(-1540683052, x ^= ((tmp = -126183090, tmp)>>>((-622437575.5788481)|((((tmp = -2947914022, tmp)%(((tmp = 2512586745, tmp)>>x)>>>((27238232.23677671)/(tmp = 3203958551, tmp))))/(tmp = 2906005721.402535, tmp))^((((tmp = 1763897860.737334, tmp)^(1445562340.2485332))/x)+(-2393501217.716533))))));
+ assertEquals(-1258599433, x |= (tmp = 351291767.59661686, tmp));
+ assertEquals(-1241560065, x |= (626346046.5083935));
+ assertEquals(-1241560065, x ^= ((2263372092)/((tmp = -2868907862, tmp)>>>x)));
+ assertEquals(-893685228, x -= (tmp = -347874837, tmp));
+ assertEquals(3401282068, x >>>= (x*x));
+ assertEquals(0, x %= x);
+ assertEquals(0, x >>>= x);
+ assertEquals(-2079237393, x ^= (tmp = 2215729903, tmp));
+ assertEquals(NaN, x %= ((((tmp = 3203450436, tmp)/(2867575150.6528325))&(1864945829))&((x&((((tmp = -1927086741.3438427, tmp)|x)|(-1783290909.3240588))*((-1074778499.0697656)*(x-((tmp = -848983542.8456669, tmp)^(tmp = -1324673961, tmp))))))>>(tmp = -2144580304.245896, tmp))));
+ assertEquals(-43334009, x |= (x^(-43334009.72683525)));
+ assertEquals(-43334009, x &= x);
+ assertEquals(-43334009, x %= (tmp = 1252450645.060542, tmp));
+ assertEquals(-43334009, x |= (((((((tmp = 968062202, tmp)/(x|(tmp = 2766801984, tmp)))*((2173353793.938968)>>(((tmp = -2459317247, tmp)<<(tmp = -2333601397, tmp))>>>((tmp = -578254251.8969193, tmp)*(tmp = 839964110.7893236, tmp)))))&(((1675305119)&(tmp = -929153707, tmp))*((x*x)*x)))/x)|(x/(tmp = 384740559.43867135, tmp)))%(1657362591)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x %= (-1334758781.1087842));
+ assertEquals(0, x -= x);
+ assertEquals(-54, x += ((tmp = -1787151355.470972, tmp)>>((tmp = 237028977, tmp)>>(((2829473542)<<(x>>>(((((((x-(-1950724753))*(((x>>>(2807353513.6283565))<<((-583810779.1155353)>>(x*x)))>>(-1068513265)))^(x^(-696263908.5131407)))%(((tmp = -1325619399, tmp)<<((tmp = -1030194450, tmp)-x))^x))+((-2852768585.3718724)>>(tmp = -3160022361, tmp)))%(x&x))>>(tmp = 2667222702.5454206, tmp))))+((804998368.8915854)<<x)))));
+ assertEquals(-54, x %= (-1601267268.4306633));
+ assertEquals(1, x >>>= (tmp = -543199585.579128, tmp));
+ assertEquals(4.732914708226396e-10, x /= (tmp = 2112862922, tmp));
+ assertEquals(-4266932650, x -= ((((x^((((tmp = 2784618443, tmp)^(tmp = -2271260297.9010153, tmp))|((((tmp = -599752639.7516592, tmp)*(2751967680.3680997))^(tmp = -1478450055.578217, tmp))*x))-x))&((tmp = -520061982, tmp)-((tmp = 1400176711.9637299, tmp)^(((2100417541)|(x+(tmp = -674592897.0420957, tmp)))>>x))))^(tmp = -365650686.7947228, tmp))>>>((-2943521813)&(((tmp = -1888789582, tmp)>>(tmp = 700459655.488978, tmp))+(tmp = -1725725703.655931, tmp)))));
+ assertEquals(224277168, x <<= (tmp = 2885115011.8229475, tmp));
+ assertEquals(224277168, x %= (tmp = -2655345206.442777, tmp));
+ assertEquals(850395136, x <<= (x-(((((-769868538.1729524)/((tmp = -298603579, tmp)%(x^x)))+((2691475692)|(((x>>>(628995710.4745524))^(x<<(((tmp = -1046054749, tmp)|(919868171))-x)))^((-1377678789.8170452)&((3065147797)%(tmp = 2638804433, tmp))))))^(tmp = -2036295169, tmp))&(((tmp = -157844758.08476114, tmp)*(tmp = -2819601496, tmp))&((((tmp = 78921441, tmp)<<(653551762.5197772))/(1801316098))*(-1479268961.8276927))))));
+ assertEquals(1645565728, x ^= (tmp = 1353013024, tmp));
+ assertEquals(1645565728, x >>>= x);
+ assertEquals(3020513544, x += (1374947816));
+ assertEquals(0, x %= x);
+ assertEquals(0, x %= ((((((tmp = -304228072.4115715, tmp)>>>((-90523260.45975709)-(tmp = -3013349171.084838, tmp)))%((-1640997281)*((tmp = -1600634553, tmp)%((tmp = 557387864, tmp)<<((888796080.766409)|(x^((((x%(((((tmp = 1164377954.1041703, tmp)*x)|(2742407432.192806))&((tmp = 1707928950, tmp)<<(1279554132.4481683)))+(tmp = -2108725405.7752397, tmp)))%(tmp = -465060827, tmp))^((tmp = 2422773793, tmp)+x))^((((((((tmp = -1755376249, tmp)^((-267446806)^x))/(((tmp = -1808578662.4939392, tmp)+((tmp = -1997100217, tmp)+x))+(((tmp = -2469853122.411479, tmp)/x)>>(tmp = 660624616.7956645, tmp))))%((x<<((((((tmp = -1701946558, tmp)-(tmp = 133302235, tmp))>>>x)/(738231394))<<(-1060468151.4959564))&(((((-1877380837.4678264)|(tmp = 2366186363, tmp))%x)>>>(-2382914822.1745577))>>((-1874291848.9775913)<<(tmp = 2522973186, tmp)))))<<(-2672141993)))|(tmp = 732379966, tmp))%x)^x)^x))))))))%(tmp = 2385998902.7287374, tmp))*x)+(tmp = -2195749866.017106, tmp)));
+ assertEquals(401488, x ^= (((-320896627)>>>(tmp = 2812780333.9572906, tmp))&(tmp = -2088849328, tmp)));
+ assertEquals(-1661116571.0046256, x += (tmp = -1661518059.0046256, tmp));
+ assertEquals(-1616122720, x <<= x);
+ assertEquals(-1616122720, x >>= x);
+ assertEquals(-390439413, x %= (tmp = -1225683307, tmp));
+ assertEquals(-84189205, x |= ((x|(2054757858))^(((x<<(((x|x)|(((x>>>((-2938303938.1397676)<<((2993545056)^((tmp = -643895708.5427527, tmp)/((1371449825.5345795)-(1896270238.695752))))))-(tmp = 1061837650, tmp))+(x+(tmp = 3072396681, tmp))))>>(x-((((tmp = -1877865355.1550744, tmp)&x)%(-2766344937))>>>(2055121782)))))-((x<<x)|(tmp = -2742351880.1974454, tmp)))<<((-2600270279.219802)>>(-1625612979)))));
+ assertEquals(-168378410, x += x);
+ assertEquals(-168378410, x &= x);
+ assertEquals(-1534983792, x &= (-1501412943));
+ assertEquals(-1821543761, x ^= (938439487));
+ assertEquals(-1821543761, x &= (x^(((tmp = -4237854, tmp)>>x)/x)));
+ assertEquals(2358, x >>>= (2954252724.620632));
+ assertEquals(4716, x <<= ((-75522382.8757689)/((tmp = 1074334479, tmp)|((tmp = -720387522, tmp)>>(x>>>(-3085295162.6877327))))));
+ assertEquals(-1313079316, x |= (2981887904.020387));
+ assertEquals(-1957790646, x -= (644711330));
+ assertEquals(17831, x >>>= ((tmp = -2550108342, tmp)-(((tmp = 454671414.0146706, tmp)+(-661129693.9333956))>>(x>>>(((tmp = 1752959432.3473055, tmp)*(-2619510342.1812334))%(tmp = -456773274.2411971, tmp))))));
+ assertEquals(689287937.6879716, x -= ((tmp = -397126863.6879716, tmp)-(((x>>x)^(x/(-1387467129.6278908)))|((x>>((tmp = -2361114214.8413954, tmp)<<(tmp = -805670024.4717407, tmp)))<<(-2724018098)))));
+ assertEquals(1378575875.3759432, x += x);
+ assertEquals(84112428460187.8, x *= (((((2681425112.3513584)%(tmp = -1757945333, tmp))|x)>>(-1793353713.0003397))%x));
+ assertEquals(-3221, x >>= (-1976874128));
+ assertEquals(-3221, x %= (((tmp = 2318583056.834932, tmp)|((tmp = -1016115125, tmp)+((-472566636.32567954)+x)))|(tmp = 3135899138.065598, tmp)));
+ assertEquals(-6596608, x <<= x);
+ assertEquals(-1249902592, x <<= (((tmp = -2025951709.5051148, tmp)/((-465639441)<<(-2273423897.9682302)))*((tmp = -2408892408.0294642, tmp)-(tmp = 1017739741, tmp))));
+ assertEquals(73802092170444800, x *= (tmp = -59046275, tmp));
+ assertEquals(-1619001344, x <<= x);
+ assertEquals(0, x <<= (tmp = 1610670303, tmp));
+ assertEquals(-0, x *= ((((x+(tmp = 2039867675, tmp))|(tmp = 399355061, tmp))<<(1552355369.313559))^x));
+ assertEquals(0, x *= x);
+ assertEquals(0, x >>>= (((2875576018.0610805)>>x)%(tmp = -2600467554, tmp)));
+ assertEquals(2290405226.139538, x -= (-2290405226.139538));
+ assertEquals(0, x %= x);
+ assertEquals(0, x ^= (((tmp = 2542309844.485515, tmp)-x)%((-2950029429.0027323)/(tmp = 2943628481, tmp))));
+ assertEquals(0, x += x);
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>>= (tmp = 2337330038, tmp));
+ assertEquals(0, x += (x/(((292272669.0808271)&(tmp = 2923699026.224247, tmp))^(tmp = 367745855, tmp))));
+ assertEquals(0, x &= x);
+ assertEquals(0, x %= ((tmp = 1565155613.3644123, tmp)<<(-308403859.5844681)));
+ assertEquals(-1845345399.3731332, x += (tmp = -1845345399.3731332, tmp));
+ assertEquals(5158590659731951000, x *= (-2795460763.8680177));
+ assertEquals(-364664, x >>= (1837745292.5701954));
+ assertEquals(1, x /= x);
+ assertEquals(-860616114.8182092, x += ((tmp = 2076961323.1817908, tmp)+(-2937577439)));
+ assertEquals(-860616115, x ^= ((x*(tmp = 2841422442.583121, tmp))>>>((tmp = 1929082917.9039137, tmp)>>(-2602087246.7521305))));
+ assertEquals(-38387843, x |= (3114677624));
+ assertEquals(2927507837, x += (tmp = 2965895680, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(-1792887531, x *= (-1792887531));
+ assertEquals(-0, x %= ((x^x)+x));
+ assertEquals(-0, x %= (tmp = 2800752702.562547, tmp));
+ assertEquals(1384510548, x ^= (tmp = 1384510548, tmp));
+ assertEquals(42251, x >>= (1645421551.363844));
+ assertEquals(0, x >>>= (17537561));
+ assertEquals(-2076742862, x ^= (tmp = 2218224434, tmp));
+ assertEquals(-2.790313825067623, x /= (744268563.3934636));
+ assertEquals(5313538, x &= (((((tmp = -2406579239.0691676, tmp)+((-1470174628)+(((tmp = -783981599, tmp)<<(tmp = -1789801141.272646, tmp))^(((((((tmp = -844643189.5616491, tmp)&(tmp = -252337862, tmp))&(x|x))%((-3159642145.7728815)+(tmp = 2149920003.9525595, tmp)))&(x>>(1737589807.9431858)))-((((((((1610161800)<<(497024994))>>x)<<x)/x)>>>x)&x)-(757420763.2141517)))-(tmp = -3061016994.9596977, tmp)))))/(tmp = 1810041920.4089384, tmp))&(tmp = 5887654.786785364, tmp))&((tmp = 1626414403.2432103, tmp)+(x%x))));
+ assertEquals(-2147483648, x <<= (tmp = 1304102366.8011155, tmp));
+ assertEquals(-208418816, x %= (((((-2850404799)*(x+(3158771063.226051)))*(-2017465205))/(x>>x))>>(x%(tmp = 2760203322, tmp))));
+ assertEquals(-2189223477, x -= (1980804661));
+ assertEquals(-859239912, x ^= (tmp = 2974421971.3544703, tmp));
+ assertEquals(-1599850415, x ^= (tmp = -2475871671.140151, tmp));
+ assertEquals(-1600636847, x += ((((tmp = -1311002944, tmp)<<((tmp = -1137871342, tmp)<<(tmp = 115719116, tmp)))/(413107255.6242596))<<(x>>((((-1908022173)&(((-1519897333)^((x>>(x*(tmp = -2886087774.426503, tmp)))*(tmp = 530910975, tmp)))+(-2579617265.889692)))+((2518127437.127563)>>>((tmp = 481642471.56441486, tmp)>>>(792447239))))^(x<<(248857393.6819017))))));
+ assertEquals(-191, x >>= (-1591265193));
+ assertEquals(-192.27421813247196, x += ((tmp = 2627329028.207775, tmp)/(tmp = -2061914644.9523563, tmp)));
+ assertEquals(1230613220, x ^= (tmp = 3064354212.307105, tmp));
+ assertEquals(1230613220, x &= x);
+ assertEquals(1230613220, x %= (1833479205.1064768));
+ assertEquals(1230613220, x >>>= ((((1559450742.1425748)|((2151905260.956583)*(1213275165)))%(514723483.12764716))>>>x));
+ assertEquals(1230613493, x |= ((((3004939197.578903)*(tmp = -576274956, tmp))+((tmp = 1037832416.2243971, tmp)^x))>>>(tmp = 2273969109.7735467, tmp)));
+ assertEquals(2461226986, x += x);
+ assertEquals(-27981, x >>= ((692831755.8048055)^((tmp = -1593598757, tmp)%(x-((((-1470536513.882593)|((tmp = -2716394020.466401, tmp)|(tmp = 2399097686, tmp)))&x)%x)))));
+ assertEquals(-1.4660454948034359e+23, x *= (((x>>>((((((tmp = -3056016696, tmp)<<(-2882888332))*(2041143608.321916))&(((tmp = -634710040, tmp)|(tmp = -2559412457, tmp))>>(1916553549.7552106)))%((-2150969350.3643866)*x))<<((x*(tmp = 2657960438.247278, tmp))|x)))%((tmp = 526041379, tmp)*(tmp = 2514771352.4509397, tmp)))*(1219908294.8107886)));
+ assertEquals(-1.4660454948034359e+23, x -= ((1709004428)>>(((x|(-422745730.626189))%x)>>x)));
+ assertEquals(-2247766068, x %= (-3105435508));
+ assertEquals(-386845856.0649812, x -= (-1860920211.9350188));
+ assertEquals(-386846803.0649812, x -= ((((-3214465921)|((tmp = -1326329034, tmp)+(((tmp = -1203188938.9833462, tmp)%((((((-1318276502)+(x+x))^((x<<x)%(x>>>x)))+(tmp = -439689881, tmp))+((-1455448168.695214)^(x-((-388589993)>>((((940252202)^(-2218777278))|x)/(tmp = -1007511556, tmp))))))&(-140407706.28176737)))-(x/((888903270.7746506)-((tmp = -2885938478.632409, tmp)<<(((((tmp = -1750518830.270917, tmp)>>(((((((tmp = 868557365.7908674, tmp)/(tmp = -2805687195.5172157, tmp))*x)|((((((-1342484550)-((tmp = 1089284576, tmp)^(tmp = 120651272, tmp)))<<(tmp = 2230578669.4642825, tmp))-(x*x))%(x^(((tmp = -3177941534, tmp)+(x>>(-1595660968)))/(-1738933247))))>>>(tmp = 2860175623, tmp)))-(((2392690115.8475947)>>>(tmp = -1754609670.2068992, tmp))>>>(tmp = 2615573062, tmp)))-(tmp = 2590387730, tmp))^((x+((((x-(tmp = -2823664112.4548965, tmp))*(200070977))>>>(((x|((((tmp = 1361398, tmp)>>((tmp = 1649209268, tmp)%x))+x)+(x>>>(tmp = -2379989262.1245675, tmp))))|(x^((tmp = -647953298.7526417, tmp)-x)))&(tmp = -1881232501.1945808, tmp)))>>>x))%(x^(tmp = -1737853471.005935, tmp)))))>>>(427363558))>>>((tmp = -3076726422.0846386, tmp)^(-1518782569.1853383)))/x)))))))|x)>>>(1854299126)));
+ assertEquals(-386846803.0649812, x -= (x%x));
+ assertEquals(238532, x >>>= (-448890706.10774803));
+ assertEquals(232, x >>>= (-791593878));
+ assertEquals(232, x <<= (((x^((x-x)&(tmp = 1219114201, tmp)))/(tmp = -427332955, tmp))%(tmp = 1076283154, tmp)));
+ assertEquals(210, x ^= (x>>>((2975097430)>>>x)));
+ assertEquals(1, x /= x);
+ assertEquals(2317899531, x *= (2317899531));
+ assertEquals(1131786, x >>>= x);
+ assertEquals(2301667519.6379366, x += ((tmp = 193109669.63793683, tmp)+(tmp = 2107426064, tmp)));
+ assertEquals(3842614963.6379366, x += (((-1676516834)>>>(tmp = -1817478916.5658965, tmp))^(((tmp = 1122659711, tmp)>>>(tmp = -2190796437, tmp))|(tmp = -2754023244, tmp))));
+ assertEquals(-452352333, x &= x);
+ assertEquals(-863, x >>= x);
+ assertEquals(-3.777863669459606e-7, x /= (2284359827.424491));
+ assertEquals(-3.777863669459606e-7, x %= ((tmp = -2509759238, tmp)>>>x));
+ assertEquals(0, x <<= (-814314066.6614306));
+ assertEquals(0, x %= (tmp = 190720260, tmp));
+ assertEquals(2301702913, x += (2301702913));
+ assertEquals(-249158048, x >>= (tmp = -2392013853.302008, tmp));
+ assertEquals(-249158048, x >>= x);
+ assertEquals(-498316096, x += x);
+ assertEquals(-498316096, x %= (tmp = 2981330372.914731, tmp));
+ assertEquals(106616.2199211318, x *= (((((tmp = 1020104482.2766557, tmp)^((tmp = -416114189.96786, tmp)>>>(1844055704)))|(tmp = 1665418123, tmp))>>(1826111980.6564898))/(-2446724367)));
+ assertEquals(106616, x |= x);
+ assertEquals(1094927345, x -= (((-1229759420)|(741260479.7854375))-x));
+ assertEquals(8353, x >>= x);
+ assertEquals(0, x >>>= (tmp = -327942828, tmp));
+ assertEquals(-953397616.8888416, x += (tmp = -953397616.8888416, tmp));
+ assertEquals(-1906641240.7776833, x += (x+((-3033450184.9106326)>>>(tmp = 2090901325.5617187, tmp))));
+ assertEquals(-1906641240.7776833, x %= (tmp = 2584965124.3953505, tmp));
+ assertEquals(-1098907671, x |= (tmp = -1272590495, tmp));
+ assertEquals(-1.8305258600334393, x /= (600323489));
+ assertEquals(-1, x &= x);
+ assertEquals(-1, x |= ((x+x)-x));
+ assertEquals(1, x *= x);
+ assertEquals(867473898, x ^= (tmp = 867473899.0274491, tmp));
+ assertEquals(6, x >>>= (tmp = 1174763611.341228, tmp));
+ assertEquals(0, x >>= ((689882795)^(2250084531)));
+ assertEquals(0, x /= (tmp = 2545625607, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x += x);
+ assertEquals(0, x -= (x*(-1098372339.5157008)));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x -= (tmp = -1797344676.375759, tmp));
+ assertEquals(1121476698, x |= (tmp = 1121476698, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(1, x &= (-191233693));
+ assertEquals(330137888.92595553, x += (330137887.92595553));
+ assertEquals(-1792236714, x ^= (tmp = 2256609910, tmp));
+ assertEquals(269000724, x &= (316405813.62093115));
+ assertEquals(256, x >>= x);
+ assertEquals(256, x %= ((2556320341.54669)|(1066176021.2344948)));
+ assertEquals(256, x |= x);
+ assertEquals(131072, x <<= ((-1650561175.8467631)|x));
+ assertEquals(-286761951, x -= ((tmp = 287024095, tmp)-((-2293511421)&(x|x))));
+ assertEquals(-1561852927, x &= (3002663949.0989227));
+ assertEquals(-460778761, x %= (tmp = -550537083, tmp));
+ assertEquals(-3023749308.0492287, x += (tmp = -2562970547.0492287, tmp));
+ assertEquals(-481313332.04922867, x %= ((x|((tmp = -855929299, tmp)%((2181641323)%(x|(220607471.33018696)))))&x));
+ assertEquals(17510668, x &= (tmp = 363557663, tmp));
+ assertEquals(12552, x &= (3020225307));
+ assertEquals(1814655896, x |= ((x<<(((-1475967464)*(-3122830185))*x))+(x^(-2480340864.2661023))));
+ assertEquals(-3209124403525266400, x -= ((1146847590)*(tmp = 2798213497, tmp)));
+ assertEquals(-6418248807050533000, x += x);
+ assertEquals(1.1856589432073933e+28, x *= (-1847324681.313275));
+ assertEquals(-1238853292, x ^= (-1238853292));
+ assertEquals(-77428331, x >>= (x&((((2043976651.8514216)>>>x)^(x>>>(((tmp = -1785122464.9720652, tmp)%x)<<(1570073474.271266))))*x)));
+ assertEquals(2011, x >>>= x);
+ assertEquals(2011, x &= x);
+ assertEquals(0, x >>= (-2682377538));
+ assertEquals(-1.1367252770299785, x -= (((tmp = 2704334195.566802, tmp)/(2379056972))%((((-1764065164)*((((468315142.8822602)>>((x%(((tmp = 2537190513.506641, tmp)+((x&(x|((tmp = -947458639, tmp)^(2653736677.417406))))*((x<<((1243371170.1759553)>>>(((tmp = 1572208816, tmp)<<((tmp = 963855806.1090456, tmp)>>>x))%((-3078281718.7743487)*x))))^(-1154518374))))^(-2839738226.6314087)))^((-2865141241.190915)*(-2400659423.8207664))))>>((tmp = 32940590, tmp)/(tmp = 2917024064.570817, tmp)))+(((27601850)/(tmp = 3168834986, tmp))>>x)))+(tmp = 2528181032.600125, tmp))/(3162473952))));
+ assertEquals(-1697395408.7948515, x -= (1697395407.6581264));
+ assertEquals(1536992607912062500, x *= (tmp = -905500627.5781817, tmp));
+ assertEquals(102759872, x >>= (tmp = -707887133.4484048, tmp));
+ assertEquals(102759872, x %= (tmp = -1764067619.7913327, tmp));
+ assertEquals(12543, x >>>= (-144142995.1469829));
+ assertEquals(-2059555229.2592103, x += ((-2059555229.2592103)-x));
+ assertEquals(-537022593, x |= (tmp = -2770761410.407701, tmp));
+ assertEquals(23777505, x ^= (-560496738.6854918));
+ assertEquals(-64329014115772310, x *= ((tmp = -2729234369.198843, tmp)+x));
+ assertEquals(189083830, x ^= (tmp = 933619934, tmp));
+ assertEquals(189083830, x %= ((tmp = -2918083254, tmp)-(x|(x^(-2481479224.0329475)))));
+ assertEquals(378167660, x += x);
+ assertEquals(-0.45833387791900504, x /= ((tmp = 2727991875.241294, tmp)<<(tmp = 2570034571.9084663, tmp)));
+ assertEquals(0, x <<= x);
+ assertEquals(-0, x /= (tmp = -67528553.30662966, tmp));
+ assertEquals(0, x <<= (938440044.3983492));
+ assertEquals(-945479171, x ^= (tmp = -945479171, tmp));
+ assertEquals(-225632619284361200, x *= (238643670.00884593));
+ assertEquals(-0, x %= x);
+ assertEquals(-585826304, x ^= ((-1256265560)<<(tmp = 1144713549, tmp)));
+ assertEquals(-671583855, x ^= (183333265.1468178));
+ assertEquals(-484311040, x <<= x);
+ assertEquals(-3969762.62295082, x /= ((((tmp = -1164308668.931008, tmp)-x)%x)>>>(((397816647)>>(-1605343671.4070785))<<x)));
+ assertEquals(758097879, x ^= ((tmp = -2871307491, tmp)^(-2043176492.646442)));
+ assertEquals(0, x *= ((x>>(tmp = 1983292927, tmp))&(tmp = -860505131.4484091, tmp)));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x &= x);
+ assertEquals(0, x %= ((3132981707)-(-2832016477)));
+ assertEquals(0, x >>= (x<<((1830195133.0342631)>>>(tmp = -1003969250, tmp))));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x += (tmp = 273271019.87603223, tmp));
+ assertEquals(NaN, x += (625749326.1155348));
+ assertEquals(0, x >>= (tmp = -531039433.3702333, tmp));
+ assertEquals(0, x -= (((tmp = 2029464099, tmp)-(x-(tmp = -329058111.411458, tmp)))*(x<<x)));
+ assertEquals(-0, x *= ((-1112957170.5613296)|((tmp = 847344494, tmp)>>>(tmp = 2735119927, tmp))));
+ assertEquals(-0, x /= (tmp = 544636506, tmp));
+ assertEquals(0, x >>>= (x^(545093699)));
+ assertEquals(0, x %= (((tmp = -2208409647.5052004, tmp)+(3083455385.374988))+(((-482178732.7077277)*x)>>>((2661060565)*(-2125201239)))));
+ assertEquals(0, x >>>= (-212334007.34016395));
+ assertEquals(0.7004300865203454, x -= ((2032883941)/(-2902336693.0154715)));
+ assertEquals(0, x <<= (x<<((265868133.50175047)>>>(1162631094))));
+ assertEquals(604920272.4394834, x -= (-604920272.4394834));
+ assertEquals(604920272, x &= x);
+ assertEquals(0, x <<= (((-1961880051.1127694)%(tmp = 1715021796, tmp))|((tmp = 2474759639.4587016, tmp)|(243416152.55635))));
+ assertEquals(-46419074, x |= (((tmp = -518945938.5238774, tmp)%((x+(tmp = 242636408, tmp))+(-1974062910)))|(1546269242.0259726)));
+ assertEquals(-46419074, x += ((-629802130)*((tmp = -658144149, tmp)%((-905005358.5370393)>>>x))));
+ assertEquals(-46419074, x |= (x%(-1103652494)));
+ assertEquals(7892881050983985, x *= (-170035297.36469936));
+ assertEquals(1105701997.4273424, x %= ((((-490612260.0023911)>>>(tmp = 1803426906, tmp))^(x%(2725270344.2568116)))-(1010563167.8934317)));
+ assertEquals(1088619532, x &= (-2232199650));
+ assertEquals(1073807364, x &= (-888024506.5008001));
+ assertEquals(1153062254980628500, x *= x);
+ assertEquals(1153062255703627000, x -= (tmp = -722998613.897227, tmp));
+ assertEquals(-1141418584, x |= (3017232552.4814596));
+ assertEquals(-373464140, x ^= (-2914372068));
+ assertEquals(994050048, x <<= x);
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= (tmp = -3166402389, tmp));
+ assertEquals(0, x &= ((-1760842506.337213)|(tmp = 2538748127.795164, tmp)));
+ assertEquals(-0, x /= (-2635127769.808626));
+ assertEquals(0, x &= ((((tmp = 1414701581, tmp)^(((2425608769)/((x<<x)^(x-x)))^((tmp = -2641946468.737288, tmp)|(tmp = -313564549.1754241, tmp))))*(tmp = -2126027460, tmp))|(-2255015479)));
+ assertEquals(225482894, x ^= (225482894.8767246));
+ assertEquals(0, x ^= x);
+ assertEquals(306216231, x += (tmp = 306216231, tmp));
+ assertEquals(306216231, x -= ((-465875275.19848967)&((-806775661.4260025)/((((-184966089.49763203)>>>((x>>x)+((tmp = -1951107532, tmp)|x)))%x)*((2704859526.4047284)%((x*x)>>x))))));
+ assertEquals(30754, x &= (1706162402.033193));
+ assertEquals(30454.010307602264, x -= (((590456519)>>>(tmp = 2713582726.8181214, tmp))/x));
+ assertEquals(8419062, x |= ((2848886788)<<(tmp = 2993383029.402275, tmp)));
+ assertEquals(16, x >>= (tmp = -1651287021, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(-1407643485, x ^= (-1407643486));
+ assertEquals(2, x >>>= (-1126004674));
+ assertEquals(470812081, x ^= ((-2411718964)>>>x));
+ assertEquals(550443688.6407901, x += (tmp = 79631607.6407901, tmp));
+ assertEquals(3669092443.64079, x -= (-3118648755));
+ assertEquals(-625874853, x <<= (((tmp = -1640437346, tmp)/(((x*x)>>>x)<<x))/x));
+ assertEquals(-1431439050363516700, x *= (2287101077));
+ assertEquals(-1921660672, x |= ((((((((-1912249689.9978154)&(-1676922742.5343294))*(2625527768))<<((820676465)^(((x+(tmp = -852743692, tmp))&((x-((((1361714551)/(311531668))>>>(tmp = -1330495518.8175917, tmp))<<(((tmp = 1369938417.8760853, tmp)*(-1217947853.8942266))<<(-2048029668))))-(-513455284)))>>>(tmp = 1980267333.6201067, tmp))))<<(((1503464217.2901971)>>(tmp = 2258265389, tmp))>>>(1868451148)))&(x-(x^(tmp = -1565209787, tmp))))*x)<<(tmp = -2426550685, tmp)));
+ assertEquals(-1921660672, x %= (((tmp = 523950472.3315773, tmp)+(((2971865706)^x)-x))&(-1773969177)));
+ assertEquals(420176973.1169958, x += (2341837645.116996));
+ assertEquals(420176973, x >>>= (((tmp = -2485489141, tmp)<<((tmp = -2520928568.360244, tmp)+x))&(543950045.0932506)));
+ assertEquals(50, x ^= (x|((tmp = 2001660699.5898843, tmp)>>>(tmp = 1209151128, tmp))));
+ assertEquals(138212770720.96973, x *= (2764255414.4193945));
+ assertEquals(-28683, x |= (((-535647551)|x)>>((((2065261509)>>(-354214733))*x)+(-3218217378.2592907))));
+ assertEquals(1627048838, x ^= (tmp = -1627044749, tmp));
+ assertEquals(-839408795, x ^= (2903337187.480303));
+ assertEquals(-1000652427, x += (tmp = -161243632, tmp));
+ assertEquals(740237908.4196916, x += ((tmp = 1587000348, tmp)+(tmp = 153889987.41969144, tmp)));
+ assertEquals(Infinity, x /= (((((-615607376.1012697)&(57343184.023578644))+((-1967741575)|(-3082318496)))<<(((tmp = -958212971.99792, tmp)>>(tmp = 2962656321.3519197, tmp))-(x|(x*(969365195)))))<<(tmp = -1739470562.344624, tmp)));
+ assertEquals(-Infinity, x /= ((tmp = -1736849852, tmp)%x));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x %= (tmp = -226505646, tmp));
+ assertEquals(1982856549, x -= (((x+(-1982856549))%(-2274946222))>>(x%(((tmp = -1289577208.9097936, tmp)>>x)^(778147661)))));
+ assertEquals(1648018703, x ^= ((3085618856)+((tmp = 1546283467, tmp)&(((x|((-2376306530)*(((((((tmp = -2807616416, tmp)%(((((tmp = 347097983.1491085, tmp)<<x)|(((((1135380667)/(x>>>(tmp = 1679395106, tmp)))^((1277761947)<<((tmp = -1614841203.5244312, tmp)>>x)))%((tmp = 1552249234.2065845, tmp)>>>x))>>>(tmp = -1677859287, tmp)))>>>(2605907565))/(tmp = 2291657422.221277, tmp)))%(((tmp = 425501732.6666014, tmp)>>>(1327403879.455553))+x))>>((tmp = -3075752653.2474413, tmp)&(x-(tmp = -71834630, tmp))))|((((2532199449.6500597)*(-842197612.4577162))%x)>>x))*(((1220047194.5100307)<<((tmp = 1642962251, tmp)<<((-662340)>>>((tmp = -1672316631.3251066, tmp)<<((tmp = 1762690952.542441, tmp)-(x/(1904755683.3277364)))))))>>x))|(((((tmp = 1625817700.7052522, tmp)%(tmp = -2990984460, tmp))|(2395645662))-((2619930607.550086)>>x))^(tmp = 130618712, tmp)))))&((-3142462204.4628367)/(1078126534.8819227)))%(((tmp = -256343715.2267704, tmp)+x)^(tmp = 2009243755, tmp))))));
+ assertEquals(1937698223, x |= (((tmp = 866354374.7435778, tmp)+(tmp = 2751925259.3264275, tmp))%(-2252220455)));
+ assertEquals(0, x -= x);
+ assertEquals(-823946290.6515498, x -= (tmp = 823946290.6515498, tmp));
+ assertEquals(706970324, x ^= (-457174758));
+ assertEquals(32916, x &= (25740724));
+ assertEquals(0, x >>>= ((-1658933418.6445677)|(tmp = -846929510.4794133, tmp)));
+ assertEquals(0, x ^= ((-834208600)/((-1256752740)&(tmp = 1973248337.8973258, tmp))));
+ assertEquals(-1639195806, x += (-1639195806));
+ assertEquals(-1559416478, x ^= ((tmp = 1349893449.0193534, tmp)*(tmp = 2044785568.1713037, tmp)));
+ assertEquals(0, x &= ((x>>(tmp = 1720833612, tmp))/((x+(-1305879952.5854573))^x)));
+ assertEquals(-0, x *= (tmp = -1713182743, tmp));
+ assertEquals(0, x >>= x);
+ assertEquals(NaN, x /= (((x%((x>>>(((-1515761763.5499895)^(-3076528507.626539))<<(tmp = 1293944457.8983147, tmp)))<<(tmp = 276867491.8483894, tmp)))>>(tmp = -2831726496.6887417, tmp))%((((tmp = 1780632637.3666987, tmp)^x)%((208921173.18897665)>>(tmp = 633138136, tmp)))+x)));
+ assertEquals(0, x >>= (tmp = -2755513767.0561147, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(840992300.0324914, x -= ((-840992300.0324914)+x));
+ assertEquals(840992300, x &= x);
+ assertEquals(-1094140277, x ^= (2364029095));
+ assertEquals(-Infinity, x /= ((((((1257084956)<<(2009241695))>>(x+x))*x)>>>x)>>>(205318919.85870552)));
+ assertEquals(-Infinity, x -= (((x>>>(tmp = 3037168809.20163, tmp))&x)*(x&(((806151109)*x)-(tmp = -1741679480.58333, tmp)))));
+ assertEquals(400659949, x ^= (tmp = 400659949, tmp));
+ assertEquals(5, x >>= (tmp = 1175519290, tmp));
+ assertEquals(5, x |= x);
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>= ((1317772443)&(x<<x)));
+ assertEquals(-1123981819, x ^= (tmp = 3170985477, tmp));
+ assertEquals(1123864651, x ^= ((x%(((x&x)&(-2606227299.7590737))<<((tmp = -2018123078.1859496, tmp)*x)))|(x+(((((1935939774.8139446)/((-1303958190)/(2802816697.32639)))<<((2880056582)*x))+x)+x))));
+ assertEquals(1543368927, x |= (-2795691884));
+ assertEquals(NaN, x /= (x%((tmp = -1129915114, tmp)<<x)));
+ assertEquals(NaN, x += (tmp = -3045743135, tmp));
+ assertEquals(NaN, x -= (tmp = -2849555731.8207827, tmp));
+ assertEquals(NaN, x /= (((((2127485827)>>>((((tmp = 363239924, tmp)>>x)|((((tmp = -1419142286.0523334, tmp)-(x<<x))^(tmp = -1990365089.8283136, tmp))*((tmp = 2780242444.0739098, tmp)>>>(((-2336511023.342298)&x)/(tmp = 2296926221.402897, tmp)))))>>((tmp = 1378982475.6839466, tmp)>>(tmp = -816522530, tmp))))&(x^(tmp = -1668642255.0586753, tmp)))%(((tmp = 921249300.1500335, tmp)^x)*(tmp = -2228816905, tmp)))>>x));
+ assertEquals(-1460685191, x |= (tmp = 2834282105, tmp));
+ assertEquals(-1463439264, x &= (tmp = 2881860064.146755, tmp));
+ assertEquals(20.98100714963762, x /= (((3017150580.7875347)^((250499372.5339837)<<(tmp = -42767556.30788112, tmp)))|(x%(-2829281526))));
+ assertEquals(1, x /= x);
+ assertEquals(2, x += x);
+ assertEquals(8, x <<= x);
+ assertEquals(0, x >>>= ((730174750)>>>x));
+ assertEquals(0, x ^= x);
+ assertEquals(-1459637373, x ^= (2835329923.456409));
+ assertEquals(-1233115861, x ^= (511678120));
+ assertEquals(95682857, x >>>= ((tmp = 1534570885, tmp)|(tmp = -414425499.3786578, tmp)));
+ assertEquals(70254633, x &= (-1502067585));
+ assertEquals(51384749748909710, x *= (tmp = 731407276, tmp));
+ assertEquals(9390482.873469353, x %= (tmp = -592576964.7982686, tmp));
+ assertEquals(4695241, x >>>= (tmp = -1879898431.5395758, tmp));
+ assertEquals(-3129811912538149000, x += (((-727481809)^((3106908604)%x))*((((tmp = -1218123690, tmp)^(x>>((-942923806)^x)))/(x+x))>>>(-1508881888.969373))));
+ assertEquals(1596870236, x ^= (-1135673764.9721224));
+ assertEquals(0, x ^= x);
+ assertEquals(2133782410, x |= (((-2202469371)>>((tmp = 1327588406.183342, tmp)/(tmp = 253581265.7246865, tmp)))-((tmp = 2226575446.838795, tmp)^x)));
+ assertEquals(-81895217.83608055, x -= (tmp = 2215677627.8360806, tmp));
+ assertEquals(812089344, x <<= ((tmp = 882824005, tmp)/(((x>>((((((((tmp = 1211145185, tmp)/((-137817273)-(((tmp = 2165480503.1144185, tmp)-(-1840859887.1288517))*((155886014.8393339)>>((-1984526598)<<(tmp = 1331249058.3246582, tmp))))))>>(x*x))%(2830324652))%(933701061))|(1346496215))^(tmp = -988800810, tmp))+x))>>>x)<<(-2372088384))));
+ assertEquals(812089344, x <<= x);
+ assertEquals(8472, x %= ((((x|(((x%(tmp = 2772099481.664402, tmp))+(2894690616))-x))&(x&(((-715790638.6454093)>>(tmp = -1447931029, tmp))-(tmp = 1761027889, tmp))))^x)%(((tmp = 830969811, tmp)|x)|((-1102267929)-(3193018687)))));
+ assertEquals(-0.0000028559857417864914, x /= (-2966401364));
+ assertEquals(0, x >>= x);
+ assertEquals(-701800392, x += (tmp = -701800392, tmp));
+ assertEquals(2034756873, x -= (tmp = -2736557265, tmp));
+ assertEquals(-0.9475075048394501, x /= (((((82879340.27231383)+((tmp = -2876678920.653639, tmp)*(-2801097850)))<<x)>>>((x<<(((((x|x)&(tmp = -1572694766, tmp))>>(x+(x/((x-(((tmp = 1435301275, tmp)|(tmp = 983577854.212041, tmp))>>(tmp = 632633852.1644179, tmp)))+x))))>>>x)|(-850932021)))>>x))<<(-821983991)));
+ assertEquals(0, x >>= (x>>(2424003553.0883207)));
+ assertEquals(2599386349, x -= (-2599386349));
+ assertEquals(-68157441, x |= (((tmp = -1170343454.9327996, tmp)+((((tmp = 448468098, tmp)|(x>>(x>>(((x>>(((x/(x&(x<<x)))<<(2436876051.2588806))^(3010167261)))%((tmp = 2577616315.7538686, tmp)>>>(-2953152591.015912)))%((tmp = -1304628613, tmp)/(x&((x|((-2000952119)%((691146914)/((tmp = 1480966978.7766845, tmp)<<((tmp = 2644449477.392441, tmp)|(-2143869305.871568))))))+(tmp = -315254308, tmp))))))))&(-2060205555))|((-604140518.8186448)^(x*x))))%(x*((tmp = 1383244000.2807684, tmp)/(3195793656)))));
+ assertEquals(-68157441, x |= x);
+ assertEquals(-1, x >>= x);
+ assertEquals(-2147483648, x <<= x);
+ assertEquals(-1.5257198286933313, x /= (tmp = 1407521622, tmp));
+ assertEquals(1149084989.47428, x += (((tmp = 1149084991.9004865, tmp)&x)^((((((2797053000)/(x^x))*(-2829253694))>>>((tmp = -610924351, tmp)>>x))>>>(tmp = -675681012, tmp))<<(2812852729))));
+ assertEquals(0, x %= x);
+ assertEquals(0, x <<= ((tmp = -584069073, tmp)*(-2953140326)));
+ assertEquals(0, x <<= (tmp = -481515023.6404002, tmp));
+ assertEquals(-1441535370, x ^= (2853431926));
+ assertEquals(2853431926, x >>>= (((((((tmp = 2215663525.9620194, tmp)%((-1102832735.9274108)/x))>>x)&(3220898702.76322))&(((2077584946)*((x>>x)<<((tmp = 1845701049, tmp)-x)))/(tmp = 1947184202.5737212, tmp)))|(((tmp = 2976351488, tmp)^(-42517339))%((2648230244.410125)^(1520051731.31089))))/(1761635964)));
+ assertEquals(43539, x >>>= (tmp = 1361671184.7432632, tmp));
+ assertEquals(21769, x >>= ((tmp = -804932298.9572575, tmp)>>((((tmp = 1749006993.253409, tmp)+(276536978))^x)|(2698166994))));
+ assertEquals(1103025563, x |= (tmp = 1103007891, tmp));
+ assertEquals(1327594607, x += (tmp = 224569044, tmp));
+ assertEquals(1327594607, x |= x);
+ assertEquals(-478674944, x <<= (((672378508)&x)^(((-2070209708.6470091)|x)|(x>>>x))));
+ assertEquals(-478674943, x ^= ((-1832457698.6345716)>>>((tmp = -3077714019, tmp)/(1809383028))));
+ assertEquals(229129701056053250, x *= x);
+ assertEquals(1, x /= x);
+ assertEquals(2, x <<= (-1522529727));
+ assertEquals(2, x &= x);
+ assertEquals(-2016989182, x |= ((((tmp = -1267845511, tmp)*(1225350332))+((tmp = -1397690831.5717893, tmp)>>>(tmp = -2575382994, tmp)))+x));
+ assertEquals(-241, x >>= (tmp = 931869591, tmp));
+ assertEquals(-1048087547, x &= (tmp = -1048087403.1163051, tmp));
+ assertEquals(-4004486369.844599, x += (tmp = -2956398822.844599, tmp));
+ assertEquals(-4004486368.844599, x -= (((2701878498)>>x)|(x|(-1079354967))));
+ assertEquals(1, x >>= (tmp = -1583689092, tmp));
+ assertEquals(1, x *= (x>>(x%x)));
+ assertEquals(0, x %= x);
+ assertEquals(-0, x *= (-120818969));
+ assertEquals(0, x >>= ((tmp = 1794099660, tmp)/(((x&(((-321906091)^(tmp = -3009885933.8449526, tmp))&((tmp = -140917780, tmp)|(2037803173.4075825))))&x)&(tmp = -745357154, tmp))));
+ assertEquals(0, x <<= (563984257.3493614));
+ assertEquals(NaN, x %= ((((x>>(tmp = -2190891392.320677, tmp))-x)<<(462714956))<<((tmp = -84413570, tmp)|((x|(-2787022855))-((tmp = 2028532622, tmp)|(tmp = 1103757073.9178817, tmp))))));
+ assertEquals(NaN, x *= ((2137674085.3142445)|((tmp = -1054749859.2353804, tmp)%x)));
+ assertEquals(NaN, x /= (x>>>(((((tmp = 597103360.9069608, tmp)>>>(-2850217714.1866236))-((tmp = 1125150527, tmp)*x))%(tmp = -982662312, tmp))|((x/(((968656808.6069037)*(((128484784.15362918)>>x)^x))&((((x/((((tmp = 748775979, tmp)*((x-(((tmp = 709571811.9883962, tmp)%(-2083567026))%(x/(tmp = -680467505, tmp))))/((tmp = -167543858, tmp)/(tmp = -3113588783, tmp))))/x)<<(-2605415230)))>>>(tmp = 3133054172, tmp))%(tmp = -1904650393, tmp))*((x|(-1193709562))*(tmp = -1731312795.718104, tmp)))))/((tmp = -672386301, tmp)/(tmp = 808898833.4163612, tmp))))));
+ assertEquals(-9, x |= (((((tmp = 150377964.57195818, tmp)/(tmp = 2161910879.0514045, tmp))-(-2381625849))>>(-2715928517))/(((452113643)^(-2502232011))/((-3076471740)^(((tmp = 1664851172, tmp)*(((-1460011714)>>>x)<<((-2870606437)%x)))*((tmp = -2836565755.609597, tmp)-((x/(tmp = -871461415, tmp))-(2278867564))))))));
+ assertEquals(-1, x >>= x);
+ assertEquals(-1, x |= ((-1319927272)>>>(-2866709980)));
+ assertEquals(-1, x >>= ((2345179803.155703)&(-978025218.2243443)));
+ assertEquals(1, x /= x);
+ assertEquals(-260730973, x |= (tmp = -260730973, tmp));
+ assertEquals(1174405120, x <<= (2681054073));
+ assertEquals(1174405120, x &= x);
+ assertEquals(1073741824, x &= (tmp = 2017166572.7622075, tmp));
+ assertEquals(1073741824, x |= x);
+ assertEquals(168806102, x %= ((((tmp = -2939969193.950067, tmp)|((-2325174027.614815)/(-2329212715)))*(x/(((((-2927776738)/(x|x))+(x%(tmp = -3007347037.698492, tmp)))<<(-1898633380))>>(tmp = 204338085.45241892, tmp))))^x));
+ assertEquals(168806102, x %= ((-832849739.5197744)&(tmp = -141908598, tmp)));
+ assertEquals(-401033205.05225074, x -= (tmp = 569839307.0522507, tmp));
+ assertEquals(-401033205, x &= x);
+ assertEquals(-401130402, x ^= ((x*(tmp = 311418759.22436893, tmp))>>x));
+ assertEquals(793533469, x ^= (-950312893.5201888));
+ assertEquals(756, x >>>= (-1096189516));
+ assertEquals(711, x += ((tmp = -753105189, tmp)>>(599823192.5381484)));
+ assertEquals(0, x >>>= ((tmp = -2859668634.4641137, tmp)+(-1160392986.1521513)));
+ assertEquals(2427599726.176195, x -= (-2427599726.176195));
+ assertEquals(1942312465.2523103, x -= (485287260.92388475));
+ assertEquals(0, x >>>= ((tmp = -1740656456, tmp)/(tmp = 1339746799.9335847, tmp)));
+ assertEquals(0, x <<= ((-7017077.38786912)*((-699490904.4551768)^x)));
+ assertEquals(0, x <<= (tmp = 715662384, tmp));
+ assertEquals(0, x *= (x>>>(2149735450.0758677)));
+ assertEquals(NaN, x /= x);
+ assertEquals(0, x >>= ((397078885)*((851639692.8982519)-x)));
+ assertEquals(0, x &= (-2526654445));
+ assertEquals(0, x %= (-1204924598));
+ assertEquals(251639720, x ^= (x|(tmp = 251639720, tmp)));
+ assertEquals(695433573, x ^= (663539405));
+ assertEquals(-1038050104, x -= (1733483677));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x &= (392107269));
+ assertEquals(0, x %= (-3084908458.241551));
+ assertEquals(0, x ^= x);
+ assertEquals(-2121660509, x ^= (tmp = -2121660509.7861986, tmp));
+ assertEquals(2285041855588855800, x *= (x|(3209046634)));
+ assertEquals(54915072, x >>>= (x%(((((x%((((tmp = -1429433339.5078833, tmp)|(tmp = 2906845137, tmp))^(3207260333))&(-848438650)))-(-2721099735))&(141851917.19978714))+x)/x)));
+ assertEquals(54915072, x &= x);
+ assertEquals(54915072, x %= (x+(1855489160)));
+ assertEquals(70078753, x ^= ((((((-1648661736)+(x%((-1421237596)+(tmp = 2053180992.3857927, tmp))))+(tmp = 38606889, tmp))<<((-241334284)%((x>>(215316122))*(tmp = 396488307, tmp))))+((tmp = -2900704565, tmp)^x))^(((1103481003.1111188)^x)-(tmp = 1304113534, tmp))));
+ assertEquals(1149501440, x <<= ((x>>(tmp = 3203172843, tmp))*(tmp = -192535531, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= ((tmp = 2751499787, tmp)&((tmp = 2217654798, tmp)*(tmp = -2798728014, tmp))));
+ assertEquals(NaN, x /= ((((-2019592425)>>>((((-1571930240.741224)>>>((-183952981)/((((1990518443.672842)>>(((((2051371284)%(685322833.6793983))>>>(2662885938))<<(-1212029669.6675105))|((-2790877875)<<(1546643473))))<<x)-(tmp = 804296674.4579233, tmp))))-(tmp = -417759051.68770766, tmp))/((-621859758)>>>x)))&x)<<(tmp = -48558935.55320549, tmp)));
+ assertEquals(0, x <<= (x&x));
+ assertEquals(0, x *= (x%(tmp = 301196068, tmp)));
+ assertEquals(398290944, x |= (((tmp = 1904146839, tmp)+(1521017178))*(-3174245888.562067)));
+ assertEquals(1256401076, x ^= (1566464180));
+ assertEquals(149620758, x %= ((tmp = 532626355, tmp)^(tmp = -382971203, tmp)));
+ assertEquals(149620791, x |= (x>>x));
+ assertEquals(-0.07034576194938641, x /= ((tmp = -1977313182.7573922, tmp)-x));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x &= x);
+ assertEquals(0, x /= ((2182424851.139966)%(((-2768516150)+x)>>>x)));
+ assertEquals(0, x %= (-504299638.53962016));
+ assertEquals(-0, x *= (-2915134629.6909094));
+ assertEquals(0, x <<= ((tmp = 952692723.402582, tmp)%(2146335996.785011)));
+ assertEquals(230457472, x |= ((tmp = -574776101.8681948, tmp)*(683185125)));
+ assertEquals(933795934, x ^= (tmp = 974395614, tmp));
+ assertEquals(933801974, x ^= (x>>>((-148683729)*(((tmp = 2912596991.415531, tmp)^(-2883672328))/x))));
+ assertEquals(222, x >>= (-3060224682));
+ assertEquals(27, x >>>= (1429156099.1338701));
+ assertEquals(754519106, x ^= (tmp = 754519129.7281355, tmp));
+ assertEquals(188629776, x >>>= ((x>>>((1247267193)<<(tmp = -936228622, tmp)))%((tmp = 978604324.8236886, tmp)*((tmp = -3018953108, tmp)^(((tmp = 259650195, tmp)>>>(tmp = 2762928902.7901163, tmp))*(x>>((tmp = 787444263.5542864, tmp)/(x>>>(((-2039193776)<<(tmp = -1408159169, tmp))-(1238893783))))))))));
+ assertEquals(188629775.33987066, x += ((tmp = 1040520414, tmp)/((-1576237184)|((tmp = -970083705, tmp)&(((tmp = -312062761.12228274, tmp)|(1171754278.2968853))<<(-2069846597.7723892))))));
+ assertEquals(1473670, x >>>= ((tmp = 202409672, tmp)^x));
+ assertEquals(2171703268900, x *= (x>>(((tmp = 840468550, tmp)&(-3208057101.2136793))/x)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x ^= (x&((tmp = 2569871408.2405066, tmp)|((tmp = -3149374622, tmp)<<(x-(x|((tmp = -821239139.1626894, tmp)>>>x)))))));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x %= (tmp = 1926106354, tmp));
+ assertEquals(0, x >>= ((x/(-2848416))/(tmp = 2484293767, tmp)));
+ assertEquals(0, x <<= ((tmp = -2484137114, tmp)>>>(tmp = -887083772.8318355, tmp)));
+ assertEquals(0, x >>= (tmp = -2651389432, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(1041871201, x += ((tmp = 1041871201.9272791, tmp)|(x<<(-1136959830))));
+ assertEquals(651390879501530900, x *= ((tmp = 1250424964.0346212, tmp)>>x));
+ assertEquals(1965815296.245636, x %= ((2650603245.655831)+((-1610821947.8640454)>>>(((878987151.6917406)*((((784630543)%(((1448720244)>>(((tmp = 3036767847, tmp)+((tmp = 1012548422, tmp)<<(1957000200)))-x))/(x>>x)))<<((tmp = 914710268, tmp)*(((x^(1559603121))<<(tmp = 3181816736, tmp))|((-1964115655)+x))))-(-1055603890)))&(946797797.0616649)))));
+ assertEquals(1965815296.245636, x %= (tmp = -2601038357.593118, tmp));
+ assertEquals(-769384440.872302, x += (-2735199737.117938));
+ assertEquals(-769384440.872302, x %= (2193123162));
+ assertEquals(1, x /= x);
+ assertEquals(1, x -= (((x>>>(-1968465925))*((tmp = 563037904, tmp)>>((tmp = 3009534415.769578, tmp)>>((-2567240601.7038674)<<(tmp = -1258402723.4150183, tmp)))))%(3112239470.276867)));
+ assertEquals(1, x |= x);
+ assertEquals(1505461527, x ^= (tmp = 1505461526.5858076, tmp));
+ assertEquals(406553877, x &= (tmp = 2558242293, tmp));
+ assertEquals(406553877, x |= x);
+ assertEquals(-574902339, x |= ((-709809495)%(tmp = -2880884811.410611, tmp)));
+ assertEquals(-20281777.349363208, x %= (22184822.46602547));
+ assertEquals(1, x /= x);
+ assertEquals(-4360732, x ^= ((x|(tmp = 3178620274, tmp))>>(((2686286888)&(((-1107223053.8716578)/(((-2955575332.3675404)+(-2770518721))|(-2705016953.640522)))-x))^((1473641110.4633303)*((((-1466496401)<<x)+x)%(1805868749.082736))))));
+ assertEquals(-1158545408, x <<= ((((x/((-2710098221.691819)-(-2421462965.788145)))/(((((x>>>(tmp = 1994541591.1032422, tmp))+(tmp = -1276676679.9747126, tmp))&((tmp = 1764029634.2493339, tmp)+((x|(tmp = -3050446156, tmp))-((tmp = -9441859, tmp)/(((-2072420232)&x)*(-1003199889))))))+(tmp = -2443230628, tmp))*x))*((x&((((x|(747566933))*(((2039741506)>>>((tmp = -2456000554, tmp)>>>(-1566360933.7788877)))^((tmp = 960600745, tmp)/x)))&(x^(((-2649310348.777452)^((2224282875)-(tmp = -2129141087.3182096, tmp)))<<((x<<x)+((-1307892509.3874407)-(x|(tmp = -2831643528.9720087, tmp)))))))/(((tmp = -35502946, tmp)<<((tmp = 1091279222, tmp)>>(((-2686069468.8930416)-x)+(tmp = 367442353.2904701, tmp))))%(1218262628))))/x))^(-919079153.7857773)));
+ assertEquals(747, x >>>= (1229157974));
+ assertEquals(747, x |= x);
+ assertEquals(NaN, x %= (((3086718766.4715977)*((7912648.497568846)*((-2713828337.1659327)*(-176492425.4011252))))<<(tmp = -1074475173, tmp)));
+ assertEquals(0, x >>>= ((((444923201)<<x)>>>(-883391420.2142565))*((((617245412)<<x)>>>x)*(-913086143.2793813))));
+ assertEquals(1941802406, x ^= (tmp = -2353164890, tmp));
+ assertEquals(14, x >>>= (-1600311077.4571416));
+ assertEquals(-18229482703.7246, x += (((x+(-993157139.7880647))%x)*(1862419512.1781366)));
+ assertEquals(-14.531388114858734, x /= ((tmp = -1649072797.951641, tmp)<<x));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= ((x/x)^x));
+ assertEquals(2, x ^= ((-1597416259)/(-738770020)));
+ assertEquals(0, x >>= (tmp = -387850072.74833393, tmp));
+ assertEquals(0, x >>>= ((2491085477.186817)>>(x*(((tmp = -1592498533, tmp)+(tmp = 2086841852, tmp))&(-3174019330.8288536)))));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x >>>= (tmp = -3045348659.45243, tmp));
+ assertEquals(-1208573479, x |= ((3086393817)-x));
+ assertEquals(1460649854142163500, x *= x);
+ assertEquals(1588199424, x <<= (-1902076952));
+ assertEquals(1586102272, x &= (tmp = 2139876091.9142454, tmp));
+ assertEquals(-460908552.5528109, x -= (tmp = 2047010824.552811, tmp));
+ assertEquals(-460908552.5528109, x %= (tmp = 507904117.09368753, tmp));
+ assertEquals(-460908552.5528109, x %= (2749577642.527038));
+ assertEquals(234012, x >>>= (-340465746.91275));
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x %= (tmp = -2601875531, tmp));
+ assertEquals(0, x %= (x|(tmp = 650979981.1158671, tmp)));
+ assertEquals(0, x %= (tmp = -2286020987, tmp));
+ assertEquals(0, x |= x);
+ assertEquals(0, x &= (x|((tmp = 2568101411, tmp)-(-1438002403))));
+ assertEquals(0, x >>>= (1399248574));
+ assertEquals(0, x %= (-1906670287.2043698));
+ assertEquals(0, x >>= (1019286379.6962404));
+ assertEquals(0, x |= (x/(tmp = -82583591.62643051, tmp)));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x *= (x^(1874776436)));
+ assertEquals(NaN, x -= ((-1238826797)-(-2971588236.7228813)));
+ assertEquals(0, x <<= (2064632559));
+ assertEquals(-0.5967273958864694, x += (((tmp = 1502995019, tmp)>>x)/(-2518729707)));
+ assertEquals(0, x >>>= x);
+ assertEquals(-0, x /= (-1923030890));
+ assertEquals(NaN, x %= x);
+ assertEquals(0, x >>= (tmp = 1081732779.9449487, tmp));
+ assertEquals(-820183066, x |= ((tmp = -3169007292.4721155, tmp)|(-1912588318)));
+ assertEquals(0, x -= x);
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x /= (tmp = 287181840, tmp));
+ assertEquals(0, x &= (x/((tmp = -1139766051, tmp)<<(x&(tmp = 2779004578, tmp)))));
+ assertEquals(0, x >>= (((tmp = -1816938028, tmp)+(-224851993.3139863))*(-2933829524)));
+ assertEquals(0, x |= ((((tmp = 305077929.1808746, tmp)&((x-(((((tmp = 2122810346.7475111, tmp)<<(717271979))*(tmp = 256854043.72633624, tmp))%((x+(tmp = -318657223.9992106, tmp))*((1993144830)<<(2594890698.603228))))^((((tmp = 257370667, tmp)>>>((((x^(3160746820))>>>(2049640466.8116226))>>>(2543930504.7117066))^(x-x)))^(x%(964838975)))^x)))%(x*x)))>>>x)*(tmp = -46861540, tmp)));
+ assertEquals(747575633, x ^= ((-2406502427)-(-3154078060.3794584)));
+ assertEquals(0, x *= (x%x));
+ assertEquals(0, x <<= (1313773705.3087234));
+ assertEquals(0, x >>>= ((x+x)>>>(3068164056)));
+ assertEquals(-0, x *= (tmp = -1771797797, tmp));
+ assertEquals(1784146970, x ^= (tmp = 1784146970, tmp));
+ assertEquals(1784146970, x >>>= (tmp = -2219972320.7195597, tmp));
+ assertEquals(1744830464, x <<= ((((-2769476584)-(((1798431604)>>(tmp = 1337687914.799577, tmp))>>>((-2802941943.15014)>>x)))>>>(tmp = 646033678, tmp))-x));
+ assertEquals(3044433348102455300, x *= x);
+ assertEquals(0, x >>= ((tmp = 1592076570.1900845, tmp)-((645774223.6317859)>>x)));
+ assertEquals(0, x >>= (x>>>(-3045822290.1536255)));
+ assertEquals(-0, x *= (tmp = -2450298800.986624, tmp));
+ assertEquals(0, x >>= (tmp = 1379605393, tmp));
+ assertEquals(0, x &= (((x-((((tmp = 837939461.6683749, tmp)+((((-813261853.3247359)|(x&(((-2565113940)*(tmp = -2725085381.240134, tmp))|x)))%(-1457259320))-(x+((tmp = -273947066, tmp)%((1164825698.879649)>>(1653138880.3434052))))))>>>(2823967606.411492))>>>((((((((1189235604.9646997)/(tmp = -2875620103.4002438, tmp))-(tmp = -801261493, tmp))<<(((1832556579.5095325)<<x)|((tmp = -2740330665, tmp)>>(tmp = -2352814025, tmp))))-(tmp = -1445043552.99499, tmp))&(x<<(((((445325471)*(1293047043.1808558))>>>(((1901837408.5910044)-(tmp = -2349093446.5313253, tmp))>>>(tmp = 1000847053.1861948, tmp)))*(x>>>(1771853406.6567078)))>>x)))>>>x)>>>(x^((tmp = 2813422715, tmp)-(x+(-342599947)))))))&(x>>>x))*x));
+ assertEquals(NaN, x %= ((tmp = -3027713526, tmp)-((((x%(((((x/((2711155710)^(((((x>>>x)%((1098599291.155015)^(((((tmp = 1855724377.8987885, tmp)/(x|x))*((-1963179786)*((x-((-1634717702)%x))<<x)))>>(2008859507))>>((tmp = 2635024299.7983694, tmp)^(tmp = -602049246, tmp)))))*(x>>x))&(tmp = -1925103609, tmp))*((tmp = 2106913531.2828505, tmp)%((tmp = -200970069, tmp)*(-2809001910.951446))))))%x)*((1990098169)>>((x<<(2303347904.2601404))%x)))|(2767962065.9846206))+(201589933.301661)))>>(((tmp = 1921071149.5140274, tmp)>>(1054558799.1731887))|x))*(x/((((-2833879637.345674)>>>(tmp = 2849099601, tmp))%x)+(x%(x%(((tmp = 1983018049, tmp)^(tmp = -2659637454, tmp))>>((-1335497229.6945198)-(x+(((((tmp = 1136612609.848967, tmp)%(2471741030.01762))<<(x|(((tmp = 1644081190.1972675, tmp)&(-1422527338))^(2379264356.265957))))/(tmp = 2979299484.1884174, tmp))/x)))))))))*((tmp = 1858298882, tmp)^((tmp = -547417134.9651439, tmp)*x)))));
+ assertEquals(-7664, x |= ((2286000258.825538)>>(1716389170)));
+ assertEquals(-1, x >>= x);
+ assertEquals(-1231640486.3023372, x += ((tmp = 1231640485.3023372, tmp)*x));
+ assertEquals(-2463280972.6046743, x += x);
+ assertEquals(1746, x >>>= x);
+ assertEquals(1746, x >>>= (((tmp = -562546488.0669937, tmp)*((-2475357745.8508205)&((x%(821425388.8633704))%((((-2315481592.687686)&(((tmp = 3130530521.7453523, tmp)+x)-x))^(-973033390.1773088))/x))))<<x));
+ assertEquals(1746, x %= (-1544973951.076033));
+ assertEquals(27936, x <<= (-525441532.33816123));
+ assertEquals(27936, x %= (x*((tmp = 344991423.5336287, tmp)+(-2267207281))));
+ assertEquals(27, x >>>= (tmp = 1249792906, tmp));
+ assertEquals(0, x >>>= (tmp = -1068989615, tmp));
+ assertEquals(0, x >>>= (tmp = 347969658.92579734, tmp));
+ assertEquals(-2656611892, x -= (2656611892));
+ assertEquals(1944539596, x |= (((tmp = 3000889963, tmp)-x)<<((tmp = 2917390580.5323124, tmp)^(-996041439))));
+ assertEquals(1944539596, x |= x);
+ assertEquals(-739740167.0752468, x -= ((1712009965.0752468)+(x>>((tmp = -740611560.99014, tmp)>>>((tmp = -1033267419.6253037, tmp)&(862184116.3583733))))));
+ assertEquals(-1479480334.1504936, x += x);
+ assertEquals(-4294967296.150494, x -= (x>>>((1219235492.3661718)&(3138970355.0665245))));
+ assertEquals(0, x >>= (x*x));
+ assertEquals(-0, x *= ((-2202530054.6558375)-(-676578695)));
+ assertEquals(-0, x %= (1336025846));
+ assertEquals(0, x &= x);
+ assertEquals(0, x /= (1759366510));
+ assertEquals(630007622, x |= (630007622));
+ assertEquals(-0.22460286863455903, x /= (tmp = -2804984753, tmp));
+ assertEquals(1102410276.775397, x -= (-1102410277));
+ assertEquals(1102410276.775397, x %= ((((-2569525203)&x)*(x|(-1932675298)))/((-2376634450)>>>(x>>>(tmp = 936937604.9491489, tmp)))));
+ assertEquals(33642, x >>= (3028252527));
+ assertEquals(2181106522.688034, x -= (-2181072880.688034));
+ assertEquals(-2113861630, x &= (2523921542));
+ assertEquals(-2147483646, x &= (-1996601566.9370148));
+ assertEquals(-2147483648, x &= (tmp = -665669175.1968856, tmp));
+ assertEquals(-2858673260.1367273, x -= (tmp = 711189612.1367272, tmp));
+ assertEquals(350657, x >>= (tmp = -170243892.25474262, tmp));
+ assertEquals(-0.0001405571562140975, x /= (-2494764474.7868776));
+ assertEquals(0, x ^= x);
+ assertEquals(NaN, x /= ((x&(-2041236879))*((tmp = -2182530229, tmp)^((1274197078)*x))));
+ assertEquals(0, x |= (x&(x-(1794950303))));
+ assertEquals(1222105379, x |= (tmp = 1222105379, tmp));
+ assertEquals(729884484, x ^= (tmp = 1666645607.6907792, tmp));
+ assertEquals(729884484, x %= (tmp = -2896922082, tmp));
+ assertEquals(8768, x &= ((tmp = 358940932, tmp)>>>(3159687631.3308897)));
+ assertEquals(1892384495, x |= (-2402591569));
+ assertEquals(1892470533, x += ((((x^(-2266612043))>>>(tmp = -531009952, tmp))<<(x>>>((-1365315963.5698428)>>>((x+((-3168207800.184341)-(tmp = 1776222157.609917, tmp)))+(-1588857469.3596382)))))>>>x));
+ assertEquals(143587205, x += (tmp = -1748883328, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= (tmp = 2334880462.3195543, tmp));
+ assertEquals(0, x &= ((tmp = 1819359625.4396145, tmp)|(tmp = -1323513565, tmp)));
+ assertEquals(-1102259874, x ^= (3192707422));
+ assertEquals(2567457772588852700, x *= (-2329267202));
+ assertEquals(-16783687, x |= ((-2212476227.060922)^(378973700.78452563)));
+ assertEquals(4278183609, x >>>= ((((((((tmp = 1766363150.197206, tmp)*(-2774552871))%x)>>>((3071429820)&((((((tmp = 351068445.27642524, tmp)<<(tmp = 2646575765, tmp))^(806452682))<<((x>>>(-2217968415.505327))<<(1564726716)))|x)-(tmp = -3110814468.9023848, tmp))))+x)^x)>>>(tmp = -617705282.0788529, tmp))>>>x));
+ assertEquals(4314933530, x -= ((1032195469.789219)|(tmp = -448053861.9531791, tmp)));
+ assertEquals(9709850, x %= (((tmp = -3056286252.5853324, tmp)*x)&x));
+ assertEquals(9709850, x %= (tmp = -2596800940, tmp));
+ assertEquals(2655489828.9461126, x -= (tmp = -2645779978.9461126, tmp));
+ assertEquals(369266212, x &= (((335712316.24874604)|(tmp = 33648215, tmp))-((x/(2639848695))<<((-499681175)<<(-2490554556)))));
+ assertEquals(-2147483648, x <<= (-834465507));
+ assertEquals(1073741824, x >>>= (((tmp = 3018385473.1824775, tmp)>>(x*(-2574502558.216812)))|(((tmp = -1742844828, tmp)*(1698724455))&x)));
+ assertEquals(-270818218, x += (-1344560042));
+ assertEquals(360710144, x <<= x);
+ assertEquals(0, x <<= (tmp = 612718075, tmp));
+ assertEquals(0, x <<= x);
+ assertEquals(-0, x /= (tmp = -1922423684, tmp));
+ assertEquals(-0, x *= ((((tmp = 741806213.3264687, tmp)%(-711184803.2022421))+((tmp = -3209040938, tmp)&(525355849.044886)))&(x<<(tmp = -698610297, tmp))));
+ assertEquals(0, x <<= (-482471790));
+ assertEquals(0, x &= ((-921538707)/(tmp = -482498765.988616, tmp)));
+ assertEquals(0, x ^= (x^x));
+ assertEquals(-351721702, x ^= (-351721702.8850286));
+ assertEquals(726242219625599900, x -= ((2064820612)*x));
+ assertEquals(1452484439251199700, x += x);
+ assertEquals(2.52318299412847e-15, x %= ((((x<<((2508143285)+x))>>(-2493225905.011774))%(1867009511.0792103))/((((x<<(2542171236))>>((x|x)&(tmp = -384528563, tmp)))+((-1168755343)*(1731980691.6745195)))+(tmp = -1608066022.71164, tmp))));
+ assertEquals(79905008, x += ((((-2702081714.590131)&(x+(tmp = -1254725471.2121565, tmp)))*(3088309981))%(((tmp = 1476844981.1453142, tmp)|((((tmp = -1243556934.7291331, tmp)%x)^(-1302096154))+((660489180)/(tmp = -681535480.8642154, tmp))))^(tmp = -8410710, tmp))));
+ assertEquals(1215822204, x ^= ((-3008054900)>>>(tmp = -1990206464.460693, tmp)));
+ assertEquals(-394790532, x |= ((((-1334779133.2038574)+(tmp = -1407958866.832946, tmp))<<(1699208315))-(((x^(x%x))<<(3216443))>>(x+((((2576716374.3081336)|((tmp = 2316167191.348064, tmp)&((51086351.20208645)&((x|(tmp = -357261999, tmp))^(x/x)))))*(-45901631.10155654))*(((-439588079)>>>((-2358959768.7634916)|(1613636894.9373643)))+(((-908627176)<<x)%(x%((-1669567978)>>>((x>>(1289400876))+(tmp = 2726174270, tmp)))))))))));
+ assertEquals(-0.17717467607696327, x /= (2228255982.974148));
+ assertEquals(-1905616474, x ^= (tmp = 2389350822.851587, tmp));
+ assertEquals(-0, x %= x);
+ assertEquals(2818124981.508915, x -= (-2818124981.508915));
+ assertEquals(-1476842315, x |= x);
+ assertEquals(73408564, x &= (-3147390604.3453345));
+ assertEquals(70, x >>>= x);
+ assertEquals(1, x >>= x);
+ assertEquals(3086527319.899181, x *= (3086527319.899181));
+ assertEquals(-145, x >>= x);
+ assertEquals(-145, x %= (tmp = -2500421077.3982406, tmp));
+ assertEquals(-1, x >>= (tmp = -2970678326.712191, tmp));
+ assertEquals(-1, x %= ((tmp = -535932632.4668834, tmp)+(((-1226598339.347982)<<((tmp = 616949449, tmp)/(tmp = 2779464046, tmp)))/(214578501.67984307))));
+ assertEquals(1, x *= x);
+ assertEquals(1, x >>= ((tmp = 11080208, tmp)<<(460763913)));
+ assertEquals(-1.8406600706723492e-19, x /= ((tmp = -2334126306.1720915, tmp)*(tmp = 2327566272.5901165, tmp)));
+ assertEquals(856681434186007200, x -= ((tmp = -2286974992.8133907, tmp)*(374591518)));
+ assertEquals(3126084224, x >>>= x);
+ assertEquals(-1160460669, x |= (tmp = 181716099, tmp));
+ assertEquals(873988096, x <<= (tmp = 406702419, tmp));
+ assertEquals(0, x <<= ((tmp = 802107965.4672925, tmp)-((tmp = 1644174603, tmp)>>((tmp = 604679952, tmp)+(tmp = -515450096.51425123, tmp)))));
+ assertEquals(NaN, x %= ((x>>(tmp = 2245570378, tmp))*(tmp = 1547616585, tmp)));
+ assertEquals(NaN, x /= ((tmp = -776657947.0382309, tmp)&(tmp = 163929332.28270507, tmp)));
+ assertEquals(NaN, x *= (tmp = 243725679.78916526, tmp));
+ assertEquals(NaN, x /= (x>>x));
+ assertEquals(0, x <<= ((tmp = -1293291295.5735884, tmp)%(((((63309078)>>>x)&(x&(-2835108260.025297)))+x)>>>(-1317213424))));
+ assertEquals(0, x *= ((((tmp = -1140319441.0068483, tmp)*(tmp = 2102496185, tmp))&(-2326380427))<<(tmp = -2765904696, tmp)));
+ assertEquals(0, x /= (tmp = 2709618593, tmp));
+ assertEquals(0, x >>= (-1753085095.7670164));
+ assertEquals(1766381484, x |= (-2528585812));
+ assertEquals(1766381484, x %= (2735943476.6363373));
+ assertEquals(1766381484, x %= (x*(tmp = 2701354268, tmp)));
+ assertEquals(-2147483648, x <<= (-323840707.4949653));
+ assertEquals(4611686018427388000, x *= (x<<x));
+ assertEquals(0, x <<= (3066735113));
+ assertEquals(0, x ^= ((((x*x)^(tmp = -2182795086.39927, tmp))<<(x^(tmp = 1661144992.4371827, tmp)))<<((((-2885512572.176741)*(tmp = 609919485, tmp))|(tmp = 929399391.0790694, tmp))>>>((((((((((399048996)>>((-107976581.61751771)>>>x))|(((-1502100015)<<(tmp = -1108852531.9494338, tmp))&(x/(tmp = -3198795871.7239237, tmp))))+((-2627653357)>>x))>>>x)*(1066736757.2718519))%(tmp = 1326732482.201604, tmp))/(tmp = 2513496019.814191, tmp))>>>((1694891519)>>>(-2860217254.378931)))<<(tmp = 31345503, tmp)))));
+ assertEquals(0, x ^= (x/((-2556481161)>>>(x/(x%(x&(1302923615.7148068)))))));
+ assertEquals(NaN, x /= x);
+ assertEquals(NaN, x += (tmp = 846522031, tmp));
+ assertEquals(0, x >>= (x+(-1420249556.419045)));
+ assertEquals(0, x ^= (((x%(-1807673170))&x)-x));
+ assertEquals(-3484.311990686845, x -= ((((((-510347602.0068991)>>>x)<<((tmp = 1647999950, tmp)&(((305407727)>>((1781066601.791009)&x))<<((tmp = -998795238, tmp)%(((x/x)+x)<<(((2586995491.434947)<<x)-((((tmp = 545715607.9395425, tmp)*x)>>>x)>>>(((((2332534960.4595165)^(-3159493972.3695474))<<(tmp = 867030294, tmp))|(2950723135.753855))^(((3150916666)<<x)>>((tmp = 414988690, tmp)|((tmp = -1879594606, tmp)/(tmp = 1485647336.933429, tmp))))))))))))>>(tmp = -2676293177, tmp))%(617312699.1995015))/((((tmp = -1742121185, tmp)^((((x&x)<<(tmp = 698266916, tmp))/(-1860886248))+((-213304430)%((((((-2508973021.1333447)+(tmp = 2678876318.4903, tmp))&(tmp = -43584540, tmp))-x)^(-2251323850.4611115))-x))))>>>(tmp = 2555971284, tmp))%((((tmp = 16925106, tmp)^x)&x)|((x/((x|(tmp = -2787677257.125139, tmp))<<(-853699567)))+(tmp = -1721553520, tmp))))));
+ assertEquals(-447873933.26863855, x += (-447870448.9566479));
+ assertEquals(200591060101520900, x *= x);
+ assertEquals(200591062202483420, x -= (-2100962536));
+ assertEquals(-5.261023346568228e+24, x *= ((tmp = -419641692.6377077, tmp)>>(tmp = -224703100, tmp)));
+ assertEquals(1269498660, x |= (195756836));
+ assertEquals(1269498660, x |= x);
+ assertEquals(1269498660, x |= x);
+ assertEquals(-37.75978948486164, x /= (((tmp = -595793780, tmp)+((tmp = 2384365752, tmp)>>>(1597707155)))|((968887032)^(tmp = 2417905313.4337964, tmp))));
+ assertEquals(-37.75978948486164, x %= (tmp = -1846958365.291661, tmp));
+ assertEquals(1102319266.6421175, x += (1102319304.401907));
+ assertEquals(-1664202255175155200, x -= ((x^(tmp = 407408729, tmp))*x));
+ assertEquals(-752874653, x ^= (tmp = 314673507, tmp));
+ assertEquals(-72474761, x |= (tmp = -2538726025.8884344, tmp));
+ assertEquals(-72474761, x |= x);
+ assertEquals(-122849418, x += ((tmp = -2332080457, tmp)|(((((30496388.145492196)*(((-1654329438.451212)|(-2205923896))&(x>>(tmp = -1179784444.957002, tmp))))&(tmp = 319312118, tmp))*(651650825))|(((-2305190283)|x)>>>(-428229803)))));
+ assertEquals(994, x >>>= x);
+ assertEquals(614292, x *= (((((2565736877)/((tmp = 649009094, tmp)>>>(((x>>>(2208471260))>>(x>>>x))%x)))&(tmp = 357846438, tmp))<<(tmp = -2175355851, tmp))%x));
+ assertEquals(1792008118, x |= (tmp = 1791924774.5121183, tmp));
+ assertEquals(1246238208, x &= (tmp = 1264064009.9569638, tmp));
+ assertEquals(-88877082, x ^= (2969289190.285704));
+ assertEquals(0.044923746573582474, x /= ((tmp = -3057438043, tmp)^(-1009304907)));
+ assertEquals(0, x <<= ((-828383918)-((((x>>(734512101))*(tmp = -3108890379, tmp))-(x|((tmp = 3081370585.3127823, tmp)^((-271087194)-(x/(tmp = -2777995324.4073873, tmp))))))%x)));
+ assertEquals(1604111507.3365753, x -= (-1604111507.3365753));
+ assertEquals(-1721314970, x ^= (tmp = -956686859, tmp));
+ assertEquals(-102247425, x |= (tmp = -2535095555, tmp));
+ assertEquals(-102247425, x %= (-955423877));
+ assertEquals(1053144489850425, x *= (((tmp = 1583243590.9550207, tmp)&(1356978114.8592746))|(tmp = -10299961.622774363, tmp)));
+ assertEquals(-0.0043728190668037336, x /= ((-1196259252.435701)*(((-689529982)|(tmp = -1698518652.4373918, tmp))<<x)));
+ assertEquals(-2, x ^= (((x+(tmp = 2961627388, tmp))>>(tmp = 231666110.84104693, tmp))|x));
+ assertEquals(-1, x >>= (tmp = -83214419.92958307, tmp));
+ assertEquals(-1, x %= (-1303878209.6288595));
+ assertEquals(2944850457.5213213, x -= (tmp = -2944850458.5213213, tmp));
+ assertEquals(-1.6607884436053055, x /= (-1773164107));
+ assertEquals(-0.6607884436053055, x %= ((x>>(1240245489.8629928))%(tmp = -3044136221, tmp)));
+ assertEquals(-0, x *= ((x*x)>>>((1069542313.7656753)+x)));
+ assertEquals(0, x >>>= (tmp = -202931587.00212693, tmp));
+ assertEquals(-0, x *= (-375274420));
+ assertEquals(0, x |= ((x/(((tmp = -876417141, tmp)*(x>>>x))&(-2406962078)))<<x));
+ assertEquals(0, x &= ((tmp = -650283599.0780096, tmp)*(tmp = 513255913.34108484, tmp)));
+ assertEquals(3027255453.458466, x += (3027255453.458466));
+ assertEquals(-12568623413253943000, x *= (((x-(198689694.92141533))|x)-x));
+ assertEquals(-12568623410285185000, x -= (tmp = -2968758030.3694654, tmp));
+ assertEquals(-2008903680, x &= (3111621747.7679076));
+ assertEquals(-110045263.26583672, x += (tmp = 1898858416.7341633, tmp));
+ assertEquals(15964, x >>>= (1141042034));
+ assertEquals(31928, x += x);
+ assertEquals(0, x ^= x);
+ assertEquals(-1159866377, x |= (-1159866377));
+ assertEquals(0, x ^= x);
+ assertEquals(3072699529.4306993, x -= (tmp = -3072699529.4306993, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(-1471195029, x |= (2823772267.429641));
+ assertEquals(-4152937108, x += (-2681742079));
+ assertEquals(142030188, x |= x);
+ assertEquals(270, x >>= (tmp = 1013826483, tmp));
+ assertEquals(0, x >>>= (529670686));
+ assertEquals(-2912300367, x -= (2912300367));
+ assertEquals(2213791134963007500, x *= (x<<((((-3214746140)>>(tmp = -588929463, tmp))+((tmp = -3084290306, tmp)>>x))>>x)));
+ assertEquals(2213791133466809900, x -= (tmp = 1496197641, tmp));
+ assertEquals(69834416, x >>>= (x|(((2755815509.6323137)^(x%(((x*((((tmp = 375453453, tmp)<<(x*x))>>(tmp = -973199642, tmp))*x))>>((tmp = -356288629, tmp)>>(tmp = 2879464644, tmp)))<<((((1353647167.9291127)>>>(x/x))<<((2919449101)/(2954998123.5529594)))^x))))&((-2317273650)>>>(tmp = 34560010.71060455, tmp)))));
+ assertEquals(69834416, x >>>= (x^(-2117657680.8646245)));
+ assertEquals(2217318064, x -= ((tmp = 2035883891, tmp)<<(tmp = -1884739265, tmp)));
+ assertEquals(-1272875686, x ^= (tmp = 805889002.7165648, tmp));
+ assertEquals(-1272875686, x >>= (x&(((1750455903)*x)>>((722098015)%((tmp = 1605335626, tmp)>>(tmp = -565369634, tmp))))));
+ assertEquals(-1274351316, x -= (x>>>((tmp = 2382002632, tmp)-((tmp = -2355012843, tmp)+(1465018311.6735773)))));
+ assertEquals(-2982908522.4418216, x -= ((tmp = 1635549038.4418216, tmp)+(((1952167017.720186)&((tmp = -2284822073.1002254, tmp)>>(-1403893917)))%(tmp = 655347757, tmp))));
+ assertEquals(312, x >>>= x);
+ assertEquals(1248, x <<= (2376583906));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x *= ((((tmp = 1914053541.881434, tmp)>>>(tmp = 1583032186, tmp))>>>(-2511688231))%(tmp = -2647173031, tmp)));
+ assertEquals(0, x >>>= (tmp = -2320612994.2421227, tmp));
+ assertEquals(0, x %= (((x+(tmp = -720216298.5403998, tmp))<<(414712685))>>(tmp = 480416588, tmp)));
+ assertEquals(0, x >>= ((((3039442014.271272)<<x)%(-2402430612.9724464))&((-2141451461.3664773)%((x>>(1361764256))/((tmp = -1723952801.9320493, tmp)%(477351810.2485285))))));
+ assertEquals(-0, x /= (tmp = -1627035877, tmp));
+ assertEquals(0, x >>>= (tmp = 1745193212, tmp));
+ assertEquals(0, x >>>= (2309131575));
+ assertEquals(NaN, x %= (((x*(tmp = -1730907131.6124666, tmp))%((((1481750041)|(x>>((((x>>>(tmp = 3128156522.5936565, tmp))/(tmp = -1277222645.9880452, tmp))^(tmp = -2327254789, tmp))+x)))>>>(-1161176960))>>>(tmp = 3135906272.5466847, tmp)))*(((((-2230902834.464362)^(1822893689.8183987))+(((tmp = 1597326356, tmp)/(x&((tmp = -3044163063.587389, tmp)>>(tmp = 2844997555, tmp))))%(x^x)))>>((x|x)/x))^(2634614167.2529745))));
+ assertEquals(0, x &= (3081901595));
+ assertEquals(0, x &= (-2453019214.8914948));
+ assertEquals(0, x &= x);
+ assertEquals(0, x >>>= (-596810618.3666217));
+ assertEquals(0, x >>= (((908276623)|x)/x));
+ assertEquals(0, x ^= x);
+ assertEquals(958890056, x |= (tmp = 958890056.474458, tmp));
+ assertEquals(1325436928, x <<= (tmp = -2474326583, tmp));
+ assertEquals(711588532333838300, x *= ((-148161646.68183947)<<(tmp = -1149179108.8049204, tmp)));
+ assertEquals(0, x ^= (((2862565506)%x)/(tmp = -2865813112, tmp)));
+ assertEquals(-2064806628, x += (((tmp = -2677361175.7317276, tmp)/((817159440)>>>(tmp = 1895467706, tmp)))^(x|(tmp = -2309094859, tmp))));
+ assertEquals(-69806982479424, x *= ((x&(tmp = 2857559765.1909904, tmp))&(-3166908966.754988)));
+ assertEquals(-430255744, x %= ((((((-2968574724.119535)<<x)<<((tmp = 1603913671, tmp)%((-1495838556.661653)^(tmp = 1778219751, tmp))))*(-400364265))<<((((1607866371.235576)-(1961740136))|(1259754297))&(tmp = -1018024797.1352971, tmp)))^x));
+ assertEquals(6.828637393208647e-7, x /= (x*(tmp = 1464421, tmp)));
+ assertEquals(0, x &= x);
+ assertEquals(-0, x *= (((tmp = -2510016276, tmp)-(2088209546))<<((tmp = -1609442851.3789036, tmp)+(tmp = 1919930212, tmp))));
+ assertEquals(-0, x %= (tmp = 1965117998, tmp));
+ assertEquals(-290294792.53186846, x += ((tmp = -2361555894.5318685, tmp)%(2071261102)));
+ assertEquals(-70873, x >>= (tmp = 2206814124, tmp));
+ assertEquals(-141746, x += x);
+ assertEquals(-141733.9831459089, x -= (((tmp = -806523527, tmp)>>>(tmp = 1897214891, tmp))/x));
+ assertEquals(-141733.9831459089, x %= ((tmp = 1996295696, tmp)<<(tmp = 3124244672, tmp)));
+ assertEquals(141733.9831459089, x /= (x>>(2688555704.561076)));
+ assertEquals(3196954517.3075542, x -= (tmp = -3196812783.3244085, tmp));
+ assertEquals(-19929155, x |= (((x|x)+x)^((tmp = 391754876, tmp)-(((((((tmp = -3051902902.5100636, tmp)*(x/(1546924993)))|(tmp = 1494375949, tmp))/((((-795378522)/(tmp = 509984856, tmp))>>>(tmp = -106173186, tmp))+x))|x)|(1916921307))>>>x))));
+ assertEquals(1279271449, x &= ((tmp = 1289446971, tmp)&(tmp = 1836102619, tmp)));
+ assertEquals(17876992, x <<= (-207633461));
+ assertEquals(0, x >>= (tmp = -903885218.9406946, tmp));
+ assertEquals(0, x >>>= x);
+ assertEquals(-2999, x -= (((754533336.2183633)%(tmp = 557970276.0537136, tmp))>>(tmp = -1171045520, tmp)));
+ assertEquals(-0.000003020470363504361, x /= (tmp = 992891715.2229724, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(0.45768595820301217, x %= ((tmp = 673779031, tmp)/(tmp = -1242414872.3263657, tmp)));
+ assertEquals(-980843052.1872087, x += (tmp = -980843052.6448946, tmp));
+ assertEquals(-Infinity, x /= ((((tmp = 317747175.8024508, tmp)&(x&(((tmp = 1632953053, tmp)>>x)/x)))%x)/(3145184986)));
+ assertEquals(0, x &= (x<<x));
+ assertEquals(0, x ^= (x-((2969023660.5619783)/x)));
+ assertEquals(0, x *= x);
+ assertEquals(NaN, x %= (x/(((x-x)/((tmp = -1622970458.3812745, tmp)-(1626134522)))&((((((tmp = 1384729039.4149384, tmp)^(x%(tmp = -2736365959, tmp)))+((-1465172172)%x))>>(tmp = -1839184810.2603343, tmp))^(((tmp = 1756918419, tmp)>>>(x+(x%(tmp = -2011122996.9794662, tmp))))<<(-3026600748.902623)))*((tmp = -2040286580, tmp)>>(-2899217430.655154))))));
+ assertEquals(0, x >>>= (tmp = 2100066003.3046467, tmp));
+ assertEquals(1362012169, x ^= (tmp = 1362012169, tmp));
+ assertEquals(1476312683, x |= ((457898409)>>>(-3079768830.723079)));
+ assertEquals(1441711, x >>>= (905040778.7770994));
+ assertEquals(2078530607521, x *= x);
+ assertEquals(-208193103, x |= ((tmp = -241750000, tmp)^x));
+ assertEquals(745036378, x ^= (((tmp = -1737151062.4726632, tmp)<<x)|(tmp = -1900321813, tmp)));
+ assertEquals(1744830464, x <<= x);
+ assertEquals(212992, x >>>= ((1210741037)-(x-(x>>>((x^(-1273817997.0036907))+((2401915056.5471)%(x<<(tmp = 1696738364.277438, tmp))))))));
+ assertEquals(0.0001604311565639742, x /= (1327622418));
+ assertEquals(0, x <<= (tmp = 166631979.34529006, tmp));
+ assertEquals(0, x *= ((((tmp = 657814984, tmp)/(((-831055031)>>>(1531978379.1768064))|((tmp = 2470027754.302619, tmp)^(-223467597))))/(tmp = 1678697269.468965, tmp))&(tmp = -1756260071.4360774, tmp)));
+ assertEquals(-2049375053, x ^= (tmp = -2049375053, tmp));
+ assertEquals(-1879109889, x |= (tmp = -1963586818.0436726, tmp));
+ assertEquals(718239919, x ^= (tmp = -1523550640.1925273, tmp));
+ assertEquals(-1361085185, x |= (-1939964707));
+ assertEquals(2, x >>>= (1864136030.7395325));
+ assertEquals(0.794648722849246, x %= ((-668830999)*(((-2227700170.7193384)%(x^(x>>>x)))/(tmp = 399149892, tmp))));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x *= x);
+ assertEquals(0, x &= ((tmp = -2389008496.5948563, tmp)|((((tmp = -2635919193.905919, tmp)*((-64464127)<<(2136112830.1317358)))>>((184057979)*(-1204959085.8362718)))>>>(-442946870.3341484))));
+ assertEquals(-243793920, x -= ((tmp = 3002998032, tmp)<<((537875759)<<x)));
+ assertEquals(0, x -= x);
+ assertEquals(0, x *= ((((66852616.82442963)/((((x^x)&(2975318321.223734))+(((tmp = -1388210811.1249495, tmp)^((((-680567297.7620237)%(x-(tmp = -672906716.4672911, tmp)))-x)*(tmp = -1452125821.0132627, tmp)))*(((2770387154.5427895)%x)%x)))-x))<<((-1481832432.924325)>>(tmp = 3109693867, tmp)))>>>(x/(((((((tmp = 928294418, tmp)^(((-1018314535)/(tmp = -3167523001, tmp))%((((((tmp = -1639338126, tmp)-(tmp = -2613558829, tmp))&x)/x)%(tmp = 513624872, tmp))/((-520660667)&x))))*(2620452414))^((tmp = 2337189239.5949326, tmp)*(3200887846.7954993)))>>>((tmp = 1173330667, tmp)^x))<<x)>>(((tmp = -2475534594.982338, tmp)*x)|x)))));
+ assertEquals(0, x /= (2520915286));
+ assertEquals(0, x &= x);
+ assertEquals(0, x >>= (-1908119327));
+ assertEquals(0, x >>>= (tmp = 549007635, tmp));
+ assertEquals(0, x >>= (-994747873.8117285));
+ assertEquals(0, x <<= ((((x>>>((-3084793026.846681)%((1107295502)&(tmp = -296613957.8133817, tmp))))&((19637717.166736007)/(x+x)))+x)/(-2479724242)));
+ assertEquals(-695401420, x += (-695401420));
+ assertEquals(-695401394, x += (x>>>(tmp = 2340097307.6556053, tmp)));
+ assertEquals(-555745552, x -= (x|(-483851950.68644)));
+ assertEquals(-17825792, x <<= x);
+ assertEquals(-17825792, x >>= x);
+ assertEquals(-17, x %= ((tmp = 1799361095, tmp)|((x>>(((-1201252592)<<((((543273288)+(-2859945716.606924))*x)<<((-3030193601)<<(3081129914.9217644))))|((1471431587.981769)>>(-246180750))))|(((tmp = -2689251055.1605787, tmp)>>x)&(((2131333169)^x)-((tmp = -951555489, tmp)/x))))));
+ assertEquals(-8912896, x <<= (1146444211));
+ assertEquals(2854567584, x += (tmp = 2863480480, tmp));
+ assertEquals(426232502.24151134, x %= (1214167540.8792443));
+ assertEquals(1806802048, x ^= (-2368317898));
+ assertEquals(432537600, x <<= (tmp = 2831272652.589364, tmp));
+ assertEquals(432537600, x %= (((1713810619.3880467)-x)&((-2853023009.553296)&(tmp = -3158798098.3355417, tmp))));
+ assertEquals(-509804066, x += (tmp = -942341666, tmp));
+ assertEquals(-509804066, x %= (-732349220));
+ assertEquals(259900185710132350, x *= x);
+ assertEquals(711598501.7021885, x %= ((tmp = 2020395586.2280731, tmp)-(tmp = 3031459563.1386633, tmp)));
+ assertEquals(711598503.0618857, x += ((tmp = 967558548.4141241, tmp)/x));
+ assertEquals(711598503, x &= x);
+ assertEquals(711598503, x ^= (((((1609355669.1963444)+((((tmp = -2660082403.258437, tmp)+(tmp = -235367868, tmp))&(x/x))*((-2595932186.69466)|((tmp = -3039202860, tmp)<<x))))>>>(-951354869))-((tmp = -691482949.6335375, tmp)/(tmp = -1735502400, tmp)))/(tmp = 798440377, tmp)));
+ assertEquals(558262613882868500, x *= (784519095.4299527));
+ assertEquals(558262611968479000, x -= ((((tmp = 1039039153.4026555, tmp)/(-3138845051.6240187))*(tmp = 633557994, tmp))&(1981507217)));
+ assertEquals(1170427648, x |= ((x>>((((-1086327124)%((tmp = -1818798806.368613, tmp)^(tmp = 2183576654.9959817, tmp)))>>x)&((((((tmp = 1315985464.0330539, tmp)&(2774283689.333836))%x)*((2722693772.8994813)&(tmp = -2720671984.945404, tmp)))^(tmp = -76808019, tmp))<<((tmp = 685037799.2336662, tmp)^((tmp = 1057250849, tmp)&(tmp = 1469205111.2989025, tmp))))))+(x*(((tmp = 448288818.47173154, tmp)-(-2527606231))-((8387088.402292728)>>x)))));
+ assertEquals(558, x >>>= (tmp = 2732701109, tmp));
+ assertEquals(558, x &= x);
+ assertEquals(-0.00015855057024653912, x /= ((x+(((tmp = -1963815633, tmp)-(x>>x))-((x|x)>>x)))/x));
+ assertEquals(1.3458861596445712e-13, x /= (-1178038492.4116466));
+ assertEquals(0, x <<= (-104550232));
+ assertEquals(0, x >>>= (x>>(tmp = -255275244.12613606, tmp)));
+ assertEquals(0, x >>= x);
+ assertEquals(375, x |= ((1576819294.6991196)>>>(-2570246122)));
+ assertEquals(96000, x <<= ((2252913843.0150948)>>>(-49239716)));
+ assertEquals(6144000, x <<= ((((tmp = -2478967279, tmp)&((x%((tmp = -1705332610.8018858, tmp)+(x+(tmp = 590766349, tmp))))<<(tmp = 1759375933, tmp)))+(-2024465658.849834))&(1564539207.3650014)));
+ assertEquals(-1149239296, x <<= (1862803657.7241006));
+ assertEquals(-9, x >>= (((tmp = 463306384.05696774, tmp)^x)|((x>>((((-2098070856.799663)<<((-2054870274.9012866)<<(((-2582579691)/(829257170.0266814))<<(((((tmp = -1753535573.7074275, tmp)<<((x>>(-197886116))%((2487188445)%(tmp = 2465391564.873364, tmp))))&(((tmp = -500069832, tmp)&(tmp = 3016637032, tmp))&((tmp = 2525942628, tmp)|((((-920996215)|x)^((((tmp = -687548533.419106, tmp)&(1423222636.058937))<<((tmp = -1096532228, tmp)>>((((tmp = -3124481449.2740726, tmp)^(tmp = 2724328271.808975, tmp))>>x)*x)))+(-1661789589.5808442)))+(((x*(tmp = -1224371664.9549093, tmp))^((tmp = 3202970043, tmp)^x))/(tmp = 131494054.58501709, tmp))))))|(((tmp = -1654136720, tmp)<<x)>>((1652979932.362416)-(tmp = -863732721, tmp))))^(-113307998)))))^(-90820449.91417909))*((tmp = 641519890, tmp)-((((x<<(tmp = 2349936514.071881, tmp))*(2324420443.587892))^x)%(x<<((tmp = -1838473742, tmp)/(((-3154172718.4274178)-x)+x)))))))|(x>>>((tmp = 2096024376.4308293, tmp)<<x)))));
+ assertEquals(81, x *= x);
+ assertEquals(81, x &= x);
+ assertEquals(81, x %= (tmp = 2223962994, tmp));
+ assertEquals(81, x ^= ((x/(((-1606183420.099584)|(-1242175583))&(((x|((tmp = 828718431.3311573, tmp)/(x>>x)))+(((-2207542725.4531174)^(x*x))*(tmp = 551575809.955105, tmp)))/x)))&((x>>x)&x)));
+ assertEquals(81, x %= (tmp = 279598358.6976975, tmp));
+ assertEquals(101.72338484518858, x -= (((tmp = 2452584495.44003, tmp)%((-1181192721)+(((x>>(((x&x)^x)+((x>>>((x+(-2472793823.57181))/(((2854104951)>>(-1208718359.6554642))>>>(1089411895.694705))))/(x|(-2821482890.1780205)))))^(-1786654551))/(-29404242.70557475))))/(((-4352531)<<((-1227287545)<<x))%(-2558589438))));
+ assertEquals(101.72338484518858, x %= (-943645643));
+ assertEquals(0, x -= x);
+ assertEquals(0, x >>>= (-2440404084));
+ assertEquals(0, x >>= (tmp = 1029680958.405923, tmp));
+ assertEquals(0, x >>>= (1213820208.7204895));
+ assertEquals(-0, x /= (tmp = -103093683, tmp));
+ assertEquals(0, x >>>= (-2098144813));
+ assertEquals(-0, x /= (((-3087283334)+(((tmp = -3129028112.6859293, tmp)%(tmp = 2413829931.1605015, tmp))-(2578195237.8071446)))|x));
+ assertEquals(-15, x |= ((((-178926550.92823577)>>>(-965071271))^((tmp = -484633724.7237625, tmp)-(tmp = 473098919.1486404, tmp)))>>((-2264998310.203265)%(tmp = -499034672, tmp))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x >>= (((-3207915976.698118)<<(tmp = 2347058630, tmp))|(tmp = -2396250098.559627, tmp)));
+ assertEquals(NaN, x %= x);
+ assertEquals(NaN, x *= (621843222));
+ assertEquals(0, x >>= (((-2409032228.7238913)*x)-(tmp = -887793239, tmp)));
+ assertEquals(NaN, x /= x);
+ assertEquals(1193017666, x ^= (tmp = 1193017666, tmp));
+ assertEquals(3.5844761899682753, x /= (tmp = 332829011.206393, tmp));
+ assertEquals(-888572929, x |= (((tmp = 1032409228, tmp)+(tmp = -1920982163.7853453, tmp))+x));
+ assertEquals(-1817051951333455600, x *= (((-1506265102)^(tmp = -775881816, tmp))-(tmp = -32116372.59181881, tmp)));
+ assertEquals(-1638479616, x |= x);
+ assertEquals(-114489, x %= (((tmp = -247137297.37866855, tmp)>>>((((((-322805409)-x)^x)>>((((((((x>>>(tmp = -900610424.7148039, tmp))/(-1155208489.6240904))|((-2874045803)|(tmp = 3050499811, tmp)))+(x/((tmp = -613902712, tmp)^((-982142626.2892077)*((((tmp = -3201753245.6026397, tmp)|((1739238762.0423079)^x))/(243217629.47237313))^((tmp = -11944405.987132788, tmp)/(tmp = 2054031985.633406, tmp)))))))*(tmp = 2696108952.450961, tmp))*x)>>>(tmp = 3058430643.0660386, tmp))>>(x<<x)))>>(-984468302.7450335))%((tmp = 1302320585.246251, tmp)>>>x)))%(tmp = -2436842285.8208156, tmp)));
+ assertEquals(2047, x >>>= (2380161237));
+ assertEquals(0, x >>= x);
+ assertEquals(0, x &= (tmp = 980821012.975836, tmp));
+ assertEquals(-1090535537, x -= ((-3064511503.1214876)&((tmp = -2598316939.163751, tmp)<<((tmp = -969452391.8925576, tmp)*x))));
+ assertEquals(-2181071074, x += x);
+ assertEquals(1, x >>>= ((2902525386.449062)>>x));
+ assertEquals(1, x += (x&(tmp = -2643758684.6636515, tmp)));
+ assertEquals(1, x %= ((tmp = -2646526891.7004848, tmp)/x));
+ assertEquals(448735695.7888887, x -= (tmp = -448735694.7888887, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(1, x >>= ((-480385726)<<(2641021142)));
+ assertEquals(1, x %= (375099107.9200462));
+ assertEquals(1, x >>= (((x&((tmp = -2402469116.9903326, tmp)%(tmp = -2862459555.860298, tmp)))*(tmp = -2834162871.0586414, tmp))%(((x>>>(tmp = 721589907.5073895, tmp))*(x^x))%(((tmp = 2844611489.231776, tmp)^((983556913)&(906035409.6693488)))^(x>>>(1239322375))))));
+ assertEquals(268435456, x <<= (tmp = 178807644.80966163, tmp));
+ assertEquals(44, x %= ((tmp = 2527026779.081539, tmp)>>>(2736129559)));
+ assertEquals(88, x += x);
+ assertEquals(0, x >>>= x);
+ assertEquals(0, x -= x);
+ assertEquals(-1523121602, x |= (2771845694));
+ assertEquals(-2, x >>= x);
+ assertEquals(-4, x += x);
+ assertEquals(-256, x <<= (((2522793132.8616533)>>(tmp = 77232772.94058788, tmp))+(3118669244.49152)));
+ assertEquals(4294967040, x >>>= x);
+ assertEquals(-256, x &= x);
+ assertEquals(1278370155.835435, x -= (-1278370411.835435));
+ assertEquals(-3.488228054921667, x /= (tmp = -366481243.6881058, tmp));
+ assertEquals(1.162742684973889, x /= ((x|(((((2404819175.562809)*(tmp = -2524589506, tmp))&(tmp = -675727145, tmp))>>>(x*x))&((-413250006)<<(tmp = 2408322715, tmp))))|((2940367603)>>>x)));
+ assertEquals(0, x >>>= ((2513665793)-(tmp = 1249857454.3367786, tmp)));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x ^= x);
+ assertEquals(1989998348.6336238, x -= (-1989998348.6336238));
+ assertEquals(903237918.986834, x %= (1086760429.6467898));
+ assertEquals(-4.4185765232981975, x /= (-204418304));
+ assertEquals(1471621914, x ^= (tmp = -1471621914.1771696, tmp));
+ assertEquals(1471621914, x |= ((((((x<<(tmp = -2676407394.536844, tmp))%(((343324258)+(x/(x>>(((-221193011)>>>x)|x))))>>(((-2737713893)^((tmp = -49214797.00735545, tmp)+((-2818106123.172874)/(tmp = -2361786565.3028684, tmp))))<<(1859353297.6355076))))*(tmp = -751970685, tmp))|((tmp = 2502717391.425871, tmp)/(tmp = -2647169430, tmp)))*((tmp = -1647567294, tmp)&(((tmp = 1819557651, tmp)/x)>>((((-3073469753)/x)-(((tmp = -1973810496.6407511, tmp)&((x-(x+(tmp = -2986851659, tmp)))>>>(tmp = -2226975699, tmp)))|(418770782.142766)))<<x))))*(((((tmp = 125466732, tmp)/((((1453655756.398259)|(((874792086.7064595)-(194880772.91499102))>>>x))%(x<<(tmp = -1445557137, tmp)))<<x))>>>(tmp = -1953751906, tmp))/((tmp = -2140573172.2979035, tmp)*((-108581964)^x)))|(-481484013.0393069))));
+ assertEquals(1454179065, x += ((tmp = 947147038.2829313, tmp)|(tmp = -154822975.3629098, tmp)));
+ assertEquals(1, x /= x);
+ assertEquals(1, x %= ((((((tmp = -2262250297.991866, tmp)-(tmp = 481953960, tmp))/(1629215187.6020458))|(2515244216))>>>((tmp = -3040594752.2184515, tmp)-(tmp = -1116041279, tmp)))^(((-182133502)-(1065160192.6609197))+(((((-1850040207)^(tmp = -1570328610, tmp))^(tmp = 20542725.09256518, tmp))*x)|(2386866629)))));
+ assertEquals(1, x &= (2889186303));
+ assertEquals(0, x >>= (((-1323093107.050538)>>(x%x))-(((((((-1736522840)+(tmp = -2623890690.8318863, tmp))*(959395040.5565329))*(233734920))<<((x+(x%((tmp = -2370717284.4370327, tmp)%(tmp = 2109311949, tmp))))-(tmp = -1005532894, tmp)))|(861703605))>>>((2399820772)/x))));
+ assertEquals(0, x >>= x);
+ assertEquals(57233408, x |= ((tmp = 2655923764.4179816, tmp)*(-1353634624.3025436)));
+ assertEquals(997939728, x |= (980552208.9005274));
+ assertEquals(1859642592476610800, x *= (1863481872));
+ assertEquals(-977190656, x <<= x);
+ assertEquals(4.378357529141239e+26, x *= ((((x/(((tmp = 2429520991, tmp)/(x/(tmp = 784592802, tmp)))-(tmp = -2704781982, tmp)))*(tmp = -2161015768.2322354, tmp))&((((-3164868762)>>(tmp = 2390893153.32907, tmp))^x)>>(-2422626718.322538)))*(tmp = 278291869, tmp)));
+ assertEquals(4.378357529141239e+26, x -= (1710777896.992369));
+ assertEquals(0, x &= (((((tmp = -2532956158.400033, tmp)|((2195255831.279001)|(1051047432)))|(-1628591858))|(tmp = -2042607521.947963, tmp))>>((-1471225208)/(((-133621318)>>(1980416325.7358408))*((1741069593.1036062)-(x|(2133911581.991011)))))));
+ assertEquals(-0, x /= (-656083507));
+ assertEquals(NaN, x += ((tmp = -1071410982.2789869, tmp)%x));
+ assertEquals(NaN, x *= (tmp = -1513535145.3146675, tmp));
+ assertEquals(0, x >>= ((2831245247.5267224)>>(x<<((x+(((3068824580.7922907)|(1708295544.275714))*((tmp = -1662930228.1170444, tmp)-(((tmp = 1979994889, tmp)<<(tmp = -1826911988, tmp))&((x/(x<<(1909384611.043981)))+(1958052414.7139997))))))<<(tmp = 2481909816.56558, tmp)))));
+ assertEquals(0, x *= (((tmp = -2979739958.1614842, tmp)&x)+x));
+ assertEquals(-0, x *= ((-332769864.50313234)^x));
+ assertEquals(0, x >>= ((((689018886.1436445)+(tmp = -2819546038.620694, tmp))|(((tmp = -1459669934.9066005, tmp)|x)/x))<<(((tmp = 2640360389, tmp)/((x%((-1947492547.9056122)%((1487212416.2083092)-(-1751984129))))^x))%(tmp = 2666842881, tmp))));
+ assertEquals(-1801321460, x |= (tmp = 2493645836, tmp));
+ assertEquals(-1801321460, x %= (2400405136));
+ assertEquals(-2905399858195810300, x *= (tmp = 1612926911, tmp));
+ assertEquals(-2905399858195810300, x -= (x>>(tmp = 1603910263.9593458, tmp)));
+ assertEquals(-238798848, x &= ((tmp = -2638646212.767516, tmp)/(((tmp = 1755616291.436998, tmp)>>>(tmp = 1083349775, tmp))-(x%(((tmp = 1728859105.53634, tmp)^(1931522619.0403612))/(tmp = 712460587.0025489, tmp))))));
+ assertEquals(-2363873607.2302856, x += (-2125074759.230286));
+ assertEquals(1712665, x &= (((117229515)>>>(((1707090894.1915488)>>>((-1696008695)>>(((-1045367326.7522249)<<(tmp = -209334716, tmp))-x)))|(-1707909786.080653)))%(1260761349.172689)));
+ assertEquals(1073741824, x <<= (tmp = -289437762.34742975, tmp));
+ assertEquals(1073741824, x &= (tmp = 2079141140, tmp));
+ assertEquals(0, x <<= ((x^(-3139646716.1615124))-(((-362323071.74237394)|(tmp = 2989896849, tmp))*(tmp = -218217991, tmp))));
+ assertEquals(0, x &= (tmp = -1476835288.425903, tmp));
+ assertEquals(0, x >>>= (tmp = 61945262.70868635, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(-2735263498.7189775, x -= (2735263498.7189775));
+ assertEquals(-1182289920, x <<= (x+x));
+ assertEquals(-1182289580, x ^= ((2858446263.2258)>>>(2387398039.6273785)));
+ assertEquals(696693056, x &= ((2178665823)*(-51848583)));
+ assertEquals(1652555776, x <<= (((tmp = 2943916975, tmp)-((-1544273901)>>(-1671503106.2896929)))|x));
+ assertEquals(6455296, x >>>= (tmp = 1492638248.675439, tmp));
+ assertEquals(2097152, x &= (((x|x)*(2873891571.7000637))^((2165264807)+(tmp = 451721563, tmp))));
+ assertEquals(2097152, x %= (tmp = 1089484582.1455994, tmp));
+ assertEquals(2097152, x <<= x);
+ assertEquals(2097152, x &= ((tmp = 119096343.4032247, tmp)^((-1947874541)*x)));
+ assertEquals(0, x &= (tmp = 2363070677, tmp));
+ assertEquals(0, x &= ((tmp = -1897325383, tmp)>>>((2368480527)>>>((tmp = 1837528979, tmp)*(-1838904077)))));
+ assertEquals(-1898659416, x ^= (-1898659416.1125412));
+ assertEquals(-725506048, x <<= x);
+ assertEquals(1392943104, x <<= (295287938.9104482));
+ assertEquals(-63620329, x ^= ((tmp = -3175925826.5573816, tmp)-(tmp = 2474613927, tmp)));
+ assertEquals(-1135111726, x -= ((tmp = -1133259081, tmp)^(((tmp = -742228219, tmp)>>((-7801909.587711811)%((tmp = -642758873, tmp)+(tmp = 2893927824.6036444, tmp))))^((tmp = -2145465178.9142997, tmp)+x))));
+ assertEquals(0, x ^= x);
+ assertEquals(660714589, x |= (660714589));
+ assertEquals(660714676, x ^= ((-376720042.8047826)>>>(2196220344)));
+ assertEquals(660714676, x |= ((((((((x<<(-1140465568))-(tmp = -1648489774.1573918, tmp))%(((tmp = -2955505390.573639, tmp)*x)<<((((tmp = -1769375963, tmp)*(tmp = -440619797, tmp))&((tmp = 1904284066, tmp)%(-2420852665.0629807)))+(-324601009.2063596))))>>(tmp = 2317210783.9757776, tmp))^((tmp = 750057067.4541628, tmp)^(tmp = -1391814244.7286487, tmp)))>>((344544658.6054913)%((tmp = -1508630423.218488, tmp)&(tmp = 1918909238.2974637, tmp))))>>((-647746783.685822)&(tmp = 2444858958.3595476, tmp)))&x));
+ assertEquals(-962337195, x ^= (tmp = -507358495.30825853, tmp));
+ assertEquals(-182008925.58535767, x %= (tmp = -195082067.35366058, tmp));
+ assertEquals(502070, x >>>= (tmp = 1459732237.1447744, tmp));
+ assertEquals(-2391009930.7235765, x -= (tmp = 2391512000.7235765, tmp));
+ assertEquals(1568669696, x <<= x);
+ assertEquals(0, x <<= (tmp = -571056688.2717848, tmp));
+ assertEquals(1770376226, x ^= (tmp = 1770376226.0584736, tmp));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= ((((x<<x)>>>x)|x)|(((tmp = -2141573723, tmp)^x)|(64299956))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x &= x);
+ assertEquals(0, x <<= (1106060336.7362857));
+ assertEquals(-0, x /= (x|(tmp = 2760823963, tmp)));
+ assertEquals(0, x <<= ((-2436225757)|(-1800598694.4062433)));
+ assertEquals(0, x >>>= ((-728332508.9870625)<<x));
+ assertEquals(-173377680, x ^= ((tmp = -173377680, tmp)%(tmp = -2843994892, tmp)));
+ assertEquals(-173377680, x |= ((((-819217898)&(tmp = -1321650255, tmp))&(x+((x^x)<<((1700753064)>>((((((-1038799327)>>((782275464)^x))-(tmp = -2113814317.8539028, tmp))>>(2143804838))&x)-((2970418921)/(-3073015285.6587048)))))))&((-1759593079.4077306)%((1699128805)-((tmp = -467193967, tmp)&(((2225788267.3466334)*(((2687946762.5504274)+x)>>>x))<<(-1853556066.880512)))))));
+ assertEquals(-0.5520657226957338, x /= ((tmp = -755493878, tmp)&(tmp = 918108389, tmp)));
+ assertEquals(0.30477656217556287, x *= x);
+ assertEquals(0, x &= ((tmp = -2746007517, tmp)<<(2749629340)));
+ assertEquals(0, x ^= ((x%(tmp = 1683077876, tmp))%(-162706778)));
+ assertEquals(0, x *= (tmp = 10203423, tmp));
+ assertEquals(119043212.1461842, x += (tmp = 119043212.1461842, tmp));
+ assertEquals(587202560, x <<= (tmp = 658697910.7051642, tmp));
+ assertEquals(-138689730, x |= (x-(tmp = 1296317634.5661907, tmp)));
+ assertEquals(-138663011, x -= ((-1751010109.5506423)>>(152829872)));
+ assertEquals(-138663011, x %= (-1266200468));
+ assertEquals(-138663011, x &= (x|((tmp = -571277275.622529, tmp)<<x)));
+ assertEquals(-138663011, x >>= ((971259905.1265712)*(tmp = 2203764981, tmp)));
+ assertEquals(-138663011, x %= (-904715829));
+ assertEquals(-138663011, x |= ((tmp = -2823047885.283391, tmp)>>>(((tmp = 533217000, tmp)|(650754598.7836078))|(-1475565890))));
+ assertEquals(-1610612736, x <<= x);
+ assertEquals(-1610612736, x &= x);
+ assertEquals(163840, x >>>= (-188885010));
+ assertEquals(-1224224814, x |= (tmp = 3070742482, tmp));
+ assertEquals(1498726395213334500, x *= x);
+ assertEquals(1723591210, x |= ((tmp = 615164458, tmp)|x));
+ assertEquals(1721910480, x ^= (x>>>x));
+ assertEquals(4505284605.764313, x -= (tmp = -2783374125.7643127, tmp));
+ assertEquals(-9504912393868483000, x *= (((tmp = 2896651872, tmp)<<(-2896385692.9017262))&(((((tmp = -2081179810.20238, tmp)|(tmp = -2484863999, tmp))>>((tmp = 1560885110.2665749, tmp)/(((tmp = 934324123.4289343, tmp)<<((tmp = -1591614157.0496385, tmp)+x))/(((x%(((tmp = 1672629986.8055913, tmp)%x)>>(tmp = 2116315086.2559657, tmp)))/(((-2687682697.5806303)>>x)/(-2034391222.5029132)))%(x-((((((tmp = 2598594967, tmp)/(((((((2950032233)%x)/x)^(tmp = -2126753451.3732262, tmp))<<(tmp = -3019113473, tmp))+(tmp = -2021220129.2320697, tmp))%((((-587645875.4666483)>>(((((x+x)+x)&(tmp = 533801785, tmp))|x)-((tmp = -2224808495.678903, tmp)/(1501942300))))>>>(-2558947646))>>((2798508249.020792)>>>x))))>>>((1060584557)/((((((((x&x)|(1426725365))>>>(tmp = 1500508838, tmp))>>(-1328705938))*((tmp = -2288009425.598777, tmp)>>>(((2586897285.9759064)%((-1605651559.2122297)>>>(tmp = 1936736684.4887302, tmp)))+((tmp = 2316261040, tmp)^(570340750.353874)))))&(x^((tmp = -2266524143, tmp)-(tmp = 2358520476, tmp))))+(tmp = 1449254900.9222453, tmp))%((-100598196)%((tmp = -2985318242.153491, tmp)>>((620722274.4565848)>>(871118975)))))))<<x)*(tmp = -1287065606.4143271, tmp))>>>(1038059916.2438471)))))))+((x/(-276990308.1264961))&(tmp = 2471016351.2195315, tmp)))|(((((tmp = -1288792769.3210807, tmp)+((tmp = -641817194, tmp)*(x<<(((-1933817364)>>(((tmp = 2084673536, tmp)|x)&x))&(tmp = -2752464480, tmp)))))%((796026752)*x))+(((tmp = -3083359669, tmp)|x)-((715303522)|(tmp = 181297266, tmp))))*(-1691520182.3207517)))));
+ assertEquals(0, x <<= (-2322389800));
+ assertEquals(0, x *= (tmp = 3188682235, tmp));
+ assertEquals(0, x |= (x>>>((tmp = -2729325231.8288336, tmp)^((-393497076.96012783)*(x/(tmp = -2198942459.9466457, tmp))))));
+ assertEquals(0, x ^= x);
+ assertEquals(0, x %= (2835024997.4447937));
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>= (tmp = 1109824126, tmp));
+ assertEquals(0, x <<= (3013043386));
+ assertEquals(206825782.74659085, x -= (-206825782.74659085));
+ assertEquals(-645346761227699500, x *= (-3120243292));
+ assertEquals(6825462, x >>= ((tmp = 1457908135, tmp)<<x));
+ assertEquals(-612366097.9189918, x -= (619191559.9189918));
+ assertEquals(-612306090.9189918, x -= ((2328676543.893506)>>x));
+ assertEquals(0, x ^= (x>>(((x>>>(1856200611.2269292))&(tmp = 2003217473, tmp))%((((((-107135673)+(((3062079356.170611)<<(tmp = -676928983, tmp))>>((tmp = -1487074941.2638814, tmp)|((-1601614031)/(1317006144.5025365)))))+x)*(((1163301641)>>>(448796567))/((x%((tmp = 72293197.34410787, tmp)+(-2304112723)))/((455610361)%(-2799431520)))))>>>(-217305041.09432888))<<(x-(tmp = -2168353649, tmp))))));
+ assertEquals(0, x >>= x);
+ assertEquals(-Infinity, x -= (((-1651597599.8950624)+(1780404320))/x));
+ assertEquals(0, x <<= (tmp = 2246420272.4321294, tmp));
+ assertEquals(0, x *= ((2793605382)-(tmp = -272299011, tmp)));
+ assertEquals(0, x *= x);
+ assertEquals(0, x <<= x);
+ assertEquals(0, x >>= (tmp = 2556413090, tmp));
+ assertEquals(0, x >>= ((tmp = -1784710085, tmp)%x));
+ assertEquals(0, x %= (tmp = -1929880813, tmp));
+ assertEquals(0, x *= (2586983368));
+ assertEquals(0, x &= x);
+ assertEquals(0, x <<= (-2144588807));
+ assertEquals(0, x ^= ((x<<(((((((-596537598)+((x-(((((((tmp = -3179604796, tmp)/((tmp = 1156725365.3543215, tmp)>>>(tmp = -2762144319, tmp)))%(x<<x))&((tmp = 1750241928.1271567, tmp)&(x/((tmp = 1781306819, tmp)|x))))+((((2893068644)/((tmp = -576164593.9720252, tmp)<<((2724671.48995471)&(tmp = -573132475, tmp))))%(tmp = -1355625108, tmp))&(tmp = -302869512.5880568, tmp)))+x)<<x))>>((tmp = -2569172808, tmp)/x)))^x)-(tmp = -1174006275.2213159, tmp))&x)&(((((((-2303274799)>>(tmp = -814839320, tmp))/(tmp = 183887306.09810615, tmp))>>(((tmp = 1054106394.3704875, tmp)|x)>>>x))-(x-(tmp = 1313696830, tmp)))-((tmp = 2373274399.0742035, tmp)|((((tmp = -3163779539.4902935, tmp)*(tmp = -3056125181.726942, tmp))&(((x^(x^(x/((tmp = -576441696.6015451, tmp)<<(tmp = -26223719.920306206, tmp)))))>>(tmp = -2332835940, tmp))|((-146303509.41093707)&(tmp = -2676964025, tmp))))/((((x*(tmp = 1059918020, tmp))|((((2341797349)|(tmp = -744763805.1381104, tmp))<<x)+((2991320875.552578)^(2920702604.701831))))^(-1721756138))^(((tmp = -2794367554, tmp)>>((-2671235923.2097874)<<(x&((((tmp = -621472314.0859051, tmp)-(((x*x)+x)>>>((tmp = 1834038956, tmp)+x)))*x)^(tmp = -2090567586.321468, tmp)))))<<(321395210))))))>>>(tmp = -1207661719, tmp)))+(-2877264053.3805156)))/(x%(tmp = -2226991657.709366, tmp))));
+ assertEquals(0, x *= (tmp = 986904991.061398, tmp));
+ assertEquals(0, x -= (x%(650819306.6671969)));
+ assertEquals(0, x >>>= (905893666.2871252));
+ assertEquals(0, x += (((tmp = 2501942710.4804144, tmp)&x)/((tmp = -851080399.1751502, tmp)-(-1168623992))));
+ assertEquals(-0, x *= (tmp = -2014577821.4554045, tmp));
+ assertEquals(0, x &= (tmp = 1995246018, tmp));
+ assertEquals(0, x %= (1724355237.7031958));
+ assertEquals(-954696411, x += (((-2825222201)+(((1662353496.1795506)>>>(x-x))|(tmp = 225015046, tmp)))^(x&x)));
+ assertEquals(-2158427339993389800, x *= (2260852052.1539803));
+ assertEquals(19559, x >>>= (-862409169.4978967));
+ assertEquals(-0.000012241163878671237, x /= (x^(tmp = 2697144215.160239, tmp)));
+ assertEquals(0, x -= x);
+ assertEquals(1448177644, x |= (tmp = 1448177644.624848, tmp));
+ assertEquals(1448177644, x %= (((-1497553637.4976408)+(402228446))<<x));
+ assertEquals(2304640553, x -= (-856462909));
+ assertEquals(152436736, x &= ((766686903)*(((tmp = 660964683.1744609, tmp)|((((tmp = 297369746, tmp)-(x+((tmp = -2677127146, tmp)/x)))>>(((((((x%(x<<x))-(((((529254728)|((x|(-1407086127.6088922))&(tmp = -1968465008.5000398, tmp)))/(x%x))&((((-2761805265.92574)-x)*(x^(tmp = 110730179, tmp)))%((177220657.06030762)*(((2532585190.671373)/x)+(-1465143151)))))<<((tmp = -3008848338, tmp)<<(-2475597073))))|((-192996756.38619018)|((((1445996780)|(x>>>((((tmp = -2482370545.791443, tmp)*(tmp = -270543594, tmp))^x)*((1346780586)/(tmp = -625613363.885356, tmp)))))-(x<<(x/(-562307527))))&(-125701272))))*((x&x)%(tmp = 752963070, tmp)))>>>(tmp = 17419750.79086232, tmp))*x)^(x^((-157821212.04674292)-(tmp = 503849221.598824, tmp)))))-(tmp = 1479418449, tmp)))>>>((((((-78138548.2193842)<<(((2319032860.806689)-(tmp = -1564963892.5137577, tmp))>>>(-73673322.28957987)))<<((1797573493.3467085)*x))>>(tmp = 759994997, tmp))>>>(-1066441220))&(((((((tmp = 1972048857, tmp)*(((x&((-1347017320.0747669)>>>x))*(-2332716925.705054))%(-376976019.24362826)))>>>((tmp = -466479974, tmp)+x))&(-2282789473.3675604))|(((((((((269205423.7510414)-(tmp = 21919626.105656862, tmp))*((x-(tmp = -378670528, tmp))>>(tmp = -1045706598, tmp)))>>(tmp = -3062647341.234485, tmp))>>>x)|(tmp = -285399599.9386575, tmp))%(tmp = 2731214562, tmp))|((((tmp = 837093165.3438574, tmp)|(tmp = -2956931321, tmp))+((1871874558.3292787)<<((x|((tmp = -3169147427, tmp)%(((x^x)%(1479885041))%((1769991217)%(tmp = -1899472458, tmp)))))*(tmp = -837098563.71806, tmp))))>>(tmp = -1866722748, tmp)))-(2037734340.8345597)))>>((tmp = -1262019180.5332131, tmp)+(x*(1274173993.9800131))))*(tmp = 2336989321.855402, tmp))))));
+ assertEquals(4, x >>= (tmp = -2577728327, tmp));
+ assertEquals(16, x *= (x<<((2622323372.580596)*(tmp = -1947643367, tmp))));
+ assertEquals(33554432, x <<= (tmp = -2938370507, tmp));
+ assertEquals(-2399497018.987414, x -= (tmp = 2433051450.987414, tmp));
+ assertEquals(1, x /= x);
+ assertEquals(2, x <<= x);
+ assertEquals(0, x >>= (x&x));
+ assertEquals(0, x <<= x);
+}
+f();
diff --git a/src/3rdparty/v8/test/mjsunit/numops-fuzz.js b/src/3rdparty/v8/test/mjsunit/numops-fuzz.js
deleted file mode 100644
index bd7e4fa..0000000
--- a/src/3rdparty/v8/test/mjsunit/numops-fuzz.js
+++ /dev/null
@@ -1,4609 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-function f() {
- var x = 0;
- var tmp = 0;
- assertEquals(0, x /= (tmp = 798469700.4090232, tmp));
- assertEquals(0, x *= (2714102322.365509));
- assertEquals(0, x *= x);
- assertEquals(139516372, x -= (tmp = -139516372, tmp));
- assertEquals(1, x /= (x%(2620399703.344006)));
- assertEquals(0, x >>>= x);
- assertEquals(-2772151192.8633175, x -= (tmp = 2772151192.8633175, tmp));
- assertEquals(-2786298206.8633175, x -= (14147014));
- assertEquals(1509750523, x |= ((1073767916)-(tmp = 919311632.2789925, tmp)));
- assertEquals(2262404051.926751, x += ((752653528.9267509)%x));
- assertEquals(-270926893, x |= (tmp = 1837232194, tmp));
- assertEquals(0.17730273401688765, x /= ((tmp = -2657202795, tmp)-(((((x|(tmp = -1187733892.282897, tmp))-x)<<(556523578))-x)+(-57905508.42881298))));
- assertEquals(122483.56550261026, x *= ((((tmp = 2570017060.15193, tmp)%((-1862621126.9968336)>>x))>>(x>>(tmp = 2388674677, tmp)))>>>(-2919657526.470434)));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= (tmp = 2705124845.0455265, tmp));
- assertEquals(0, x &= (-135286835.07069612));
- assertEquals(-0, x *= ((tmp = -165810479.10020828, tmp)|x));
- assertEquals(248741888, x += ((735976871.1308595)<<(-2608055185.0700903)));
- assertEquals(139526144, x &= (tmp = -1454301068, tmp));
- assertEquals(-0.047221345672746884, x /= (tmp = -2954726130.994727, tmp));
- assertEquals(0, x <<= (x>>x));
- assertEquals(0, x >>>= ((x+(912111201.488966))-(tmp = 1405800042.6070075, tmp)));
- assertEquals(-1663642733, x |= (((-1663642733.5700119)<<(x^x))<<x));
- assertEquals(-914358272, x <<= ((((-308411676)-(-618261840.9113789))%(-68488626.58621716))-x));
- assertEquals(-1996488704, x &= (-1358622641.5848842));
- assertEquals(-345978263, x += (1650510441));
- assertEquals(3, x >>>= (-1106714178.701668));
- assertEquals(1, x %= (((x>>(x>>(tmp = -3052773846.817114, tmp)))*(tmp = 1659218887.379526, tmp))&x));
- assertEquals(-943225672, x += (-943225673));
- assertEquals(-0.41714300120060854, x /= (tmp = 2261156652, tmp));
- assertEquals(0, x >>>= ((3107060934.8863482)<<(tmp = 1902730887, tmp)));
- assertEquals(0, x &= x);
- assertEquals(1476628, x |= ((tmp = -2782899841.390033, tmp)>>>(2097653770)));
- assertEquals(0.0008887648921591833, x /= ((tmp = 1661438264.5253348, tmp)%((tmp = 2555939813, tmp)*(-877024323.6515315))));
- assertEquals(0, x <<= (tmp = -2366551345, tmp));
- assertEquals(0, x &= (tmp = 1742843591, tmp));
- assertEquals(0, x -= x);
- assertEquals(4239, x += ((-3183564176.232031)>>>(349622674.1255014)));
- assertEquals(-67560, x -= ((2352742295)>>>x));
- assertEquals(-67560, x &= x);
- assertEquals(-0.00003219917807302283, x /= (2098190203.699741));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= ((((tmp = -869086522.8358297, tmp)/(187820779))-(tmp = -2000970995.1931965, tmp))|(1853528755.6064696)));
- assertEquals(0, x >>= (-3040509919));
- assertEquals(0, x %= (((tmp = -2386688049.194946, tmp)<<(tmp = -669711391, tmp))|x));
- assertEquals(0, x %= (tmp = -298431511.4839926, tmp));
- assertEquals(0, x /= (2830845091.2793818));
- assertEquals(0, x /= ((((-2529926178)|x)^((tmp = 2139313707.0894063, tmp)%((-1825768525.0541775)-(-952600362.7758243))))+x));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x -= x);
- assertEquals(NaN, x /= (tmp = -432944480, tmp));
- assertEquals(0, x <<= (((((x^((-1777523727)+(2194962794)))>>>(((((-590335134.8224905)%(x*(2198198974)))|(tmp = -2068556796, tmp))/(1060765637))*(-147051676)))/((tmp = -477350113.92686677, tmp)<<((x/(2018712621.0397925))^((tmp = 491163813.3921983, tmp)+(((x|((((x%(1990073256.812654))%((-2024388518.9599915)>>((tmp = 223182187, tmp)*(-722241065))))>>>(tmp = 2517147885.305745, tmp))%(1189996239.11222)))&x)%(-306932860))))))&((tmp = 1117802724.485684, tmp)+((-1391614045)-x)))%((((x>>((2958453447)*x))^(((410825859)|(((tmp = -1119269292.5495896, tmp)>>>(((((((x%(tmp = 648541746.6059314, tmp))*((-2304508480)<<((((x^(1408199888.1454597))|((251623937)|x))/((-382389946.9984102)|(tmp = -2082681143.5893767, tmp)))-(((tmp = 631243472, tmp)>>>(1407556544))/(((x>>>x)>>>(tmp = -6329025.47865057, tmp))>>>(tmp = 948664752.543093, tmp))))))/((((-183248880)>>x)&x)&x))>>x)&(((-978737284.8492057)%(tmp = 2983300011.737006, tmp))&(tmp = 2641937234.2954116, tmp)))<<x)>>(2795416632.9722223)))%((((tmp = -50926632, tmp)/x)&(((tmp = -2510786916, tmp)/x)/(-699755674)))|((((tmp = 1411792593, tmp)>>(924286570.2637128))>>((1609997725)>>(2735658951.0762663)))*(tmp = 726205435, tmp)))))<<(tmp = -2135055357.3156831, tmp)))/(tmp = 1408695065, tmp))^(tmp = -1343267739.8562133, tmp))));
- assertEquals(0, x %= (-437232116));
- assertEquals(-2463314518.2747326, x -= (2463314518.2747326));
- assertEquals(109, x >>= (2401429560));
- assertEquals(-2687641732.0253763, x += (-2687641841.0253763));
- assertEquals(-2336375490019484000, x *= (tmp = 869303174.6678596, tmp));
- assertEquals(5.458650430363785e+36, x *= x);
- assertEquals(0, x |= ((((-1676972008.797291)*x)*((tmp = 2606991807, tmp)-x))<<x));
- assertEquals(0, x &= ((-3053393759.3496876)+(-1431008367)));
- assertEquals(-856728369, x |= (x-(((((764337872)/x)<<((x|(((tmp = 1409368192.1268077, tmp)+(tmp = -848083676, tmp))|(-2797102463.7915916)))^x))/x)^(tmp = 856728369.0589117, tmp))));
- assertEquals(-0, x %= x);
- assertEquals(1116550103, x ^= (-3178417193));
- assertEquals(1116550103, x %= (tmp = -1482481942, tmp));
- assertEquals(133, x >>>= x);
- assertEquals(-1.381429241671034e-7, x /= ((tmp = -962771116.8101778, tmp)^x));
- assertEquals(-1092268961, x |= ((tmp = 3202672531, tmp)-((x-(tmp = 845529357, tmp))>>(tmp = -868680593, tmp))));
- assertEquals(-1092268961, x %= (tmp = 2670840415.304719, tmp));
- assertEquals(-122794480, x %= (tmp = 969474481, tmp));
- assertEquals(-297606521542193600, x *= (2423614820));
- assertEquals(72460064, x >>>= (tmp = -1230798655, tmp));
- assertEquals(-203714325373689600, x *= (-2811401400));
- assertEquals(2154914048, x >>>= (((2241377026.001436)/x)+x));
- assertEquals(1177864081, x ^= (tmp = -968513903, tmp));
- assertEquals(35947664, x &= (-2086226758.2704995));
- assertEquals(20795732539020670, x += (x*(578500247)));
- assertEquals(-892004992, x >>= x);
- assertEquals(-7023661.354330708, x /= ((((((1740714214)%((tmp = -459699286, tmp)+(tmp = -1700187400, tmp)))>>(tmp = -3170295237, tmp))+(tmp = -497509780, tmp))+((1971976144.6197853)+(661992813.6077721)))>>>(-1683802728)));
- assertEquals(-1634205696, x <<= x);
- assertEquals(-7, x >>= (-3187653764.930914));
- assertEquals(-5.095345981491203, x -= ((tmp = 748315289, tmp)/(tmp = -392887780, tmp)));
- assertEquals(1486531570, x &= (1486531570.9300508));
- assertEquals(5670, x >>= (((tmp = -2486758205.26425, tmp)*(732510414))|x));
- assertEquals(5670, x >>= (((-1811879946.2553763)%(1797475764))/(((tmp = -2159923884, tmp)|x)+(tmp = -1774410807, tmp))));
- assertEquals(38, x %= (x>>>x));
- assertEquals(-151134215, x ^= (((tmp = -2593085609.5622163, tmp)+((tmp = -814992345.7516887, tmp)-(534809571)))|(tmp = -232678571, tmp)));
- assertEquals(-234881024, x <<= x);
- assertEquals(-234881024, x <<= (x>>>x));
- assertEquals(55169095435288580, x *= x);
- assertEquals(0, x >>= (tmp = 1176612256, tmp));
- assertEquals(0, x <<= (1321866341.2486475));
- assertEquals(0, x %= (x-(-602577995)));
- assertEquals(0, x >>>= (((((tmp = -125628635.79970193, tmp)^(tmp = 1294209955.229382, tmp))&(((tmp = -2353256654.0725203, tmp)|((-1136743028.9425385)|((((950703429.1110399)-(x>>>x))/((((x%(-252705869.21126103))/((tmp = 886957620, tmp)<<(x%((tmp = -1952249741, tmp)*(tmp = -1998149844, tmp)))))|(tmp = 1933366713, tmp))|((tmp = -2957141565, tmp)>>>(tmp = 1408598804, tmp))))+(((((((-2455002047.4910946)%(tmp = -528017836, tmp))&((-2693432769)/(tmp = 2484427670.9045153, tmp)))%(-356969659))-((((((tmp = 3104828644.0753174, tmp)%(x>>>(tmp = 820832137.8175925, tmp)))*((tmp = 763080553.9260503, tmp)+(3173597855)))<<(((-510785437)^x)<<(x|(((x*(x%((tmp = -1391951515, tmp)/x)))-x)|(x-((-522681793.93221474)/((2514619703.2162743)*(2936688324))))))))|x)>>>(-2093210042)))&(763129279.3651779))&x))))-x))%(((-1331164821)&(tmp = 1342684586, tmp))<<(x<<(tmp = 2675008614.588005, tmp))))>>((2625292569.8984914)+(-3185992401))));
- assertEquals(0, x *= (tmp = 671817215.1147974, tmp));
- assertEquals(-1608821121, x ^= ((tmp = 2686146175.04077, tmp)>>>x));
- assertEquals(-0, x %= x);
- assertEquals(-0, x /= ((tmp = 286794551.0720866, tmp)|(x%x)));
- assertEquals(0, x <<= (x|(tmp = 1095503996.2285218, tmp)));
- assertEquals(443296752, x ^= (443296752));
- assertEquals(110824188, x >>= ((184708570)>>(x&x)));
- assertEquals(0.7908194935161674, x /= ((((167151154.63381648)&((tmp = -1434120690, tmp)-(tmp = 2346173080, tmp)))/(56656051.87305987))^(140138414)));
- assertEquals(-0.9027245492678485, x *= ((tmp = 1724366578, tmp)/(((2979477411)<<(((897038568)>>(tmp = 348960298, tmp))%(281056223.2037884)))^((((-1383133388)-(((-1379748375)-((x>>(x&(tmp = 2456582046, tmp)))>>>(-2923911755.565961)))&x))<<(-2825791731))^(tmp = -1979992970, tmp)))));
- assertEquals(0, x &= (2482304279));
- assertEquals(-0, x *= (-2284213673));
- assertEquals(0, x <<= ((2874381218.015819)|x));
- assertEquals(0, x *= (x>>>(tmp = 2172786480, tmp)));
- assertEquals(0, x &= (-1638727867.2978938));
- assertEquals(0, x %= ((tmp = -2213947368.285817, tmp)>>x));
- assertEquals(0, x >>>= (tmp = -531324706, tmp));
- assertEquals(0, x %= (tmp = -2338792486, tmp));
- assertEquals(0, x <<= (((tmp = 351012164, tmp)<<(x|((tmp = -3023836638.5337825, tmp)^(-2678806692))))|x));
- assertEquals(0, x %= (x-(tmp = -3220231305.45039, tmp)));
- assertEquals(0, x <<= (-2132833261));
- assertEquals(0, x >>>= x);
- assertEquals(0, x %= ((2544970469)+(((-2633093458.5911965)&(644108176))-(x>>>(tmp = -949043718, tmp)))));
- assertEquals(-2750531265, x += (-2750531265));
- assertEquals(0, x >>= x);
- assertEquals(0, x *= ((tmp = 1299005700, tmp)-x));
- assertEquals(0, x >>= x);
- assertEquals(-1785515304, x -= (((((-806054462.5563161)/x)>>>x)+(1785515304))|((tmp = 2937069788.9396844, tmp)/x)));
- assertEquals(-3810117159.173689, x -= (2024601855.1736891));
- assertEquals(-6.276064139320051, x /= (607087033.3053156));
- assertEquals(134217727, x >>>= (((x%(tmp = 924293127, tmp))^x)|((x>>>(x&((((tmp = -413386639, tmp)/(x>>(tmp = 599075308.8479941, tmp)))^(tmp = -1076703198, tmp))*((tmp = -2239117284, tmp)>>(655036983)))))-x)));
- assertEquals(134217727, x %= (tmp = 2452642261.038778, tmp));
- assertEquals(-569504740360507, x *= ((tmp = -1086243941, tmp)>>(tmp = 1850668904.4885683, tmp)));
- assertEquals(113378806, x >>>= (tmp = -2558233435, tmp));
- assertEquals(979264375, x -= (((x>>(1950008052))%((2917183569.0209)*(tmp = 1184250640.446752, tmp)))|((((tmp = -691875212, tmp)-(-2872881803))>>(tmp = 44162204.97461021, tmp))^(tmp = 865885647, tmp))));
- assertEquals(-1127813632, x <<= ((((tmp = -2210499281, tmp)>>>x)-(tmp = 2359697240, tmp))-x));
- assertEquals(-1707799657, x ^= (653518231.3995534));
- assertEquals(2916579668449318000, x *= x);
- assertEquals(2916579669254640600, x += (x&(tmp = 2986558026.399422, tmp)));
- assertEquals(870995175, x ^= (2598813927.8991632));
- assertEquals(870995175, x %= (-2857038782));
- assertEquals(1869503575895591000, x *= (x|(x|(((tmp = 2478650307.4118147, tmp)*((tmp = 2576240847.476932, tmp)>>>x))<<x))));
- assertEquals(-134947790, x |= ((tmp = 1150911808, tmp)*((2847735464)/(-2603172652.929262))));
- assertEquals(-137053182, x -= ((tmp = 2155921819.0929346, tmp)>>>(x-(((-1960937402)-(-1907735074.2875962))%((1827808310)^(tmp = -2788307127, tmp))))));
- assertEquals(-134824702, x |= (((2912578752.2395406)^(x%(((-2585660111.0638976)<<(((((tmp = 747742706, tmp)%(-1630261205))&((((x|(x|(-2619903144.278758)))|((2785710568.8651934)>>((-968301967.5982246)<<(x&x))))>>((x>>>((x>>>(tmp = -1402085797.0310762, tmp))*((tmp = -323729645.2250068, tmp)<<(tmp = 2234667799, tmp))))>>>(-167003745)))>>((924665972.4681011)<<x)))>>>x)<<((((x+x)+x)-(((tmp = 2399203431.0526247, tmp)-(-2872533271))-(((tmp = 914778794.2087344, tmp)-(tmp = 806353942.9502392, tmp))|(((tmp = 262924334.99231672, tmp)&x)|(tmp = -460248836.5602243, tmp)))))/x)))%((-1681000689)/(tmp = -2805054623.654228, tmp)))))*(tmp = 957346233.9619625, tmp)));
- assertEquals(-3274838, x %= ((((tmp = 3155450543.3524327, tmp)>>>x)<<(tmp = 2103079652.3410985, tmp))>>x));
- assertEquals(-3274838, x |= ((((tmp = 2148004645.639173, tmp)>>>(tmp = -1285119223, tmp))<<(((((-711596054)>>>(tmp = -2779776371.3473206, tmp))^(((((tmp = -1338880329.383915, tmp)<<((-1245247254.477341)>>x))*(tmp = -2649052844.20065, tmp))>>((1734345880.4600453)%(x/(2723093117.118899))))*(1252918475.3285656)))<<(2911356885))^x))<<(-1019761103)));
- assertEquals(1703281954, x &= (((tmp = 1036570471.7412028, tmp)+((tmp = 3043119517, tmp)%(2374310816.8346715)))%(tmp = -2979155076, tmp)));
- assertEquals(1741588391, x |= ((tmp = 1230009575.6003838, tmp)>>>(-1247515003.8152597)));
- assertEquals(72869474.64782429, x %= (tmp = 1668718916.3521757, tmp));
- assertEquals(770936242.104203, x += (698066767.4563787));
- assertEquals(-0.2820604726420833, x /= (tmp = -2733230342, tmp));
- assertEquals(403480578, x |= ((969730374)&(tmp = 1577889835, tmp)));
- assertEquals(-1669557233, x ^= ((-1616812135)+(tmp = -456209292, tmp)));
- assertEquals(-1630427, x >>= ((2327783031.1175823)/(226947662.4579488)));
- assertEquals(131022, x >>>= ((tmp = -1325018897.2482083, tmp)>>(x&((((((-1588579772.9240348)<<(tmp = -1775580288.356329, tmp))<<(tmp = -1021528325.2075481, tmp))>>((tmp = 2373033451.079956, tmp)*(tmp = 810304612, tmp)))-((tmp = -639152097, tmp)<<(tmp = 513879484, tmp)))&(2593958513)))));
- assertEquals(1, x >>= ((3033200222)-x));
- assertEquals(-561146816.4851823, x += (tmp = -561146817.4851823, tmp));
- assertEquals(-4.347990105831158, x /= ((((-1270435902)*x)%((tmp = 637328492.7386824, tmp)-(x>>(-749100689))))%(x+x)));
- assertEquals(-1, x >>= x);
- assertEquals(1, x *= x);
- assertEquals(111316849706694460, x += ((966274056)*(x|(115202150))));
- assertEquals(-1001883840, x >>= x);
- assertEquals(-1001883840, x &= x);
- assertEquals(-3006880758, x += ((((-2275110637.4054556)/((x+(tmp = -1390035090.4324536, tmp))>>(-5910593)))&(tmp = 378982420, tmp))|(tmp = 2289970378.568629, tmp)));
- assertEquals(314474, x >>>= (x>>((tmp = -228007336.31281257, tmp)%(tmp = 1127648013, tmp))));
- assertEquals(-17694827, x ^= ((tmp = 2095133598.1849852, tmp)|(-1978322311)));
- assertEquals(1, x /= x);
- assertEquals(1, x %= (-2323617209.7531185));
- assertEquals(0, x >>>= (x*(tmp = -1574455400.489434, tmp)));
- assertEquals(0, x >>= (3131854684));
- assertEquals(2853609824, x += ((-231012098)-(tmp = -3084621922, tmp)));
- assertEquals(8143089027629311000, x *= x);
- assertEquals(313052685, x ^= (tmp = 2962303501, tmp));
- assertEquals(4776, x >>= (tmp = 2271457232, tmp));
- assertEquals(0.000002812258572702285, x /= (tmp = 1698279115, tmp));
- assertEquals(0, x >>>= (tmp = 1698465782.0927145, tmp));
- assertEquals(0, x <<= x);
- assertEquals(0, x |= ((x<<((-1824760240.3040407)<<(2798263764.39145)))&(tmp = 1795988253.0493627, tmp)));
- assertEquals(1782206945, x ^= (-2512760351.7881565));
- assertEquals(7610569113843172000, x *= (((tmp = -44415823.92972565, tmp)&(tmp = 1402483498.9421625, tmp))+(tmp = 2909778666, tmp)));
- assertEquals(15221138227873292000, x += (x-(tmp = -186948658.394145, tmp)));
- assertEquals(0, x -= x);
- assertEquals(-2238823252, x -= ((tmp = 2238823252, tmp)+x));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= (2976069570));
- assertEquals(0, x >>= ((tmp = -2358157433, tmp)/x));
- assertEquals(-949967713, x ^= (tmp = -949967713, tmp));
- assertEquals(-1, x >>= x);
- assertEquals(-1522291702.1977966, x *= (1522291702.1977966));
- assertEquals(-1522291702, x >>= ((((2290279800)|x)|(1793154434.6798015))&((-1161390929.0766077)>>>x)));
- assertEquals(83894274, x &= (tmp = 1571058486, tmp));
- assertEquals(43186847.90522933, x += ((tmp = -1131332988.0947707, tmp)%x));
- assertEquals(0, x >>= (tmp = -1968312707.269359, tmp));
- assertEquals(0, x &= (2507747643.26175));
- assertEquals(0, x %= (tmp = 3190525303.366887, tmp));
- assertEquals(-1968984602, x ^= (((x/(x|(-1607062026.5338054)))<<(tmp = 2207669861.8770065, tmp))+(tmp = 2325982694.956348, tmp)));
- assertEquals(554, x >>>= (((tmp = -2302283871.993821, tmp)>>>(-3151835112))|(((((x%(-1534374264))/((731246012)<<(((883830997.1194847)<<(((-1337895080.1937215)/(tmp = 3166402571.8157315, tmp))^(tmp = -1563897595.5799441, tmp)))>>(tmp = -556816951.0537591, tmp))))>>(-2682203577))<<(x/((1654294674.865079)+x)))/((x^(-2189474695.4259806))/(-475915245.7363057)))));
- assertEquals(1372586111, x ^= (1372586581));
- assertEquals(1166831229, x -= ((-834168138)&(762573579)));
- assertEquals(2333662456, x -= ((x>>x)-x));
- assertEquals(-1961304840, x &= x);
- assertEquals(-2130143128, x &= (2982852718.0711775));
- assertEquals(1073741824, x <<= (-1446978661.6426942));
- assertEquals(2097152, x >>>= ((-1424728215)-(((127872198)%(tmp = -2596923298, tmp))&x)));
- assertEquals(2097152, x >>>= x);
- assertEquals(0, x &= (x/(tmp = -518419194.42994523, tmp)));
- assertEquals(0, x >>= ((x/(-1865078245))%(tmp = 2959239210, tmp)));
- assertEquals(-0, x *= ((x|(-1721307400))|(-3206147171.9491577)));
- assertEquals(0, x >>>= ((-694741143)&(tmp = -2196513947.699142, tmp)));
- assertEquals(0, x <<= x);
- assertEquals(0, x &= ((tmp = 2037824385.8836646, tmp)+((tmp = 1203034986.4647732, tmp)/(x>>>(((-1374881234)/(899771270.3237157))+((-2296524362.8020077)|(-1529870870)))))));
- assertEquals(0, x >>= (tmp = 2770637816, tmp));
- assertEquals(0, x ^= x);
- assertEquals(-1861843456, x |= ((632402668)*((x|(tmp = -1032952662.8269436, tmp))|(tmp = 2671272511, tmp))));
- assertEquals(-1861843456, x >>= (((x>>>x)+x)<<(-1600908842)));
- assertEquals(-58182608, x >>= (x-(tmp = -2496617861, tmp)));
- assertEquals(-3636413, x >>= (tmp = -400700028, tmp));
- assertEquals(-7272826, x += x);
- assertEquals(-1, x >>= ((tmp = -3184897005.3614545, tmp)-((-1799843014)|(tmp = 2832132915, tmp))));
- assertEquals(-121800925.94209385, x *= (121800925.94209385));
- assertEquals(-30450232, x >>= (-979274206.6261561));
- assertEquals(-30450232, x >>= (tmp = -1028204832.5078967, tmp));
- assertEquals(-30450232, x |= x);
- assertEquals(965888871, x ^= (((((-2157753481.3375635)*((tmp = -1810667184.8165767, tmp)&((tmp = 2503908344.422232, tmp)|x)))>>(x>>(1601560785)))<<x)^(tmp = 943867311.6380403, tmp)));
- assertEquals(7546006, x >>>= x);
- assertEquals(7546006, x <<= ((tmp = 1388931761.780241, tmp)*(x-(tmp = -1245147647.0070577, tmp))));
- assertEquals(12985628, x += (x&(-1520746354)));
- assertEquals(12985628, x &= x);
- assertEquals(12985628, x %= (tmp = 308641965, tmp));
- assertEquals(685733278, x |= ((tmp = -1275653544, tmp)-((tmp = -1956798010.3773859, tmp)%(tmp = 2086889575.643448, tmp))));
- assertEquals(679679376, x &= (2860752368));
- assertEquals(1770773904, x |= (x<<(3200659207)));
- assertEquals(1224886544, x &= (-585733767.6876519));
- assertEquals(1224886544, x %= ((tmp = -114218494, tmp)-x));
- assertEquals(1208109328, x &= (tmp = 1854361593, tmp));
- assertEquals(18434, x >>>= x);
- assertEquals(-349394636955256100, x *= (x*(-1028198742)));
- assertEquals(-519536600.7713163, x %= (-1054085356.9120367));
- assertEquals(-1610612736, x ^= ((tmp = -3126078854, tmp)&x));
- assertEquals(-2637321565906333700, x *= (1637464740.5658746));
- assertEquals(-2637321568051070500, x -= ((tmp = -1006718806, tmp)<<(3005848133.106345)));
- assertEquals(368168695, x ^= (x^(tmp = 368168695.6881037, tmp)));
- assertEquals(43, x >>>= x);
- assertEquals(-2081297089, x |= ((167169305.77248895)+(-2248466405.3199244)));
- assertEquals(-2474622167, x -= (tmp = 393325078, tmp));
- assertEquals(-135109701, x %= (-1169756233));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= (((((tmp = -164768854, tmp)/(tmp = -1774989993.1909926, tmp))+x)-((-921438912)>>(tmp = -191772028.69249105, tmp)))-(tmp = 558728578.22033, tmp)));
- assertEquals(0, x %= (tmp = 2188003745, tmp));
- assertEquals(0, x <<= (((tmp = -999335540, tmp)>>((((325101977)/(tmp = -3036991542, tmp))<<(tmp = -213302488, tmp))+x))|(tmp = -1054204587, tmp)));
- assertEquals(0, x &= ((2844053429.4720345)>>>x));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x -= (-1481729275.9118822));
- assertEquals(NaN, x *= (tmp = 1098314618.2397528, tmp));
- assertEquals(-1073741824, x ^= ((tmp = 1718545772, tmp)<<(((tmp = -81058910, tmp)-(2831123087.424368))+(tmp = 576710057.2361784, tmp))));
- assertEquals(-2921155898.4793186, x -= (1847414074.4793184));
- assertEquals(-1295646720, x <<= (2178621744));
- assertEquals(-0.8906779709597907, x /= ((tmp = -2840292585.6837263, tmp)<<(x&((tmp = 892527695.6172305, tmp)>>>x))));
- assertEquals(0, x <<= (((tmp = 3149667213.298993, tmp)>>(tmp = 1679370761.7226725, tmp))^(115417747.21537328)));
- assertEquals(0, x |= x);
- assertEquals(0, x %= ((-1112849427)>>(-1245508870.7514496)));
- assertEquals(0, x &= x);
- assertEquals(0, x |= x);
- assertEquals(0, x >>>= ((3144100694.930459)>>>(tmp = 2408610503, tmp)));
- assertEquals(0, x <<= ((tmp = 2671709754.0318713, tmp)%x));
- assertEquals(0, x >>>= (x|((tmp = -3048578701, tmp)-(674147224))));
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= ((tmp = -2084883715, tmp)|(((((-3008427069)+(875536047.4283574))>>>x)%(tmp = -450003426.1091652, tmp))%(((-2956878433.269356)|(x/((((x%((((((x<<(((tmp = -1581063482.510351, tmp)^x)-(tmp = 1364458217, tmp)))^((tmp = 1661446342, tmp)+(1307091014)))/(342270750.9901335))>>>(x&((1760980812.898993)&((tmp = 2878165745.6401143, tmp)/(((tmp = -981178013, tmp)/(-2338761668.29912))>>(-958462630))))))*((1807522840)^((tmp = 1885835034, tmp)^(-2538647938))))*(1673607540.0854697)))%x)>>x)<<x)))<<(853348877.2407281)))));
- assertEquals(0, x >>>= x);
- assertEquals(-1162790279, x -= (1162790279));
- assertEquals(-1162790279, x >>= (((-490178658)*x)/((((((tmp = -1883861998.6699312, tmp)/(tmp = -2369967345.240594, tmp))+(3142759868.266447))&(508784917.8158537))&x)>>(-2129532322))));
- assertEquals(-1360849740.9829152, x -= (x+(1360849740.9829152)));
- assertEquals(1928392181, x ^= (-602670783));
- assertEquals(19478708.898989897, x /= (((-2617861994)>>(tmp = 797256920, tmp))%(-1784987906)));
- assertEquals(-8648903.575540157, x *= (((tmp = 673979276, tmp)/(-1517908716))%(x/x)));
- assertEquals(-8648903.575540157, x %= ((((643195610.4221292)>>>(tmp = 2342669302, tmp))>>>(tmp = -1682965878, tmp))^((tmp = -208158937.63443017, tmp)>>((907286989)&(x<<(448634893))))));
- assertEquals(1399288769, x ^= (tmp = -1407486728, tmp));
- assertEquals(0, x &= (((1999255838.815517)/(tmp = 564646001, tmp))/(-3075888101.3274765)));
- assertEquals(0, x ^= ((-78451711.59404826)%x));
- assertEquals(-1351557131, x |= (2943410165));
- assertEquals(1715626371, x -= (-3067183502));
- assertEquals(71434240, x &= ((-1800066426)<<(((((x<<(-324796375))+x)<<(tmp = 2696824955.735132, tmp))^x)%(tmp = 444916469, tmp))));
- assertEquals(71434240, x >>>= (((x&((x%x)|x))+(tmp = 2226992348.3050146, tmp))<<(-305526260)));
- assertEquals(0, x -= (x%(tmp = 582790928.5832802, tmp)));
- assertEquals(0, x *= ((x%(1865155340))>>>((x<<(2600488191))^(-308995123))));
- assertEquals(0, x >>= (x&(-3120043868.8531103)));
- assertEquals(0, x |= x);
- assertEquals(-0, x *= (tmp = -172569944, tmp));
- assertEquals(0, x <<= (-1664372874));
- assertEquals(1377713344.6784928, x += (tmp = 1377713344.6784928, tmp));
- assertEquals(1377713344, x |= x);
- assertEquals(-232833282, x |= (tmp = 2685870654, tmp));
- assertEquals(84639, x -= (((((2778531079.998492)%(2029165314))>>>(tmp = -468881172.3729558, tmp))^x)|((x>>>((((x%(3044318992.943596))&(1996754328.2214756))^(1985227172.7485228))%(tmp = -1984848676.1347625, tmp)))|((tmp = 2637662639, tmp)<<x))));
- assertEquals(0, x ^= x);
- assertEquals(1237720303, x -= (-1237720303));
- assertEquals(2, x >>= (-2148785379.428976));
- assertEquals(2, x &= (tmp = -3087007874, tmp));
- assertEquals(0, x %= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x += x);
- assertEquals(0, x &= (2055693082));
- assertEquals(-1349456492, x += (x^(-1349456492.315998)));
- assertEquals(671088640, x <<= (x>>(-2030805724.5472062)));
- assertEquals(-417654580004782100, x *= (tmp = -622353822, tmp));
- assertEquals(1538160360, x |= (195983080.56698656));
- assertEquals(733, x >>>= (tmp = 661085269, tmp));
- assertEquals(657, x &= (-1611460943.993404));
- assertEquals(431649, x *= x);
- assertEquals(863298, x += x);
- assertEquals(0, x &= ((1899423003)/((472439729)>>((tmp = 2903738952, tmp)+(tmp = 2164601630.3456993, tmp)))));
- assertEquals(0, x &= (x>>>(tmp = 1939167951.2828958, tmp)));
- assertEquals(1557813284, x |= (x-(-1557813284)));
- assertEquals(72876068, x &= (662438974.2372154));
- assertEquals(0.6695448637501589, x /= (tmp = 108844189.45702457, tmp));
- assertEquals(0, x -= x);
- assertEquals(2944889412, x += (2944889412));
- assertEquals(3787980288, x -= ((((tmp = -2003814373.2301111, tmp)<<x)>>>(tmp = -3088357284.4405823, tmp))-(843090884)));
- assertEquals(1, x >>>= (729274079));
- assertEquals(1, x %= (-148002187.33869123));
- assertEquals(3073988415.673201, x *= (tmp = 3073988415.673201, tmp));
- assertEquals(4839166225.673201, x += (tmp = 1765177810, tmp));
- assertEquals(4529373898.673201, x += (-309792327));
- assertEquals(3097903.090496063, x %= (-150875866.51942348));
- assertEquals(1270874112, x <<= ((((((tmp = -960966763.1418135, tmp)>>((((-3208596981.613482)>>>(tmp = 746403937.6913509, tmp))>>>(-2190042854.066803))/(2449323432)))*(-1272232665.791577))<<(-99306767.7209444))^((-1942103828)/((1570981655)/(tmp = 2381666337, tmp))))+(tmp = -1946759395.1558368, tmp)));
- assertEquals(1273845956, x |= (tmp = -3197282108.6120167, tmp));
- assertEquals(159230744, x >>= (((tmp = -1036031403.8108604, tmp)>>>(((3084964493)>>((x*x)^x))+(((2980108409.352001)^x)-(tmp = -2501685423.513927, tmp))))&(326263839)));
- assertEquals(-370091747145550100, x *= (tmp = -2324248055.674161, tmp));
- assertEquals(143384219.54999557, x /= (tmp = -2581119096, tmp));
- assertEquals(1843396287, x |= (tmp = 1842718767, tmp));
- assertEquals(2.4895593465813803, x /= (740450831));
- assertEquals(2.4895593465813803, x %= ((((((((-3175333618)>>>((tmp = -1403880166, tmp)<<(tmp = -134875360, tmp)))>>>(2721317334.998084))<<(x&(tmp = 2924634208.1484184, tmp)))*((((x>>(tmp = -200319931.15328693, tmp))-(tmp = -495128933, tmp))+((-788052518.6610589)*((((tmp = 107902557, tmp)&(1221562660))%(x<<(((3155498059)*(((tmp = -1354381139.4897022, tmp)^(tmp = 3084557138.332852, tmp))*((((tmp = 1855251464.8464525, tmp)/((-1857403525.2008865)>>x))|x)-(-2061968455.0023944))))*(1917481864.84619))))^(x-(-508176709.52712965)))))+((((x%(-1942063404))+(x%(tmp = 855152281.180481, tmp)))|(-522863804))>>x)))>>>((tmp = -2515550553, tmp)&(((((-801095375)-(tmp = -2298729336.9792976, tmp))^x)/(tmp = 2370468053, tmp))>>(x|(tmp = -900008879, tmp)))))>>>(((tmp = -810295719.9509168, tmp)*((tmp = -1306212963.6226444, tmp)/(((tmp = 3175881540.9514832, tmp)|(-1439142297.819246))+((tmp = -134415617, tmp)|((-245801870)+x)))))>>(tmp = 1889815478, tmp)))-(((tmp = 597031177, tmp)%(858071823.7655672))+((tmp = 2320838665.8243756, tmp)|((938555608)<<(2351739219.6461897))))));
- assertEquals(6.197905740150709, x *= x);
- assertEquals(1, x /= x);
- assertEquals(0, x >>= (-1639664165.9076233));
- assertEquals(0, x >>= (-3135317748.801177));
- assertEquals(0, x &= (3185479232.5325994));
- assertEquals(-0, x *= ((-119759439.19668174)/(tmp = 2123964608, tmp)));
- assertEquals(0, x /= (-1183061929.2827876));
- assertEquals(0, x <<= (-1981831198));
- assertEquals(0, x >>= ((((x<<(((((((-2133752838)&((tmp = -3045157736.9331336, tmp)>>>(x%x)))>>x)%(tmp = 3082217039, tmp))&(tmp = 270770770.97558427, tmp))|((-2212037556)^((((((2089224421)|(tmp = 360979560, tmp))<<x)%((tmp = -1679487690.6940534, tmp)+((173021423)|((tmp = 560900612, tmp)+((244376267.58977115)^x)))))<<(tmp = 2534513699, tmp))^x)))>>>(2915907189.4873834)))+(x*x))%(1637581117))%(tmp = 2363861105.3786244, tmp)));
- assertEquals(0, x &= ((-2765495757.873004)&(1727406493)));
- assertEquals(NaN, x -= (((((-1419667515.2616255)|x)-(150530256.48022234))%((((x|x)<<x)>>>(x^x))+x))-((-1216384577.3749187)*(495244398))));
- assertEquals(NaN, x += (x^((tmp = 2472035493, tmp)+x)));
- assertEquals(NaN, x %= ((tmp = -1753037412.885754, tmp)|((tmp = 2507058310, tmp)<<(1475945705))));
- assertEquals(-1008981005, x |= ((tmp = -1140889842.6099494, tmp)-(tmp = -131908837, tmp)));
- assertEquals(999230327.5872104, x -= (tmp = -2008211332.5872104, tmp));
- assertEquals(975810, x >>= (((-1211913874)*x)>>>((-2842129009)>>(x&(tmp = -1410865834, tmp)))));
- assertEquals(7623, x >>= ((tmp = -1051327071, tmp)-(((tmp = -237716102.8005445, tmp)|((2938903833.416546)&x))|(((-1831064579)^x)/((tmp = 2999232092, tmp)-(981996301.2875179))))));
- assertEquals(0, x -= x);
- assertEquals(0, x %= (x|(tmp = -666201160.5810485, tmp)));
- assertEquals(-1347124100, x |= (-1347124100));
- assertEquals(-0, x %= (x&x));
- assertEquals(-661607963, x ^= (tmp = -661607963.3794863, tmp));
- assertEquals(3465, x >>>= (-828119020.8056595));
- assertEquals(-268431991, x -= (((tmp = -1386256352, tmp)^((tmp = 743629575, tmp)%((x*((tmp = -1719517658, tmp)>>(2019516558)))<<((2637317661)|x))))<<(tmp = -51637065, tmp)));
- assertEquals(1578876380, x += ((tmp = 1847308371, tmp)&(((((((tmp = 1487934776.1893163, tmp)%(tmp = 1423264469.3137975, tmp))|(((2653260792.5668964)/(-2417905016.043802))>>>(2097411118.4501896)))^x)^(((tmp = -71334226, tmp)|x)>>>(tmp = -2771758874.7696714, tmp)))^((tmp = -1464849031.3240793, tmp)%(tmp = 2349739690.6430283, tmp)))/x)));
- assertEquals(3269293934, x += (1690417554));
- assertEquals(4025392608.031957, x -= (((tmp = 268501120.7225704, tmp)<<(tmp = 2841620654.8903794, tmp))+((tmp = 1606704462.8455591, tmp)/((-2601879963)/(tmp = 2966620168.989736, tmp)))));
- assertEquals(7, x >>>= (x^(-1913800035)));
- assertEquals(1.4326776816275493e-8, x /= ((((tmp = -2703417892, tmp)/x)^((-2693772270.396241)>>>((x-(tmp = 615999818.5666655, tmp))>>((((2308121439.3702726)<<((-1794701502)>>(x+(tmp = -2253406035.972883, tmp))))<<((tmp = -197103799.0624652, tmp)|(629975898)))>>>x))))>>>((tmp = 2833656803, tmp)^(x^(tmp = -1580436025, tmp)))));
- assertEquals(0, x >>>= (tmp = 1525372830.2126007, tmp));
- assertEquals(0, x %= ((2354010949.24469)>>>(x<<x)));
- assertEquals(0, x ^= (((1112335059.6922574)*(tmp = -1874363935, tmp))&(((((2154894295.8360596)<<x)&(tmp = -270736315.13505507, tmp))&x)>>>(-2205692260.552064))));
- assertEquals(0, x >>>= (x<<((1488533932)*(tmp = 1707754286, tmp))));
- assertEquals(0, x >>= (((tmp = 1232547376.463387, tmp)%((x>>(711691823.1608362))>>>x))>>(((895039781.7478573)*(((((-334946524)&x)*(tmp = -1214529640, tmp))^(tmp = -1586820245, tmp))*(1062595445)))+x)));
- assertEquals(0, x *= (1863299863.2631998));
- assertEquals(0, x /= (tmp = 1858428705.1330547, tmp));
- assertEquals(0, x &= x);
- assertEquals(611788028, x += (x^(611788028.1510412)));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= ((tmp = -1617320707.1784317, tmp)-((-2139400380)-(-1402777976))));
- assertEquals(0, x >>= (415866827.34665));
- assertEquals(-1990811897, x -= (tmp = 1990811897, tmp));
- assertEquals(-1990811895, x += ((x>>>(tmp = -2175453282.769696, tmp))&(tmp = -1459450498.7327478, tmp)));
- assertEquals(-2377017935.149517, x += (-386206040.1495173));
- assertEquals(1946129845, x |= (tmp = -2890956796.936539, tmp));
- assertEquals(0, x %= x);
- assertEquals(0, x <<= (1616188263));
- assertEquals(-1081213596, x ^= (tmp = 3213753700, tmp));
- assertEquals(3213753700, x >>>= (tmp = -3211181312, tmp));
- assertEquals(-1081213596, x &= x);
- assertEquals(-1081213583, x ^= (((tmp = 1599988273.4926577, tmp)>>((((-1061394954.6331315)^x)+((-1835761078)*x))+(x%(tmp = -696221869, tmp))))/((tmp = -1156966790.3436491, tmp)^x)));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x += (-1257400530.9263027));
- assertEquals(NaN, x /= (753062089));
- assertEquals(NaN, x *= ((tmp = 305418865.57012296, tmp)^(((-2797769706)+((((tmp = -33288276.988654375, tmp)%(tmp = 1242979846, tmp))|(-316574800))-((tmp = -1766083579.4203427, tmp)*(((x*(tmp = -2400342309.2349987, tmp))>>(tmp = 2632061795, tmp))^(tmp = -1001440809, tmp)))))^((((x-(tmp = -1469542637.6925495, tmp))-x)-(3184196890))%(((((((633226688)*((tmp = -2692547856, tmp)>>(((tmp = -1244311756, tmp)>>>x)+((1746013631.405202)>>>(941829464.1962085)))))%(x-x))+(995681795))-(tmp = -3047070551.3642616, tmp))/(1968259705))-((-2853237880)^(tmp = -2746628223.4540343, tmp)))))));
- assertEquals(0, x >>= x);
- assertEquals(0.5713172378854926, x += (((x+(((x+x)/(tmp = 2642822318, tmp))*(-2590095885.4280834)))|(tmp = -1769210836, tmp))/(tmp = -3096722308.8665104, tmp)));
- assertEquals(-0.000002311097780334994, x /= ((2269858877.9010344)>>(-2992512915.984787)));
- assertEquals(-0.000002311097780334994, x %= (-1139222821));
- assertEquals(-0.000004622195560669988, x += x);
- assertEquals(1, x /= x);
- assertEquals(1, x >>>= (((3002169429.6061807)/(-3068577366))>>>((tmp = -1844537620, tmp)%((((tmp = 2087505119, tmp)>>>x)+x)&(2179989542)))));
- assertEquals(-534213071, x *= (-534213071));
- assertEquals(-534213077.3716287, x -= (((tmp = -2390432951.154034, tmp)^x)/(-290501980)));
- assertEquals(1836305, x >>>= (x&x));
- assertEquals(1836305, x %= ((x|((3070123855)^(49986396)))+((-1863644960.4202995)>>>((tmp = 1886126804.6019692, tmp)^x))));
- assertEquals(28692, x >>>= ((2561362139.491764)>>(((((tmp = -1347469854.7413375, tmp)/(((x|(x+x))^((x^(tmp = -2737413775.4595394, tmp))^x))<<(((tmp = 225344844.07128417, tmp)&x)&(tmp = 145794498, tmp))))*x)<<(1424529187))/((-2924344715)/(tmp = -2125770148, tmp)))));
- assertEquals(-2089419535.2717648, x += (-2089448227.2717648));
- assertEquals(18957929, x ^= (tmp = 2186590872, tmp));
- assertEquals(-708972800, x -= (727930729));
- assertEquals(-4198593, x |= (799483455.1885371));
- assertEquals(-1, x >>= (-2330654693.6413193));
- assertEquals(-1, x |= (((tmp = -116877155, tmp)>>>((((tmp = -1677422314.1333556, tmp)/(tmp = -3108738499.0798397, tmp))%((x&(x/x))%((tmp = -695607185.1561592, tmp)-(tmp = 2302449181.622259, tmp))))^(((-1482743646.5604773)^((897705064)>>>x))-(tmp = -2933836669, tmp))))%(((tmp = -2991584625, tmp)|(((x>>x)+(-1101066835))-x))>>(-33192973.819939613))));
- assertEquals(-1, x &= x);
- assertEquals(-524288, x <<= (-1177513101.3087924));
- assertEquals(1978770334.9189441, x += (tmp = 1979294622.9189441, tmp));
- assertEquals(901783582, x &= ((-368584615)^(((((-478030699.2647903)<<x)<<x)+(tmp = 708725752, tmp))^((tmp = -3081556856, tmp)/(tmp = 1149958711.0676727, tmp)))));
- assertEquals(-1480333211.8654308, x += (tmp = -2382116793.865431, tmp));
- assertEquals(956930239.6783283, x *= ((tmp = 956930239.6783283, tmp)/x));
- assertEquals(1277610.4668602513, x /= ((tmp = 1571029828, tmp)>>(tmp = 2417481141, tmp)));
- assertEquals(-1077333228, x ^= (tmp = 3218755006, tmp));
- assertEquals(-50218, x |= (tmp = -1044436526.6435988, tmp));
- assertEquals(-1, x >>= (-154655245.18921852));
- assertEquals(0.00006276207290978003, x *= (((tmp = 2234286992.9800305, tmp)>>(tmp = 2132564046.0696363, tmp))/((((tmp = -2565534644.3428087, tmp)>>>(tmp = 2622809851.043325, tmp))>>>((tmp = 311277386, tmp)&x))-(tmp = -2003980974, tmp))));
- assertEquals(0, x %= x);
- assertEquals(1282114076, x += ((((422838227)>>>((tmp = 1024613366.1899053, tmp)-((368275340)<<(((tmp = -3066121318, tmp)+(-2319101378))&x))))^(x>>(tmp = 1920136319.803412, tmp)))^(1282264803.3968434)));
- assertEquals(-277097604, x |= (-283585688.9123297));
- assertEquals(553816692, x &= (x&(tmp = 554082036.676608, tmp)));
- assertEquals(658505728, x <<= x);
- assertEquals(658505728, x &= (x%(2846071230)));
- assertEquals(39, x >>= (334728536.5172192));
- assertEquals(0, x -= x);
- assertEquals(0, x += x);
- assertEquals(0, x &= (tmp = -335285336, tmp));
- assertEquals(0, x <<= (tmp = 1255594828.3430014, tmp));
- assertEquals(0, x %= (-630772751.1248167));
- assertEquals(NaN, x /= ((((x&(tmp = -1576090612, tmp))%x)>>>x)*((-1038073094.2787619)>>>x)));
- assertEquals(NaN, x += x);
- assertEquals(NaN, x -= (((tmp = -2663887803, tmp)&((x+(-1402421046))/x))/(-2675654483)));
- assertEquals(NaN, x %= (x&(tmp = 672002093, tmp)));
- assertEquals(0, x |= x);
- assertEquals(-2698925754, x += (tmp = -2698925754, tmp));
- assertEquals(-2057748993, x += ((tmp = -2263466497, tmp)^x));
- assertEquals(1, x /= x);
- assertEquals(-2769559719.4045835, x -= (2769559720.4045835));
- assertEquals(-1.3964174646069973, x /= (tmp = 1983332198, tmp));
- assertEquals(-2140716624.3964174, x += (tmp = -2140716623, tmp));
- assertEquals(0, x <<= ((2589073007)-(-816764911.8571186)));
- assertEquals(-2837097288.161354, x -= (tmp = 2837097288.161354, tmp));
- assertEquals(-1445059927.161354, x += (tmp = 1392037361, tmp));
- assertEquals(155197984, x &= (tmp = -2694712730.924674, tmp));
- assertEquals(155197984, x |= (x>>>(tmp = 69118015.20305443, tmp)));
- assertEquals(155197984, x >>>= (((x^(-1353660241))*x)<<(((((x%(tmp = -1905584634, tmp))>>>(tmp = -860171244.5963638, tmp))&(-1084415001.7039547))+(x-(((tmp = 298064661, tmp)>>x)>>((tmp = 378629912.383446, tmp)-(x%x)))))+(((3212580683)/(((((x^x)>>(tmp = -1502887218, tmp))<<x)%(-142779025))|(((tmp = 1361745708, tmp)*(((((tmp = 1797072528.0673332, tmp)+x)%(tmp = 167297609, tmp))%(-287345856.1791787))^(((((((x*(tmp = -640510459.1514752, tmp))<<(x^(tmp = 1387982082.5646644, tmp)))>>(tmp = 2473373497.467914, tmp))^((234025940)*x))+(tmp = 520098202.9546956, tmp))*(x*(tmp = -362929250.1775775, tmp)))^(-2379972900))))*(tmp = -1385817972, tmp))))+(-1788631834)))));
- assertEquals(0, x >>= ((tmp = -18671049, tmp)/((tmp = 651261550.6716013, tmp)>>(-58105114.70740628))));
- assertEquals(0, x *= ((((x>>(tmp = 2256492150.737681, tmp))<<(x<<(((-2738910707)&x)<<(1892428322))))*(tmp = 1547934638, tmp))>>((((319464033.7888391)|(((((tmp = 2705641070, tmp)<<((tmp = 1566904759.36666, tmp)*((-682175559.7540412)&(-691692016.3021002))))%(tmp = 1118101737, tmp))|(902774462))<<x))^((tmp = -388997180, tmp)<<(x<<((((((-88462733)+(x>>>x))%x)*(tmp = -20297481.556210756, tmp))>>>(1927423855.1719701))-((2047811185.6278129)-(tmp = 2952219346.72126, tmp))))))|(-1685518403.7513878))));
- assertEquals(0, x /= (tmp = 1858074757.563318, tmp));
- assertEquals(-1351623058, x ^= (-1351623058.4756806));
- assertEquals(1, x /= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x -= (x&(997878144.9798675)));
- assertEquals(-0, x /= (-2769731277));
- assertEquals(0, x >>>= ((-2598508325)>>(-1355571351)));
- assertEquals(0, x >>>= x);
- assertEquals(0, x -= (x&(tmp = 1672810223, tmp)));
- assertEquals(-924449908.1999881, x -= (924449908.1999881));
- assertEquals(-0, x %= x);
- assertEquals(-0, x /= (tmp = 2007131382.059545, tmp));
- assertEquals(-0, x += x);
- assertEquals(225132064, x += ((((tmp = -2422670578.1260514, tmp)|x)+x)^(1660142894.7066057)));
- assertEquals(Infinity, x /= (x-x));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= x);
- assertEquals(-2455424946.732606, x -= (2455424946.732606));
- assertEquals(1208029258, x &= ((tmp = 1823728509, tmp)+x));
- assertEquals(1.3682499724725645, x /= ((((tmp = 1267938464.3854322, tmp)%((tmp = 2510853574, tmp)+(((2979355693.866435)-(tmp = 1989726095.7746763, tmp))<<x)))%((-1382092141.1627176)+(((-901799353)+((-2936414080.8254457)>>>(2515004943.0865674)))-(2532799222.353197))))<<(tmp = -2168058960.2694826, tmp)));
- assertEquals(0.13799826710735907, x %= ((-1090423235)/(tmp = 2659024727, tmp)));
- assertEquals(0, x >>= (1688542889.082693));
- assertEquals(0, x <<= x);
- assertEquals(NaN, x %= ((((tmp = 1461037539, tmp)<<((x<<(tmp = 2101282906.5302017, tmp))>>(-2792197742)))%(((x%x)^(((tmp = 1399565526, tmp)^(tmp = 643902, tmp))-((tmp = -1449543738, tmp)|x)))/x))*(x<<(471967867))));
- assertEquals(0, x &= ((tmp = -2121748100.6824129, tmp)>>(tmp = -2817271480.6497793, tmp)));
- assertEquals(0, x &= (3169130964.6291866));
- assertEquals(-0, x /= (-2303316806));
- assertEquals(0, x <<= (tmp = 120185946.51617038, tmp));
- assertEquals(449448375, x ^= ((((tmp = -836410266.014014, tmp)/x)&((x>>>(tmp = -2602671283, tmp))+x))+(tmp = 449448375, tmp)));
- assertEquals(202003841790140640, x *= x);
- assertEquals(202003840800829020, x += (((tmp = -1339865843, tmp)+(tmp = 350554234.15375435, tmp))<<((((((tmp = -1798499687.8208885, tmp)>>(((x-(x^x))|((tmp = 463627396.23932934, tmp)/(2714928060)))&(tmp = 3048222568.1103754, tmp)))&(-3127578553))<<(tmp = -2569797028.8299003, tmp))&x)<<((tmp = 2104393646, tmp)/((tmp = 2314471015.742891, tmp)<<((2704090554.1746845)>>(((tmp = 1935999696, tmp)*(((1348554815)>>>x)>>>(146665093.82445252)))%x)))))));
- assertEquals(202003841764125400, x -= (tmp = -963296372.2846234, tmp));
- assertEquals(-413485056, x <<= (tmp = -2474480506.6054573, tmp));
- assertEquals(-3171894580.186845, x += ((tmp = -1261111102, tmp)+(tmp = -1497298422.1868448, tmp)));
- assertEquals(17136, x >>= (tmp = 3055058160, tmp));
- assertEquals(17136, x %= (tmp = 1706784063.3577294, tmp));
- assertEquals(17136, x >>= ((tmp = 2161213808, tmp)*x));
- assertEquals(-17136, x /= ((((tmp = -1492618154, tmp)>>x)|(1381949066))>>(tmp = 2014457960, tmp)));
- assertEquals(-34272, x += x);
- assertEquals(-1498690902, x += (-1498656630));
- assertEquals(-1168674482, x ^= (486325220));
- assertEquals(-1168674482, x <<= ((x^x)*x));
- assertEquals(794521557347068000, x *= (-679848469));
- assertEquals(1.3330392590424505e+26, x *= (tmp = 167778866, tmp));
- assertEquals(0, x <<= (tmp = -2501540637.3664584, tmp));
- assertEquals(0, x >>>= (x-(x*(-890638026.1825848))));
- assertEquals(0, x %= ((-285010538.2813468)&(1314684460.7634423)));
- assertEquals(0, x -= x);
- assertEquals(0, x *= x);
- assertEquals(NaN, x %= (x*(x<<x)));
- assertEquals(NaN, x %= (x<<(((tmp = -1763171810.601149, tmp)&(-138151449.18303752))^(x|x))));
- assertEquals(0, x |= (x>>x));
- assertEquals(0, x &= (tmp = 1107152048, tmp));
- assertEquals(0, x >>= (1489117056.8200984));
- assertEquals(518749976, x ^= (518749976.20107937));
- assertEquals(356718654, x += (tmp = -162031322, tmp));
- assertEquals(356718654, x %= (((x>>>((tmp = -373747439.09634733, tmp)*(tmp = 563665566, tmp)))*(tmp = 2853322586.588251, tmp))*((1303537213)%(-2995314284))));
- assertEquals(5573728, x >>= (tmp = -2095997978, tmp));
- assertEquals(5573728, x <<= x);
- assertEquals(5573728, x >>= (((((tmp = 1745399178.334154, tmp)<<(tmp = 2647999783.8219824, tmp))^(tmp = 1571286759, tmp))%x)/(2166250345.181711)));
- assertEquals(10886, x >>>= ((682837289)+(x*x)));
- assertEquals(170, x >>>= x);
- assertEquals(169.95167497151652, x -= (((tmp = 527356024.19706845, tmp)+((tmp = 1263164619.2954736, tmp)|(tmp = 2942471886, tmp)))/((3017909419.131321)+(tmp = 2137746252.8006272, tmp))));
- assertEquals(-1915170061, x ^= (tmp = -1915170214, tmp));
- assertEquals(206045792, x &= (((tmp = 887031922, tmp)>>>x)-((-1861922770)|(9633541))));
- assertEquals(-1940321674, x |= (tmp = -2012149162.1817405, tmp));
- assertEquals(-1940321674, x &= x);
- assertEquals(1128412272.160699, x += (tmp = 3068733946.160699, tmp));
- assertEquals(0.47486363523180236, x /= (tmp = 2376286976.807289, tmp));
- assertEquals(-1.4931079540252477e-10, x /= (tmp = -3180370407.5892467, tmp));
- assertEquals(0, x |= (((1220765170.5933602)*(884017786))*((x%(tmp = -2538196897.226384, tmp))<<(x^x))));
- assertEquals(-525529894, x += (tmp = -525529894, tmp));
- assertEquals(1621426184, x &= ((3046517714)*(((((-162481040.8033898)+(x/((x&(1489724492))/((x|(tmp = 943542303, tmp))>>>((-1840491388.1365871)<<(2338177232))))))+(((-2268887573.2430763)>>>(((tmp = 2919141667, tmp)+((tmp = 1326295559.692003, tmp)<<(-2256653815)))>>>(((((tmp = 1602731976.7514615, tmp)*(856036244.3730336))^x)>>>((((2846316421.252943)&(915324162))%(tmp = 1144577211.0221815, tmp))%x))*(x*x))))%(tmp = -2641416560, tmp)))*(x+(x>>>x)))>>x)));
- assertEquals(1621426184, x %= (tmp = 1898223948, tmp));
- assertEquals(-3.383396676504762, x /= ((tmp = 2211088034.5234556, tmp)^x));
- assertEquals(7120923705.122882, x *= (((((tmp = 2632382342.914504, tmp)/(-615440284.1762738))&(2162453853.6658797))<<(-849038082.5298986))|(tmp = -2104667110.5603983, tmp)));
- assertEquals(-1469010887, x &= x);
- assertEquals(850767635866964700, x *= (tmp = -579143179.5338116, tmp));
- assertEquals(0, x %= x);
- assertEquals(-571457, x |= ((2849326490.8464212)|(tmp = 1450592063, tmp)));
- assertEquals(-571457, x &= x);
- assertEquals(-0.00018638416434019244, x /= (3066016912.021368));
- assertEquals(0, x <<= (2058262829));
- assertEquals(NaN, x %= ((x|((x%x)>>>x))%((tmp = -2970314895.6974382, tmp)+x)));
- assertEquals(NaN, x *= (-698693934.9483855));
- assertEquals(NaN, x += (-100150720.64391875));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x -= (-530301478));
- assertEquals(NaN, x /= (1507673244));
- assertEquals(0, x <<= (x%(tmp = 2977838420.857235, tmp)));
- assertEquals(0, x <<= (tmp = 3200877763, tmp));
- assertEquals(0, x <<= (tmp = -2592127060, tmp));
- assertEquals(NaN, x -= (((((((1930632619)*(3018666359))<<((tmp = 2676511886, tmp)&(-2786714482.25468)))%x)-(-633193192))<<((tmp = 403293598, tmp)*(-2765170226)))%x));
- assertEquals(530062092, x |= (tmp = 530062092, tmp));
- assertEquals(129409, x >>>= x);
- assertEquals(-152430382316341.78, x *= (-1177896300.229055));
- assertEquals(-304860764632683.56, x += x);
- assertEquals(0, x ^= x);
- assertEquals(0, x %= (tmp = -63071565.367660046, tmp));
- assertEquals(0, x &= ((((tmp = -1007464338, tmp)<<(x<<((x^(tmp = -726826835, tmp))|x)))>>>x)*(((tmp = 469293335.9161849, tmp)<<(((((tmp = 1035077379, tmp)*(tmp = -555174353.7567515, tmp))&(3109222796.8286266))-(((((x-(tmp = 1128900353.6650414, tmp))|(tmp = 3119921303, tmp))&((-1353827690)&(x%((-924615958)&x))))>>>x)+(tmp = 1167787910, tmp)))+x))%((605363594)>>(1784370958.269381)))));
- assertEquals(0, x %= (2953812835.9781704));
- assertEquals(0, x -= x);
- assertEquals(0, x <<= x);
- assertEquals(-901209266, x += (-901209266));
- assertEquals(-901209266, x &= x);
- assertEquals(404, x >>>= (-3195686249));
- assertEquals(824237108, x ^= (824237472));
- assertEquals(497790936.1853996, x /= ((tmp = 1253776028, tmp)/(757207285)));
- assertEquals(497790936, x >>>= ((tmp = -2212598336, tmp)<<(x^(1335355792.9363852))));
- assertEquals(0, x %= x);
- assertEquals(-2659887352.6415873, x += (tmp = -2659887352.6415873, tmp));
- assertEquals(1635079945, x |= ((x&(1234659380))>>((((tmp = 2694276886.979136, tmp)|x)^((tmp = 132795582, tmp)<<((-1089828902)>>>x)))<<((((tmp = -2098728613.0310376, tmp)<<(x/(tmp = -2253865599, tmp)))*((x+(x>>>((48633053.82579231)-(385301592))))*(tmp = -1847454853.333535, tmp)))/((-540428068.8583717)+x)))));
- assertEquals(1, x /= x);
- assertEquals(33554432, x <<= ((((2803140769)<<x)|(tmp = -1965793804, tmp))>>>(tmp = -2273336965.575082, tmp)));
- assertEquals(67108864, x += x);
- assertEquals(9007199254740992, x *= (x+((x>>x)%(2674760854))));
- assertEquals(55369784, x %= (x|(-170725544.20038843)));
- assertEquals(55369784, x %= (-1186186787));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= x);
- assertEquals(NaN, x /= ((-2968110098)-((x/(x|(((((x|((x&((-130329882)>>>(((-135670650)|(x<<(tmp = 1280371822, tmp)))^x)))-(-1183024707.2230911)))&(-1072829280))>>>(-340696948.41492534))>>>(tmp = 436308526.4938295, tmp))<<(((tmp = 3113787500, tmp)*((2038309320)>>>(-1818917055)))&((2808000707)/(774731251))))))%x)));
- assertEquals(0, x |= (x*(tmp = -843074864, tmp)));
- assertEquals(0, x &= (tmp = -752261173.8090212, tmp));
- assertEquals(0, x >>>= (tmp = 1532349931.7517128, tmp));
- assertEquals(0, x <<= ((tmp = -8628768, tmp)-((((tmp = 225928543, tmp)%(x>>>(x+x)))^((tmp = -2051536806.5249376, tmp)-x))-((tmp = -2274310376.9964137, tmp)%(tmp = 2251342739, tmp)))));
- assertEquals(0, x >>= (1011388449));
- assertEquals(0, x += x);
- assertEquals(0, x >>>= x);
- assertEquals(-0, x *= ((-1781234179.8663826)>>(((1514201119.9761915)>>(((((1174857164.90042)^(tmp = 1124973934, tmp))^x)+((-1059246013.8834443)<<(2997611138.4876065)))%(((798188010)*(-1428293122))>>>(tmp = -3087267036.8035297, tmp))))<<x)));
- assertEquals(1752554372, x ^= (tmp = -2542412924, tmp));
- assertEquals(1752554372, x %= (tmp = 3037553410.2298307, tmp));
- assertEquals(1859383977, x -= (x^(2446603103)));
- assertEquals(1183048193, x &= ((tmp = -962336957, tmp)/(x/x)));
- assertEquals(67738157, x %= ((((tmp = -1813911745.5223546, tmp)+x)<<(x-(((-1980179168)^x)|x)))|(1913769561.1308007)));
- assertEquals(67698724, x &= ((1801574998.3142045)*((tmp = -2057492249, tmp)/((1713854494.72282)>>x))));
- assertEquals(0, x -= x);
- assertEquals(-25232836, x -= ((tmp = 25232836, tmp)|x));
- assertEquals(-49, x >>= (x+((tmp = 2201204630.2897243, tmp)|(-1929326509))));
- assertEquals(-1605632, x <<= x);
- assertEquals(-165965313, x += (tmp = -164359681, tmp));
- assertEquals(9.220413724941365e-10, x /= (((((tmp = 2579760013.0808706, tmp)*(tmp = -2535370639.9805303, tmp))>>((tmp = 2138199747.0301933, tmp)-(tmp = -2698019325.0972376, tmp)))*(tmp = -425284716, tmp))/((-1951538149.6611228)/(x^(2632919130)))));
- assertEquals(0, x &= x);
- assertEquals(0, x &= ((-645189137)/(tmp = 800952748, tmp)));
- assertEquals(0, x &= (tmp = -1773606925, tmp));
- assertEquals(0, x += x);
- assertEquals(0, x >>>= (tmp = 211399355.0741787, tmp));
- assertEquals(0, x <<= ((-1317040231.5737965)/((((((tmp = 838897586.0147077, tmp)|((-1902447594)|(tmp = 404942728.83034873, tmp)))^(2462760692.2907705))%((((((x%(tmp = -2888980287, tmp))<<(-368505224.49609876))-((x>>>(532513369))&(((((((tmp = -1298067543, tmp)^(tmp = -3130435881.100909, tmp))>>x)/(tmp = -3041161992, tmp))>>(x|(-431685991.95776653)))^((tmp = 1031777777, tmp)^((-105610810)>>>((-631433779)>>(tmp = -2577780871.167671, tmp)))))%(tmp = -3170517650.088039, tmp))))-(((tmp = 2175146237.968785, tmp)-((384631158.50508535)>>((893912279.4646157)|(tmp = -1478803924.5338967, tmp))))%(x/(-1089156420))))<<(tmp = -2024709456, tmp))>>x))*(tmp = -1423824994.6993582, tmp))%(tmp = 1739143409, tmp))));
- assertEquals(-1799353648, x |= ((-1799353648.3589036)>>>((((x&(-923571640.1012449))%x)+((tmp = 971885508, tmp)>>((tmp = -2207464428.2123804, tmp)+(-3108177894.0459776))))-(-2048954486.7014258))));
- assertEquals(-3666808032.2958965, x -= (tmp = 1867454384.2958965, tmp));
- assertEquals(-260069478915415100, x *= (tmp = 70925305.23136711, tmp));
- assertEquals(1142096768, x &= (tmp = 1866401706.9144325, tmp));
- assertEquals(1, x >>>= (tmp = 2701377150.5717473, tmp));
- assertEquals(1865946805, x |= (tmp = -2429020492, tmp));
- assertEquals(1424222287, x ^= ((((tmp = 433781338, tmp)>>(x>>>((-2914418422.4829016)/(tmp = 1600920669, tmp))))|(tmp = 588320482.9566053, tmp))>>>((((((x+(tmp = -2556387365.5071325, tmp))+(tmp = -2381889946.1830974, tmp))/(3154278191))>>>(-1069701268.8022757))>>(((tmp = 182049089.28866422, tmp)>>x)>>>(tmp = -447146173, tmp)))/(x-(2103883357.0929923)))));
- assertEquals(0, x ^= x);
- assertEquals(0, x -= (x%(3036884806)));
- assertEquals(0, x >>>= (tmp = -652793480.3870945, tmp));
- assertEquals(0, x += x);
- assertEquals(304031003, x ^= ((tmp = -900156495, tmp)^(-666397014.0711515)));
- assertEquals(1, x /= x);
- assertEquals(-1974501681, x |= (x^(-1974501681.4628205)));
- assertEquals(-1.3089278317616264, x /= (((-1723703186.962839)>>>x)|((2061022161.6239533)<<x)));
- assertEquals(-1, x |= (tmp = -1987006457, tmp));
- assertEquals(-0.14285714285714285, x /= ((((((x|(-1767793799.7595732))-(-1391656680))<<x)|(x>>(tmp = -2301588485.2811003, tmp)))>>>(((tmp = 1812723993, tmp)>>>((x^(((tmp = -3154100157.951021, tmp)%((tmp = -1254955564.4553523, tmp)-(((x>>>(((-1762886343)*x)*x))*(x^(x*(-750918563.4387553))))*x)))|((x>>x)>>(x<<((((-1766797454.5634143)^(tmp = -2251474340, tmp))-(-787637516.5276759))<<((1390653368)^(-1937605249.245374)))))))|(((tmp = 1156611894, tmp)<<x)<<(x>>((((x+(tmp = 2170166060.881797, tmp))&(x>>>(tmp = -1749295923.1498983, tmp)))>>(((-1014973878)|x)&(1302866805.684057)))*(tmp = 560439074.4002491, tmp))))))|(-2758270803.4510045)))&x));
- assertEquals(0, x |= x);
- assertEquals(0, x += ((x>>((x+(tmp = -2776680860.870219, tmp))-(((688502468)<<(((tmp = 475364260.57888806, tmp)<<x)+(329071671)))/(-1097134948))))*(tmp = -1281834214.3416953, tmp)));
- assertEquals(0, x *= ((((1159762330)<<(tmp = -1892429200, tmp))%x)<<x));
- assertEquals(0, x >>>= (-770595225));
- assertEquals(NaN, x += (((x>>x)/(tmp = 281621135, tmp))/x));
- assertEquals(0, x >>= (1363890241));
- assertEquals(1639023942.9945002, x += (1639023942.9945002));
- assertEquals(-2568590958567747000, x *= (-1567146697));
- assertEquals(1793554700, x ^= (tmp = 3215813388.405799, tmp));
- assertEquals(437879, x >>= x);
- assertEquals(1339485943, x |= (1339220210));
- assertEquals(1, x /= x);
- assertEquals(512, x <<= (2509226729.1477118));
- assertEquals(512, x <<= ((x>>(1326274040.7181284))<<(tmp = -760670199, tmp)));
- assertEquals(1, x /= (x<<(x^x)));
- assertEquals(0, x >>>= (((((1382512625.8298302)&(x>>>x))*(tmp = -815316595, tmp))>>>x)-(-95538051)));
- assertEquals(-544344229.3548596, x -= (tmp = 544344229.3548596, tmp));
- assertEquals(-1088688458.7097192, x += x);
- assertEquals(-1022850479579041900, x *= (939525418.3104812));
- assertEquals(2069622661, x |= (-2632744187.7721186));
- assertEquals(-1353480538017756400, x -= ((tmp = 1308085980, tmp)*((x>>>(-629663391.5165792))&(tmp = 3182319856.674114, tmp))));
- assertEquals(1.3702811563654176e+27, x *= ((((3061414617.6321163)/(tmp = 2628865442, tmp))+(-1549548261))+(x&((tmp = 809684398, tmp)|(x^(tmp = 801765002, tmp))))));
- assertEquals(0, x >>>= ((-2988504159)&((tmp = -260444190.02252054, tmp)^(2178729442.260293))));
- assertEquals(-1518607002, x -= (tmp = 1518607002, tmp));
- assertEquals(724566016, x <<= (tmp = 1042915731.7055794, tmp));
- assertEquals(707584, x >>>= (-208959862.93305588));
- assertEquals(0, x >>>= (((tmp = 877181764, tmp)>>(-970697753.3318911))%x));
- assertEquals(0, x ^= x);
- assertEquals(0, x += x);
- assertEquals(0, x <<= x);
- assertEquals(0, x /= (x^((x/(-2903618412.4936123))+(tmp = 1169288899, tmp))));
- assertEquals(0, x >>>= x);
- assertEquals(-1302645245, x ^= ((1855892732.3544865)+(tmp = 1136429319.5633948, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x &= (-1384534597.409375));
- assertEquals(-0, x /= (tmp = -680466419.8289509, tmp));
- assertEquals(-0, x *= (318728599.95017374));
- assertEquals(NaN, x %= (x>>(2019695267)));
- assertEquals(0, x >>= (tmp = 1280789995, tmp));
- assertEquals(0, x *= (tmp = 2336951458, tmp));
- assertEquals(0, x >>= ((2981466013.758637)%(731947033)));
- assertEquals(0, x -= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x /= ((((3068070149.1452317)>>x)%(((1448965452)*((tmp = -2961594129, tmp)+(1829082104.0681171)))>>(-2331499703)))>>>(tmp = -3206314941.2626476, tmp)));
- assertEquals(0, x >>= (x%(1869217101.9823673)));
- assertEquals(0, x <<= (x+x));
- assertEquals(0, x >>>= ((1202130282)>>>x));
- assertEquals(0, x += x);
- assertEquals(2603245248.6273212, x += (tmp = 2603245248.6273212, tmp));
- assertEquals(-1691864471, x ^= (x>>>(2504513614.117516)));
- assertEquals(136835305, x -= ((-1618979896)&(-746953306)));
- assertEquals(-2568499564.1261334, x += (tmp = -2705334869.1261334, tmp));
- assertEquals(1038075700, x ^= (1530399136));
- assertEquals(2076151400, x += x);
- assertEquals(-524018410.1751909, x -= ((2398973627.175191)-(-201196183)));
- assertEquals(0.327110599608614, x /= ((3181340288.602796)&x));
- assertEquals(0.327110599608614, x %= (tmp = -2284484060, tmp));
- assertEquals(0, x |= x);
- assertEquals(403217947.5779772, x += (tmp = 403217947.5779772, tmp));
- assertEquals(403217947, x |= x);
- assertEquals(-Infinity, x *= ((58693583.845808744)+(((tmp = -1527787016, tmp)*x)/((((2532689893.3191843)/(tmp = 2781746479.850424, tmp))|(((((460850355.9211761)/((((tmp = 626683450, tmp)<<((tmp = 1349974710, tmp)-((tmp = -1349602292, tmp)/(-2199808871.1229663))))>>((x/(-3092436372.3078623))&(tmp = -1190631012.0323825, tmp)))^((-2907082828.4552956)-(tmp = 1858683340.1157017, tmp))))^(-1513755598.5398848))%x)/x))&(1147739260.136806)))));
- assertEquals(0, x &= (tmp = -3047356844.109563, tmp));
- assertEquals(637934616, x -= (tmp = -637934616, tmp));
- assertEquals(-1553350083, x ^= (-2056266203.094929));
- assertEquals(-0.13467351026547192, x %= ((tmp = 824736251, tmp)/(2544186314)));
- assertEquals(1, x /= x);
- assertEquals(1, x |= x);
- assertEquals(0, x >>>= (2166609431.9515543));
- assertEquals(0, x <<= (x|(tmp = 121899222.14603412, tmp)));
- assertEquals(0, x *= (1300447849.6595674));
- assertEquals(0, x %= (tmp = -2360500865.3944597, tmp));
- assertEquals(0, x %= (tmp = -1693401247, tmp));
- assertEquals(0, x >>= x);
- assertEquals(0, x /= (471265307));
- assertEquals(257349748, x ^= (257349748.689448));
- assertEquals(257349748, x &= x);
- assertEquals(981, x >>>= (tmp = -1959001422, tmp));
- assertEquals(0, x >>= ((-79932778.18114972)/x));
- assertEquals(0, x <<= (((-2599621472)^(tmp = 662071103, tmp))%(tmp = -2675822640.7641535, tmp)));
- assertEquals(0, x &= (tmp = 2582354953.878623, tmp));
- assertEquals(0, x /= ((-953254484)/((-2571632163.376176)-(tmp = -342034471, tmp))));
- assertEquals(0, x <<= ((x-(tmp = -3013057672, tmp))&(tmp = -3204761036, tmp)));
- assertEquals(0, x ^= ((x&((515934453)>>>x))/x));
- assertEquals(1, x |= ((-1914707646.2075093)>>>(tmp = -1918045025, tmp)));
- assertEquals(-2002844120.8792589, x += (tmp = -2002844121.8792589, tmp));
- assertEquals(573030794, x >>>= (tmp = 1707788162, tmp));
- assertEquals(1.917619109627369, x /= ((1909436830.484202)%((123114323)<<(tmp = -1288988388.6444468, tmp))));
- assertEquals(-1400358045, x |= (-1400358046));
- assertEquals(-2043022529.4273133, x += (tmp = -642664484.4273133, tmp));
- assertEquals(-81408068.86728716, x %= (tmp = -980807230.2800131, tmp));
- assertEquals(0.1436896445024992, x /= (((tmp = 3201789924.913518, tmp)%(tmp = -962242528.6008646, tmp))^((tmp = -338830119.55884504, tmp)*(tmp = -916120166, tmp))));
- assertEquals(0.1436896445024992, x %= (tmp = 2598469263, tmp));
- assertEquals(0, x *= (x-x));
- assertEquals(-1409286144, x += (((-111514798.64745283)|(2372059654))<<(tmp = 175644313, tmp)));
- assertEquals(-2393905467.0073113, x += (-984619323.0073113));
- assertEquals(-835111172.0073113, x %= (x^(-765900532.5585573)));
- assertEquals(-835111172.0073113, x %= (tmp = -946478116, tmp));
- assertEquals(-100, x >>= ((-1020515908)>>(((x&((x^(169474253.53811646))>>(-221739002)))+x)*((201939882.92880356)/(tmp = -50402570, tmp)))));
- assertEquals(2131506964, x &= (tmp = -2163460268, tmp));
- assertEquals(1074275840, x &= ((-1561930379.8719592)*(tmp = -2871750052.876917, tmp)));
- assertEquals(-954232605.5377102, x -= (tmp = 2028508445.5377102, tmp));
- assertEquals(-29, x >>= (-279577351.87217045));
- assertEquals(-232, x <<= x);
- assertEquals(-70, x |= (215185578));
- assertEquals(-1, x >>= (x>>(-1691303095)));
- assertEquals(1, x /= x);
- assertEquals(3149465364.2236686, x *= (3149465364.2236686));
- assertEquals(3304787832.3790073, x += (tmp = 155322468.15533853, tmp));
- assertEquals(100068712.23500109, x %= (tmp = 3204719120.1440063, tmp));
- assertEquals(91628864, x &= (tmp = 629090241, tmp));
- assertEquals(-113202292046379710, x *= (-1235443583));
- assertEquals(122, x >>>= (tmp = 3196555256, tmp));
- assertEquals(122, x >>>= (((2226535734)-x)^(2248399036.393125)));
- assertEquals(6.904199169070746e-8, x /= (tmp = 1767040564.9149356, tmp));
- assertEquals(-212687449.99999994, x += ((((2244322375)*(((2515994102)^x)>>x))<<(x-(-832407685.3251972)))^(2266670502)));
- assertEquals(366515938514778750, x *= (tmp = -1723260768.3940866, tmp));
- assertEquals(366515938514778750, x += ((-1643386193.9159095)/(tmp = 425161225.95316494, tmp)));
- assertEquals(654872716.4123061, x /= ((-1377382984)-(tmp = -1937058061.811642, tmp)));
- assertEquals(654872716, x &= x);
- assertEquals(-86260926.17813063, x -= (tmp = 741133642.1781306, tmp));
- assertEquals(1052176592, x >>>= x);
- assertEquals(2020882856, x ^= (-3107796616));
- assertEquals(0, x <<= ((606939871.9812952)|(tmp = -3127138319.1557302, tmp)));
- assertEquals(NaN, x -= ((x%((1120711400.2242608)%x))*(tmp = -930171286.7999947, tmp)));
- assertEquals(NaN, x %= (3215044180));
- assertEquals(NaN, x %= (tmp = 2882893804.20102, tmp));
- assertEquals(NaN, x %= ((217170359.5778643)^x));
- assertEquals(0, x &= ((-1095125960.9903677)>>(x^(-2227981276))));
- assertEquals(-748549860, x += (-748549860));
- assertEquals(1816208256, x <<= (-610872411.3826082));
- assertEquals(201400576, x &= (((tmp = 1910394603.4836266, tmp)<<x)^x));
- assertEquals(0, x %= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x <<= (((((2670901339.6696005)%(2180020861))*((2134469504)/(2237096063.0680027)))*((tmp = 1203829756, tmp)>>((765467065)+(x|(2673651811.9494815)))))<<((-1463378514)|(((x/(tmp = -1075050081, tmp))-((-879974865)+x))>>>(tmp = 2172883926, tmp)))));
- assertEquals(433013198, x ^= (433013198.2833413));
- assertEquals(0, x >>= ((((-2404431196)%(x%(tmp = 1443152875.8809233, tmp)))&(x|((1414364997.0517852)/((tmp = -435854369, tmp)+(tmp = 2737625141, tmp)))))|(((tmp = 2241746562.2197237, tmp)^(tmp = -1606928010.1992552, tmp))|((tmp = -3083227418.686173, tmp)>>(tmp = -2717460410, tmp)))));
- assertEquals(0, x >>= x);
- assertEquals(0, x *= ((tmp = 2302521322, tmp)>>>(((((((tmp = 344089066.9725498, tmp)%(tmp = 1765830559, tmp))-x)|x)^(((-2450263325)/(tmp = 371928405.17475057, tmp))>>>(1330100413.7731652)))^(((173024329)%(tmp = -2927276187, tmp))+(x>>>(-1042229940.308507))))|(((((tmp = 379074096, tmp)+((142762508)-((-2773070834.526266)-(x&((tmp = 57957493, tmp)<<(2189553500))))))+((36991093)+(tmp = 339487168.58069587, tmp)))*(-1257565451))&(tmp = 645233114, tmp)))));
- assertEquals(-2644503151.1185284, x += (-2644503151.1185284));
- assertEquals(-5289006302.237057, x += x);
- assertEquals(-4008773824.2370567, x -= (tmp = -1280232478, tmp));
- assertEquals(1975449413, x |= ((tmp = 1957832005.4285066, tmp)>>((1681236712.9715524)&(-675823978))));
- assertEquals(-146472960, x <<= (-648510672.5644083));
- assertEquals(-3, x |= (((((x>>>(tmp = 2271744104, tmp))+(tmp = -210058133.30147195, tmp))+(tmp = -2827493425, tmp))/(tmp = 765962538, tmp))%(tmp = 1048631551, tmp)));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= (1070524782.5154183));
- assertEquals(0, x <<= (462502504));
- assertEquals(0, x %= (540589670.0730014));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x /= ((-1268640098)%x));
- assertEquals(NaN, x %= (1741157613.744652));
- assertEquals(NaN, x += x);
- assertEquals(NaN, x %= ((x|(tmp = 1992323492.7000637, tmp))*x));
- assertEquals(NaN, x /= ((tmp = -2271503368.0341196, tmp)>>((tmp = 1224449194, tmp)>>>(tmp = 2976803997, tmp))));
- assertEquals(NaN, x += (tmp = -1078313742.1633894, tmp));
- assertEquals(NaN, x += (-787923311));
- assertEquals(NaN, x %= x);
- assertEquals(-1299878219, x ^= (2995089077));
- assertEquals(536887953, x &= ((625660571.2651105)&(x^(((tmp = 950150725.2319129, tmp)+(-2122154205.466675))/(tmp = 1754964696.974752, tmp)))));
- assertEquals(4096, x >>>= x);
- assertEquals(1, x /= x);
- assertEquals(-82508517, x ^= (((-930231800)%(tmp = -423861640.4356506, tmp))+x));
- assertEquals(-82508517, x &= (x&x));
- assertEquals(-479519, x %= ((tmp = 1861364600.595756, tmp)|x));
- assertEquals(479518, x ^= (((x>>(-1539139751.6860313))>>(tmp = -456165734, tmp))|(-2786433531)));
- assertEquals(959036, x += x);
- assertEquals(29, x >>>= ((tmp = -1049329009.7632706, tmp)^(((((((1117739997)/(((-841179741.4939663)*(-1211599672))>>>((-413696355)%(tmp = -1753423217.2170188, tmp))))<<(tmp = 1599076219.09274, tmp))>>>(-1382960317))^(((x^(tmp = 515115394, tmp))>>>(tmp = -388476217, tmp))>>>(x/x)))^x)<<(136327532.213817))));
- assertEquals(24, x &= (2388755418));
- assertEquals(0, x >>>= (tmp = -405535917, tmp));
- assertEquals(0, x &= (tmp = -1427139674, tmp));
- assertEquals(NaN, x /= (x^((1530470340)%x)));
- assertEquals(0, x |= ((x>>(-1429690909.8472774))*((((tmp = 2033516515, tmp)/(1314782862))>>>x)>>(tmp = 1737186497.6441216, tmp))));
- assertEquals(0, x -= x);
- assertEquals(0, x %= (3115422786));
- assertEquals(-0, x *= (x+(tmp = -2558930842.267017, tmp)));
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= (2695531252.254449));
- assertEquals(-613178182, x ^= (-613178182));
- assertEquals(54, x >>>= (x%(((tmp = 2277868389, tmp)^((((tmp = -1143932265.3616111, tmp)^((x&((x-((-2100384445.7850044)|(tmp = 908075129.3456883, tmp)))*x))+(((tmp = 1031013284.0275401, tmp)*((((tmp = -233393205, tmp)>>>(tmp = -111859419, tmp))*(-1199307178))|(tmp = -1998399599, tmp)))>>>((((-731759641.9036775)>>>(tmp = 2147849691, tmp))>>>(tmp = -2121899736, tmp))>>>(x>>>x)))))>>((1900348757.360562)^(tmp = 2726336203.6149445, tmp)))>>>((x*((tmp = -2697628471.0234947, tmp)%((x^(tmp = -2751379613.9474974, tmp))*x)))+(x>>(tmp = 42868998.384643435, tmp)))))+(598988941))));
- assertEquals(34, x &= ((tmp = 2736218794.4991407, tmp)%(2169273288.1339874)));
- assertEquals(2.086197133417468, x /= ((tmp = 2176358852.297597, tmp)%x));
- assertEquals(2, x <<= (((tmp = -1767330075, tmp)|(-3107230779.8512735))&x));
- assertEquals(4194304, x <<= (tmp = 1061841749.105744, tmp));
- assertEquals(48609515, x ^= (44415211.320786595));
- assertEquals(48609515, x %= (1308576139));
- assertEquals(23735, x >>>= ((-324667786)-x));
- assertEquals(23735, x <<= ((-1270911229)<<(((((tmp = -882992909.2692418, tmp)+(tmp = 394833767.947718, tmp))-x)<<(702856751))/x)));
- assertEquals(-31080872939240, x *= (tmp = -1309495384, tmp));
- assertEquals(-14625.31935626114, x /= ((668084131)+(1457057357)));
- assertEquals(-14625.31935626114, x %= (266351304.6585492));
- assertEquals(-12577, x |= (-945583977.619837));
- assertEquals(-4097, x |= ((tmp = -2621808583.2322493, tmp)-(tmp = -2219802863.9072213, tmp)));
- assertEquals(-1004843865, x &= ((-1004839768)+((tmp = 2094772311, tmp)/(-1340720370.275643))));
- assertEquals(-31401371, x >>= ((2035921047)>>>((tmp = -1756995278, tmp)>>>(-537713689))));
- assertEquals(1791746374.016472, x -= ((tmp = -1823147745, tmp)-(x/(tmp = -1906333520, tmp))));
- assertEquals(3.7289343120517406, x /= (tmp = 480498240, tmp));
- assertEquals(7.457868624103481, x += x);
- assertEquals(234881024, x <<= (-781128807.2532628));
- assertEquals(67108864, x &= (tmp = -2060391332, tmp));
- assertEquals(-605958718, x -= (673067582));
- assertEquals(-605958718, x <<= ((x%x)&((tmp = 1350579401.0801518, tmp)|x)));
- assertEquals(-109268090.4715271, x %= (tmp = -496690627.5284729, tmp));
- assertEquals(-109268090, x <<= (((-2004197436.8023896)%((x|((tmp = 271117765.61283946, tmp)-((1595775845.0754795)*(555248692.2512416))))/x))<<x));
- assertEquals(-652725370, x &= (-543590449));
- assertEquals(0.321858133298825, x /= (tmp = -2027990914.2267523, tmp));
- assertEquals(1959498446, x ^= (1959498446));
- assertEquals(1959498446, x &= (x%(tmp = 3155552362.973523, tmp)));
- assertEquals(14949, x >>>= ((tmp = 586618136, tmp)>>>(tmp = 699144121.9458897, tmp)));
- assertEquals(-28611391568319.285, x *= (tmp = -1913933478.3811147, tmp));
- assertEquals(1680557633, x &= (((tmp = 2606436319.199714, tmp)<<(1575299025.6917372))|((-1092689109)/(735420388))));
- assertEquals(1680361024, x &= ((tmp = 1860756552.2186172, tmp)|(-360434860.1699109)));
- assertEquals(820488, x >>>= (1788658731));
- assertEquals(820488, x >>= (-1555444352));
- assertEquals(2104296413, x ^= (2103543509));
- assertEquals(16843328, x &= ((x<<((-2920883149)/(1299091676)))-(((((tmp = 3199460211, tmp)+(-237287821.61504316))&(tmp = -1524515028.3596857, tmp))-(tmp = -700644414.6785603, tmp))+(-180715428.86124516))));
- assertEquals(1326969834, x |= (tmp = -2968063574.793867, tmp));
- assertEquals(0, x %= (x>>>(tmp = 1350490461.0012388, tmp)));
- assertEquals(0, x &= ((-2620439260.902854)+x));
- assertEquals(-1775533561, x |= ((-1775533561)|(((x>>>((861896808.2264911)>>>(970216466.6532537)))%x)%(tmp = 2007357223.8893046, tmp))));
- assertEquals(-1775533561, x &= x);
- assertEquals(-23058877.415584415, x /= ((tmp = -3002439857, tmp)>>((((x-(tmp = 1583620685.137125, tmp))|x)%(-2568798248.6863875))^x)));
- assertEquals(-577.4155844151974, x %= (((-1440361053.047877)+((tmp = 821546785.0910633, tmp)-(((tmp = 1023830881.1444875, tmp)/(-754884477))+(tmp = 651938896.6258571, tmp))))>>(tmp = 346467413.8959185, tmp)));
- assertEquals(-1, x >>= (tmp = 2993867511, tmp));
- assertEquals(-1, x |= (tmp = 823150253.4916545, tmp));
- assertEquals(-0, x %= x);
- assertEquals(-0, x /= ((tmp = 997969036, tmp)&((((tmp = 928480121, tmp)>>(((-2610875857.086055)>>>(tmp = -2251704283, tmp))|x))+(10781750))>>x)));
- assertEquals(0, x >>>= ((tmp = -1872319523, tmp)>>>(-278173884)));
- assertEquals(0, x |= (x/(x*x)));
- assertEquals(0, x %= ((77912826.10575807)^(tmp = 2770214585.3019757, tmp)));
- assertEquals(0, x &= (tmp = 722275824, tmp));
- assertEquals(-1417226266, x |= (tmp = 2877741030.1195555, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x %= (tmp = -1740126105, tmp));
- assertEquals(910709964, x |= (tmp = 910709964, tmp));
- assertEquals(-1744830464, x <<= (tmp = -2445932551.1762686, tmp));
- assertEquals(318767104, x >>>= (tmp = -2465332061.628887, tmp));
- assertEquals(301989888, x &= (-2771167302.022801));
- assertEquals(301989888, x |= x);
- assertEquals(37748736, x >>= (tmp = -835820125, tmp));
- assertEquals(1474977371, x ^= (tmp = -2857738661.6610327, tmp));
- assertEquals(470467500, x += (-1004509871));
- assertEquals(0.30466562575942585, x /= (((tmp = 1515955042, tmp)<<(x+((1607647367)-(tmp = 1427642709.697169, tmp))))^x));
- assertEquals(1.0348231148499734e-10, x /= (tmp = 2944132397, tmp));
- assertEquals(0, x >>= (x>>>(tmp = -2847037519.569043, tmp)));
- assertEquals(NaN, x /= x);
- assertEquals(0, x >>>= (-1817784819.9058492));
- assertEquals(0, x >>= x);
- assertEquals(-0, x *= ((tmp = -1387748473, tmp)|(x+(352432111))));
- assertEquals(-0, x *= (((-2591789329)/(tmp = -2144460203, tmp))>>(tmp = -568837912.5033123, tmp)));
- assertEquals(0, x <<= (-2963600437.305708));
- assertEquals(0, x &= ((588720662)>>>x));
- assertEquals(1561910729, x += (1561910729));
- assertEquals(0, x ^= x);
- assertEquals(-0, x *= (-2722445702));
- assertEquals(0, x &= (tmp = -2738643199.732308, tmp));
- assertEquals(0, x /= (((1859901899.227291)>>>((tmp = -1067365693, tmp)+((-1975435278)|x)))|((1844023313.3719304)&(tmp = -624215417.0227654, tmp))));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x %= (-2852766277));
- assertEquals(0, x <<= (-1482859558));
- assertEquals(0, x >>= x);
- assertEquals(-1196775786, x += (tmp = -1196775786, tmp));
- assertEquals(-68176201, x |= ((tmp = 2336517643, tmp)+x));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>= (2969141362.868086));
- assertEquals(NaN, x %= x);
- assertEquals(0, x >>= ((x-((((tmp = -905994835, tmp)|(tmp = 2850569869.33876, tmp))<<((-2405056608.27147)>>(tmp = 1280271785, tmp)))&(-1942926558)))*(tmp = 707499803.177796, tmp)));
- assertEquals(0, x &= ((-697565829.8780258)+((2978584888.549406)%x)));
- assertEquals(0, x >>= (748642824.4181392));
- assertEquals(0, x += x);
- assertEquals(0, x >>>= (-1701028721));
- assertEquals(92042539, x -= ((-92042539)|(x*(x%(-293705541.00228095)))));
- assertEquals(0, x %= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x %= (-2278672472.458228));
- assertEquals(0, x %= (((-2374117528.0359464)/((tmp = -2809986062, tmp)|(tmp = 895734980, tmp)))&(tmp = 1564711307.41494, tmp)));
- assertEquals(0, x >>>= x);
- assertEquals(0, x += x);
- assertEquals(-0, x /= ((tmp = -2749286790.3666043, tmp)<<(x^(-2966741582.324482))));
- assertEquals(0, x *= x);
- assertEquals(0, x >>>= x);
- assertEquals(-1882562314, x ^= (2412404982.782115));
- assertEquals(-806620, x %= (((tmp = 1527219936.5232096, tmp)*(-1139841417))>>>(tmp = 201632907.3236668, tmp)));
- assertEquals(-1613240, x += x);
- assertEquals(-1664766177387640, x *= (1031939561));
- assertEquals(-9.478083550117849e+23, x *= (tmp = 569334221.1571662, tmp));
- assertEquals(-8.462574598319509e+21, x /= ((x-(tmp = -2985531211.114498, tmp))>>(tmp = 174615992.91117632, tmp)));
- assertEquals(1638924288, x <<= (((((x>>((-1823401733.4788911)+((tmp = 1362371590, tmp)>>>x)))^(tmp = -56634380, tmp))/(tmp = 2387980757.1540084, tmp))%((((tmp = -3175469977, tmp)^(tmp = -1816794042, tmp))+(232726694))*(tmp = 822706176, tmp)))/(tmp = 1466729893.836311, tmp)));
- assertEquals(2686072821796307000, x *= x);
- assertEquals(-1007977445.9812208, x /= (-2664814408.800125));
- assertEquals(-1007977445, x &= x);
- assertEquals(322314656346249100, x *= (tmp = -319763758.54942775, tmp));
- assertEquals(197436885.26815608, x /= (tmp = 1632494637, tmp));
- assertEquals(-67191339, x |= ((-399580815.1746769)/((1335558363)/(tmp = 224694526, tmp))));
- assertEquals(1229588737, x &= (tmp = 1296763683.5732255, tmp));
- assertEquals(1229588737, x -= ((((1171546503)|((tmp = -2701891308, tmp)%(-2155432197.022206)))/(-306122816.85682726))>>x));
- assertEquals(4162606632, x -= (tmp = -2933017895, tmp));
- assertEquals(1.6487311395551163, x /= (2524733434.1748486));
- assertEquals(-1929308648.9913044, x += (-1929308650.6400356));
- assertEquals(-3858617297.982609, x += x);
- assertEquals(788529152, x <<= (x^(1401824663)));
- assertEquals(6160384, x >>>= ((((((x>>>x)>>((((x*(tmp = -1958877151, tmp))>>>(1310891043))-(tmp = 564909413.9962088, tmp))%(-175978438)))%x)|((tmp = -1193552419.7837512, tmp)*(tmp = 1508330424.9068346, tmp)))|(1428324616.3303494))-((1828673751)/(tmp = 1281364779, tmp))));
- assertEquals(6160384, x |= x);
- assertEquals(1, x /= x);
- assertEquals(1, x &= (tmp = -855689741, tmp));
- assertEquals(0, x >>>= x);
- assertEquals(-1088569655.3528988, x -= (tmp = 1088569655.3528988, tmp));
- assertEquals(-1088569655, x >>= ((tmp = 2429646226.626727, tmp)<<((-1539293782.4487276)>>(x^((tmp = 1140855945.537702, tmp)+x)))));
- assertEquals(-311, x %= ((x/x)<<x));
- assertEquals(1.2007722007722008, x /= (x|(tmp = 448796341.87655175, tmp)));
- assertEquals(3, x |= (x+x));
- assertEquals(-9.32416092168023e-10, x /= (-3217447688));
- assertEquals(0, x >>= (615837464.0921166));
- assertEquals(0, x >>>= (tmp = -2993750670.683118, tmp));
- assertEquals(0, x >>>= (x%x));
- assertEquals(1610612736, x ^= ((-1322905256.6770213)<<(-2567950598)));
- assertEquals(1693676493, x ^= (83063757.63660407));
- assertEquals(-758030371, x ^= (tmp = -1239274480, tmp));
- assertEquals(-758030371, x %= (tmp = 1961339006, tmp));
- assertEquals(-1509754528, x ^= (tmp = 1960027837, tmp));
- assertEquals(-1509754528, x <<= x);
- assertEquals(-1509754528, x -= (((tmp = -50690205.33559728, tmp)/((tmp = -1364565380, tmp)<<(tmp = 2585052504, tmp)))<<(tmp = -2356889596, tmp)));
- assertEquals(1, x >>>= (-3204164321));
- assertEquals(1, x *= x);
- assertEquals(1114370230.591965, x *= ((tmp = 1114370229.591965, tmp)+x));
- assertEquals(-4.886305275432552, x /= ((-228059887.33344483)%(2841553631.3685856)));
- assertEquals(2.358309397373389e-9, x /= (((x*(tmp = 203428818.08174622, tmp))&(x-(((510438355)*x)+x)))+x));
- assertEquals(0, x >>>= ((tmp = 1444810010, tmp)&(tmp = -3135701995.2235208, tmp)));
- assertEquals(0, x /= (1865982928.6819582));
- assertEquals(0, x *= x);
- assertEquals(2078726016.3772051, x -= (tmp = -2078726016.3772051, tmp));
- assertEquals(1580337898, x ^= ((tmp = -2714629398.447015, tmp)^x));
- assertEquals(1268363034, x -= ((x+((tmp = 1144068248.3834887, tmp)&(-954104940.155973)))<<(tmp = 1270573731.7828264, tmp)));
- assertEquals(1744830464, x <<= (((1444869551.7830744)>>>((((x+(tmp = -904688528, tmp))<<x)-((tmp = 121151912.85873199, tmp)/(tmp = -2414150217.66479, tmp)))|(((-472906698)|(3215236833.8417764))+(907737193.9056952))))-((x&(-732223723))|(-221800427.7392578))));
- assertEquals(717338523283226600, x *= (x^(tmp = -2407450097.0604715, tmp)));
- assertEquals(402653184, x >>= ((-3191405201.168252)*((tmp = -1941299639.695196, tmp)|(((x>>(((3215741220)>>>x)/(x+x)))^(((tmp = -2144862025.9842231, tmp)|((tmp = -1966913385, tmp)&x))%x))*((tmp = -1124749626.6112225, tmp)/(tmp = 837842574, tmp))))));
- assertEquals(402653184, x &= ((x|x)>>x));
- assertEquals(134217728, x &= ((2720231644.3849487)*x));
- assertEquals(134217726.75839183, x -= ((2438054684.738043)/(((((-984359711)*(x|((tmp = 177559682, tmp)^x)))/(-1253443505))/((2727868438.416792)*(x+((x<<(((tmp = 3023774345, tmp)&(-705699616.0846889))/x))<<x))))^(1963626488.548761))));
- assertEquals(1, x /= x);
- assertEquals(245781494, x += ((tmp = 2551445099, tmp)^(2528486814)));
- assertEquals(-1474427807, x ^= (-1497868393.342241));
- assertEquals(-1057271682, x += ((((((x>>x)%(-1556081693))|(x/(((1166243186.6325684)-(((tmp = 2870118257.1019487, tmp)/(x+(-69909960)))^(2270610694.671496)))/((1463187204.5849519)-x))))-x)-(x<<(-3077313003)))%x));
- assertEquals(-1065725846, x &= ((tmp = -1808223767, tmp)|(-481628214.3871765)));
- assertEquals(-1065725846, x ^= (x&(((tmp = -1785170598, tmp)-(tmp = -2525350446.346484, tmp))/((((((-1783948056)^(tmp = 3027265884.41588, tmp))|((((tmp = 2195362566.2237773, tmp)<<(-2919444619))<<((tmp = -2507253075.2897573, tmp)^(x^((tmp = 1067516137, tmp)+((667737752)^(x*(tmp = -1187604212.7293758, tmp)))))))%(-617406719.5140038)))*(tmp = 511060465.6632478, tmp))*((tmp = 2580189800.752836, tmp)|((((tmp = 2357895660, tmp)%((-814381220)*(x-((x>>>(((x<<x)<<(tmp = 1919573020, tmp))-x))>>>((-2756011312.136148)>>(tmp = -1603458856, tmp))))))/((tmp = -1609199312, tmp)&(-3127643445)))%x)))<<(-2261731798)))));
- assertEquals(1.6020307924030301, x /= (tmp = -665234308.2628405, tmp));
- assertEquals(-1120020556.697667, x *= (tmp = -699125486.2321637, tmp));
- assertEquals(-215875188, x -= (((((tmp = -1307845034, tmp)>>>((((-2820720421)^x)-(((x<<x)|(tmp = -3042092997.57406, tmp))+(((-1294857544)+((tmp = -668029108.1487186, tmp)>>(x<<x)))^(912144065.5274727))))^(389671596.2983854)))|(-2774264897.146559))%(x-((tmp = 1378085269, tmp)^x)))+((-1659377450.5247462)&(((1613063452.834885)>>>((-344896580.0694165)>>>((-13450558)+x)))^x))));
- assertEquals(1, x /= x);
- assertEquals(0, x >>>= (2355750790));
- assertEquals(1969435421.4409347, x += (1969435421.4409347));
- assertEquals(0, x -= x);
- assertEquals(0, x >>>= (((x*((-1022802960.6953495)<<(tmp = -2848428731.8339424, tmp)))^(-1630921485))%(1532937011)));
- assertEquals(0, x <<= ((x+((x^(x^(tmp = 2017651860, tmp)))&(((x<<(((tmp = -1913317290.8189478, tmp)|(x-((((x%((tmp = -3035245210, tmp)+(-2270863807)))>>>((-2351852712)*(x^(-2422943296.0239563))))&((((-1578312517)%x)*x)*(-65592270.28452802)))>>>(tmp = 1104329727.2094703, tmp))))-(tmp = -1431159990.3340137, tmp)))&x)|((tmp = -2589292678.801344, tmp)&(x+((((tmp = -2557773457.456996, tmp)>>(451910805.309445))-x)>>(((tmp = -1937832765.7654495, tmp)^x)%x)))))))%x));
- assertEquals(0, x %= (tmp = -626944459, tmp));
- assertEquals(-732310021, x |= (tmp = -732310021, tmp));
- assertEquals(-732310021, x |= x);
- assertEquals(671352839, x ^= (x-((-3087309090.7153115)|x)));
- assertEquals(134479872, x &= (tmp = 2357183984, tmp));
- assertEquals(18084835973136384, x *= x);
- assertEquals(0, x <<= ((1040482277)-(tmp = -357113781.82650447, tmp)));
- assertEquals(74957, x |= ((((tmp = -70789345.7489841, tmp)%(tmp = 1415750131, tmp))&x)|((307027314)>>(2284275468))));
- assertEquals(9, x >>>= x);
- assertEquals(0, x &= (x&((x*((x*(x%x))%(x>>x)))/x)));
- assertEquals(-1872875060, x |= (2422092236.6850452));
- assertEquals(9, x >>>= (-382763684));
- assertEquals(4608, x <<= x);
- assertEquals(40.480234260614935, x /= (((((((tmp = 814638767.5666755, tmp)&((tmp = 2081507162, tmp)^(x>>>(1460148331.2229118))))&(tmp = 1187669197.7318723, tmp))<<(412000677.93339765))^((tmp = 556111951, tmp)>>(tmp = -2232569601.292395, tmp)))&(-3006386864))/x));
- assertEquals(32, x &= (-3053435209.383913));
- assertEquals(418357217, x ^= (418357185));
- assertEquals(204275, x >>= ((-1188650337.9010527)^((51494580)%(-2544545273))));
- assertEquals(982392804, x += (((x+(((tmp = -982596937.9757051, tmp)+x)%(-2298479347)))^((((tmp = 1610297674.0732534, tmp)>>>x)*(((x>>(-2746780903.08599))&(-2376190704.247188))^(((20545353)/(tmp = 1468302977, tmp))-(x<<x))))>>(((-1434332028.0447056)/((tmp = 1983686888, tmp)&((tmp = 2324500847, tmp)%(394330230.6163173))))%(((-1129687479.2158055)+((-3127595161)*((-3066570223)&((tmp = 3192134577.4963055, tmp)/(-2697915283.3233275)))))+(-1112243977.5306559)))))|(x&(-2622725228))));
- assertEquals(-2735750653096133600, x *= (-2784782870.9218984));
- assertEquals(-1876329472, x |= ((((((2752866171)<<(-1681590319))/x)>>((tmp = 1451415208, tmp)>>>(1126858636.6634417)))+(((tmp = 2165569430.4844217, tmp)/x)^(((tmp = -1675421843.4364457, tmp)-(-2187743422.2866993))|x)))*x));
- assertEquals(3520612287495799000, x *= x);
- assertEquals(-200278016, x |= ((((-2379590931)%((((-1558827450.833285)&x)>>(-665140792))-((tmp = -445783631.05567217, tmp)+(tmp = 93938389.53113222, tmp))))/(3103476273.734701))^x));
- assertEquals(-9178285062592.75, x *= ((2042671875.7211144)%(((tmp = 589269308.0452716, tmp)/x)<<(-130695915.9934752))));
- assertEquals(60048960, x |= (x<<x));
- assertEquals(60048960, x <<= ((((((tmp = -2793966650, tmp)/(-2882180652))&(((x<<((tmp = -384468710, tmp)+(2236162820.9930468)))>>>((((969371919)>>((tmp = -3153268403.2565875, tmp)-((((573811084)/x)^(tmp = -968372697.4844134, tmp))>>>(((-3096129189)>>x)/(tmp = 830228804.6249363, tmp)))))<<(((1243972633.3592157)|x)&((-1687610429)&(tmp = -1945063977.458529, tmp))))<<(((tmp = -217456781.37068868, tmp)-(400259171.68077815))^x)))>>>x))%(((2728450651.300167)/(((-2713666705.089135)%(tmp = 740472459, tmp))^x))|x))^x)*(-2463032364)));
- assertEquals(60048960, x %= (tmp = -442107222.9513445, tmp));
- assertEquals(-1573781504, x <<= (960581227));
- assertEquals(1297, x >>>= (tmp = -1692919563, tmp));
- assertEquals(1297, x &= x);
- assertEquals(-3113308397155.233, x *= (tmp = -2400391979.3024154, tmp));
- assertEquals(-3115513013486.233, x -= (2204616331));
- assertEquals(-3113809649082.233, x -= (-1703364404));
- assertEquals(0, x >>>= (((-1181206665)-(550946816.586771))|(tmp = -2346300456, tmp)));
- assertEquals(0, x %= (tmp = 1649529739.2785435, tmp));
- assertEquals(0, x ^= ((tmp = -2452761827.2870226, tmp)%(((1090281070.5550141)/(tmp = 992149154.6500508, tmp))*(x<<((((((x>>>x)|((tmp = -2410892363, tmp)%(tmp = 2585150431.0231533, tmp)))/x)*(tmp = 1541294271, tmp))+x)&((97566561.77126992)&((((-640933510.1287451)&(((((x>>>((-1821077041)<<((tmp = -1138504062.093695, tmp)-(tmp = -181292160, tmp))))%x)-(x>>((x&(((tmp = 1067551355, tmp)/(x|(1004837864.8550552)))&(x-(-103229639.25084043))))&((tmp = 2064184671.210937, tmp)+((((tmp = -2245728052, tmp)|(1538407002.8365717))+(x<<((x>>((76549490)/(tmp = 628901902.6084052, tmp)))<<((x<<x)^(-1907669184)))))+(-1409123688))))))>>>((((-1911547456.933543)-((-512313175)+((tmp = -2620903017, tmp)^(tmp = 2148757592.244808, tmp))))<<((-1740876865)>>>x))+((tmp = 691314720.9488736, tmp)<<(614057604.4104803))))|(x^((tmp = -3040687.291528702, tmp)/(x^(((x+(-2899641915))^((tmp = -1220211746, tmp)/x))%x))))))^(tmp = 119850608, tmp))%(2091975696))))))));
- assertEquals(291273239, x -= (tmp = -291273239, tmp));
- assertEquals(2206394018, x += (1915120779));
- assertEquals(235641480, x <<= (x&(x&(-1810963865.1415658))));
- assertEquals(28764, x >>= ((tmp = -1927011875, tmp)^((tmp = -1986461808, tmp)|((-868139264.8399222)*((421956566)%(3068424525))))));
- assertEquals(-99780626900900, x *= ((tmp = -1512869526.3223472, tmp)+(tmp = -1956071751, tmp)));
- assertEquals(51218520, x &= (((-2353401311)>>>x)-(2216842509)));
- assertEquals(51218520, x >>>= ((tmp = -1534539302.6990812, tmp)<<x));
- assertEquals(-2147483648, x <<= (-292608644));
- assertEquals(-2147483648, x |= ((((((x<<((-2981292735)-x))>>((tmp = 2540545320.96558, tmp)&(tmp = -2343790880, tmp)))>>>((((((x^((-172697043.94487858)/((2627260337)>>(2879112814.1247935))))&(tmp = 3000943191, tmp))<<(tmp = 1094830905, tmp))-x)>>>x)>>((((tmp = 3095796200, tmp)^(x|(tmp = 1460377694, tmp)))<<(x^(tmp = -357546193, tmp)))/((2729539495)>>x))))%(tmp = 268894171.74961245, tmp))|(x>>(tmp = 2735650924, tmp)))/(-2197885357.09768)));
- assertEquals(-2147483648, x |= x);
- assertEquals(-1967162776824578000, x *= (tmp = 916031551, tmp));
- assertEquals(-2147483648, x &= x);
- assertEquals(-457743917756973060, x *= (tmp = 213153622, tmp));
- assertEquals(0, x >>>= ((((tmp = 2930076928.480559, tmp)+(x^x))<<(tmp = -1349755597.1280541, tmp))|(x+(2865632849))));
- assertEquals(0, x <<= ((x>>x)-(x>>(-2629977861))));
- assertEquals(0, x <<= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x |= x);
- assertEquals(0, x >>>= x);
- assertEquals(749327478, x |= ((tmp = 749327478, tmp)^(x>>(tmp = 881107862, tmp))));
- assertEquals(1897869364, x += (1148541886));
- assertEquals(463347, x >>>= (tmp = -726431220, tmp));
- assertEquals(-395990542, x += (-396453889));
- assertEquals(-2824792585.1675367, x -= (2428802043.1675367));
- assertEquals(-2147483648, x <<= (tmp = -1420072385.9175675, tmp));
- assertEquals(8388608, x >>>= (-2211390680.488455));
- assertEquals(8388608, x >>= (((x/(x|(((x^(((tmp = -2175960170.8055067, tmp)|((tmp = -1964957385.9669886, tmp)/(tmp = -475033330, tmp)))&((x|((tmp = 1386597019.2014387, tmp)>>((tmp = -2406589229.8801174, tmp)+x)))<<(tmp = -844032843.8415492, tmp))))>>(x^x))|x)))-((x&((tmp = 1858138856, tmp)*(-3156357504)))%x))<<(((2046448340)+x)/(-2645926916))));
- assertEquals(8359470765396279, x *= ((tmp = 871437183.7888144, tmp)-(-125089387.17460155)));
- assertEquals(0, x ^= x);
- assertEquals(-303039014, x += ((tmp = -2475713214, tmp)|(-372871718.2343409)));
- assertEquals(2655126577, x -= (-2958165591));
- assertEquals(1830332793, x ^= (tmp = -212161208, tmp));
- assertEquals(1830332793, x ^= (((2352454407.0126333)<<((((tmp = 3083552367, tmp)/x)-(-1243111279))-((tmp = -1669093976, tmp)%(((-757485455)-(tmp = -116051602, tmp))<<x))))>>(((((-2235071915.9536905)>>(tmp = -1284656185, tmp))-x)>>((-1807028069.7202528)>>>((x%((tmp = -3070857953.311804, tmp)+((tmp = 2759633693.441942, tmp)%((169489938)*(-1582267384)))))<<(x^((tmp = -787578860, tmp)<<x)))))>>((x/(x|(409464362)))-(tmp = -64033017, tmp)))));
- assertEquals(397605933.90319204, x %= (tmp = 716363429.548404, tmp));
- assertEquals(186400, x &= (((x%(-1745754586))>>>x)<<(x&(x&((-2163627752)-((1784050895)+(((-2864781121.899456)>>>x)&x)))))));
- assertEquals(186400, x %= (tmp = -423209729, tmp));
- assertEquals(186400, x <<= ((x<<(x+(1232575114.4447284)))*x));
- assertEquals(1386299, x ^= ((tmp = -1074209615, tmp)>>>(x>>>((tmp = -1456741008.2654872, tmp)>>((1724761067)>>(-2016103779.9084842))))));
- assertEquals(347302967.20758367, x -= (-345916668.20758367));
- assertEquals(1.9325619389304094, x /= (179711170.03359854));
- assertEquals(-3703324711.628227, x *= (tmp = -1916277371, tmp));
- assertEquals(-920980517031624800, x *= (tmp = 248690187.53332615, tmp));
- assertEquals(0, x &= (((tmp = -2753945953.082594, tmp)*x)-(172907186)));
- assertEquals(-0, x /= (((((-2744323543.187253)>>((tmp = 2663112845, tmp)>>(((-121791600)+(x^x))*(2758944252.4214177))))|x)/(tmp = -2746716631.6805267, tmp))-x));
- assertEquals(0, x ^= ((tmp = 983113117, tmp)&((2638307333)+((((tmp = 3076361304.56189, tmp)<<(-2663410588.5895214))%((-1109962112)-(tmp = -2381021732, tmp)))%((tmp = 410559095, tmp)&x)))));
- assertEquals(0, x <<= (tmp = 1510895336.5111506, tmp));
- assertEquals(0, x <<= (tmp = -1688348296.2730422, tmp));
- assertEquals(2269471424, x -= (-2269471424));
- assertEquals(-2022580224, x ^= (x%((tmp = 160999480.21415842, tmp)&x)));
- assertEquals(-2077171712, x &= (tmp = 3032415014.3817654, tmp));
- assertEquals(270727, x >>>= (2973489165.1553965));
- assertEquals(270727, x |= x);
- assertEquals(-1895894537, x |= ((tmp = -1895903118.129186, tmp)|x));
- assertEquals(-1895894537, x -= ((((((((3143124509)>>>(-2866190144.8724117))*((x>>((961021882)*(tmp = 2363055833.8634424, tmp)))/((2032785518)+((2713643671.3420825)>>((-447782997.0173557)*((tmp = 1174918125.3178625, tmp)*((((tmp = -541539365.548115, tmp)%(-359633101))|(1765169562.2880063))+(tmp = -2512371966.374508, tmp))))))))/x)>>(x*((((-847238927.6399388)&(857288850))%(-2427015402))^((2221426567)%(x+x)))))>>>x)<<((tmp = 2009453564.2808268, tmp)>>((2924411494)<<(x>>(tmp = -1240031020.8711805, tmp)))))%(tmp = 3118159353, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x %= (-30151583));
- assertEquals(-1035186736, x ^= ((tmp = -517593368, tmp)<<(tmp = 3216155585, tmp)));
- assertEquals(49740, x >>>= x);
- assertEquals(49740, x %= (640223506));
- assertEquals(388, x >>>= ((x>>(tmp = 3161620923.50496, tmp))+(2605183207)));
- assertEquals(776, x += x);
- assertEquals(-97905, x ^= ((((((tmp = 145447047.8783008, tmp)^(((x>>>(tmp = 3014858214.2409887, tmp))>>>(629911626.132971))>>(((x+((369309637.229408)-x))<<(-2661038814.9204755))*(x+(x%(3025191323.4780884))))))+x)*(-482550691))|(-632782135))/x));
- assertEquals(-97905, x %= ((((-492914681)-((-2508632959.269368)&(tmp = 1209318291, tmp)))>>(-723512989.459533))>>>(((-528429623.985692)&(x^(tmp = -925044503, tmp)))-(-1696531234))));
- assertEquals(9585389025, x *= x);
- assertEquals(-715425728, x <<= ((583763091)<<(-1223615295)));
- assertEquals(-520093696, x <<= ((tmp = -1891357699.671592, tmp)*(((tmp = 3206095739.5163193, tmp)+(-2908596651.798733))>>>((tmp = -2820415686, tmp)>>(x|((((tmp = -566367675.6250327, tmp)*(-959117054))>>((((-187457085.89686918)*x)*(tmp = -2394776877.5373516, tmp))>>>x))|(((tmp = 80478970.46290505, tmp)<<(tmp = 2173570349.493097, tmp))-(x/((-2896765964)-((x/((tmp = 198741535.7034216, tmp)%(436741457)))%(tmp = 2936044280.0587225, tmp)))))))))));
- assertEquals(-2520.5909527086624, x /= ((211290893.06029093)>>(663265322)));
- assertEquals(-2520.5909527086624, x %= (x^((1057915688)<<(tmp = 1914820571.1142511, tmp))));
- assertEquals(1, x >>>= (((894963408.7746166)+(tmp = -2888351666, tmp))|x));
- assertEquals(-1989841636629996300, x += ((1424670316.224575)*((-2144149843.0876865)|((((421479301.0983993)|((3082651798)^(tmp = -271906497, tmp)))>>x)+((tmp = -178372083, tmp)%x)))));
- assertEquals(17935384255.088326, x /= (((((((tmp = 1168194849.2361898, tmp)>>>(-107316520.53815603))>>>(x^(((x%((x>>>(((-2456622387)/x)&((2124689803)|(((-1130151701)^(2796315158))>>x))))-((-884686033.5491502)>>>((-2371185318.5358763)&x))))+(tmp = 558422989, tmp))|((tmp = -420359120.0596726, tmp)/((-1820568437.0587764)&(2298602280.266465))))))>>(x-((tmp = -1164568978, tmp)^x)))^x)-x)+x));
- assertEquals(134233150, x &= ((x>>(((tmp = 98498118.13041973, tmp)-(804574397))/(tmp = -1564490985.7904541, tmp)))+x));
- assertEquals(4, x >>= (449610809));
- assertEquals(1912543790, x |= (1912543790));
- assertEquals(2487274263, x += (tmp = 574730473, tmp));
- assertEquals(-2140759118, x ^= (tmp = 338055333.9701035, tmp));
- assertEquals(311607367, x += (2452366485));
- assertEquals(9509, x >>= (372113647.84365284));
- assertEquals(-2001075684.1562128, x += (-2001085193.1562128));
- assertEquals(-638703280, x ^= (((tmp = 1096152237, tmp)&x)|((2707404245.0966487)-(((tmp = 1550233654.9691348, tmp)+(tmp = 2008619647, tmp))&((tmp = -2653266325, tmp)+(tmp = -280936332, tmp))))));
- assertEquals(-101811850, x |= (-2250090202));
- assertEquals(-13, x >>= ((-561312810.0218933)|(tmp = 79838949.86521482, tmp)));
- assertEquals(-13, x >>= ((tmp = -936543584, tmp)/(1180727664.1746705)));
- assertEquals(-1547, x *= (((tmp = 1005197689, tmp)>>>x)>>>(tmp = 34607588, tmp)));
- assertEquals(2393209, x *= x);
- assertEquals(2393209, x |= x);
- assertEquals(0, x >>= (-2691279235.1215696));
- assertEquals(0, x *= (((896175510.4920144)*((((tmp = 1770236555.7788959, tmp)%(537168585.7310632))/x)&(tmp = 1094337576, tmp)))&(((x-x)-x)>>x)));
- assertEquals(-1922620126, x ^= (-1922620126));
- assertEquals(3.43481396325761, x /= (tmp = -559745053.6088333, tmp));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>>= (tmp = 2106956255.6602135, tmp));
- assertEquals(-1339003770, x ^= ((tmp = 2955963526.960022, tmp)+x));
- assertEquals(-0, x *= ((((tmp = 368669994, tmp)>>>(x*x))<<(tmp = 2355889375, tmp))&(tmp = -2267550563.9174895, tmp)));
- assertEquals(0, x >>= (753848520.8946902));
- assertEquals(0, x >>>= x);
- assertEquals(0, x %= ((tmp = -2872753234.2257266, tmp)|x));
- assertEquals(NaN, x %= (x>>>(tmp = 890474186.0898918, tmp)));
- assertEquals(NaN, x %= ((tmp = 1341133992.284471, tmp)&(tmp = -2979219283.794898, tmp)));
- assertEquals(NaN, x += (-2865467651.1743298));
- assertEquals(NaN, x += ((-1424445677)%(x^(tmp = 1150366884, tmp))));
- assertEquals(0, x &= (x+((tmp = 1499426534, tmp)+x)));
- assertEquals(0, x |= (((((tmp = -2413914642, tmp)<<((x>>>x)^(1218748804)))+((((-1085643932.2642736)-(-1199134221.533854))>>(tmp = 2148778719, tmp))-((tmp = 1589158782.0040946, tmp)/(tmp = -2485474016.1575155, tmp))))>>>(x>>x))/(2230919719)));
- assertEquals(0, x %= ((tmp = -2576387170.517563, tmp)>>>((tmp = -2362334915.919525, tmp)>>>(((3096453582)-(700067891.4834484))^(2396394772.9253683)))));
- assertEquals(-1798103432, x ^= (((((tmp = 2396144191, tmp)*(x>>>(1512158325)))&(((-1256228298.5444434)&(((-2963136043.434966)&((tmp = 2472984854, tmp)+(tmp = -454900927, tmp)))%(tmp = 484255852.65332687, tmp)))>>((x%x)-x)))&(tmp = 929723984, tmp))^(tmp = -1798103432.5838807, tmp)));
- assertEquals(-2137913344, x &= ((((x|(-2970116473))&(((x/x)/((tmp = 2853070005, tmp)>>>x))%(((tmp = -3123344846, tmp)/((2224296621.6742916)-(tmp = -2246403296.455411, tmp)))+((x&(((x^(x*(2829687641)))+x)&(tmp = 988992521, tmp)))^x))))<<((((-820608336)^(tmp = 2851897085, tmp))>>(tmp = -402427624, tmp))>>>x))-(((x*(((-2287402266.4821453)%(tmp = -520664172.1831205, tmp))^(x/(1875488837))))<<(tmp = 402393637, tmp))&(tmp = 1576638746.3047547, tmp))));
- assertEquals(-2827557853031924000, x *= (tmp = 1322578326.6507945, tmp));
- assertEquals(6.424459501778244e+27, x *= (tmp = -2272087729.3065624, tmp));
- assertEquals(-1586887483, x |= (-1586887483));
- assertEquals(-567868980691736100, x *= (tmp = 357850816, tmp));
- assertEquals(1489101591, x ^= (x%(x|(421921075))));
- assertEquals(-801213804822328000, x *= (x|(-672326904.6888077)));
- assertEquals(612257233.6612054, x /= (((tmp = -350127617, tmp)>>>(-1140467595.9752212))<<((x^x)+(-3117914887))));
- assertEquals(19097.231243331422, x /= ((x^(tmp = -570012517, tmp))>>>x));
- assertEquals(0, x >>= ((x%(((-2347648358)%((x-(tmp = -456496327, tmp))|(x^(-1977407615.4582832))))<<(x/(tmp = -2021394626.214082, tmp))))%(tmp = -949323000.2442119, tmp)));
- assertEquals(0, x <<= x);
- assertEquals(NaN, x %= (x^(x>>(((tmp = 597147546.7701412, tmp)&(((((-972400689.6267757)|(tmp = -2390675341.6367044, tmp))|(tmp = 1890069123.9831812, tmp))<<(((1606974563)-(tmp = -2211617255.8450356, tmp))&((((x+((2433096953)&(-2527357746.681596)))*(tmp = -313956807.55609417, tmp))|((tmp = -2146031047.968496, tmp)/(tmp = 2851650714.68952, tmp)))>>(((tmp = 2630692376.6265225, tmp)-(tmp = -3162222598, tmp))>>((tmp = 1915552466, tmp)*(x>>>(-2413248225.7536864)))))))&(x%((((1218471556)|x)+(tmp = -849693122.6355379, tmp))+x))))>>>(x/((tmp = 689889363, tmp)/x))))));
- assertEquals(0, x >>>= (45649573.23297));
- assertEquals(0, x >>>= (tmp = 1084439432.771266, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x *= (tmp = 1642750077, tmp));
- assertEquals(0, x >>>= (tmp = -1944001182.0778434, tmp));
- assertEquals(1682573000, x |= (tmp = -2612394296.2858696, tmp));
- assertEquals(3041823595, x -= (((tmp = 720576773, tmp)|(x^(-1068335724.2253149)))>>(x*(-2501017061))));
- assertEquals(6083647190, x += x);
- assertEquals(-6536258988089986000, x *= ((tmp = 632312939.6147232, tmp)|((-1621821634)+(((tmp = -2281369913.562131, tmp)&((tmp = -381226774, tmp)|x))&(664399051)))));
- assertEquals(4.272268155938712e+37, x *= x);
- assertEquals(733271152, x %= (-1345127171));
- assertEquals(847089925, x ^= (tmp = 432620917.57699084, tmp));
- assertEquals(1337073824, x <<= x);
- assertEquals(-25810602, x ^= (tmp = 2982414838, tmp));
- assertEquals(-25282209, x |= ((tmp = -2927596922, tmp)>>>(-2404046645.01413)));
- assertEquals(639190091919681, x *= x);
- assertEquals(173568320, x &= ((((tmp = -718515534.4119437, tmp)&(tmp = 2989263401, tmp))<<x)|((tmp = 537073030.5331153, tmp)-(tmp = 883595389.314624, tmp))));
- assertEquals(0, x -= x);
- assertEquals(0, x >>>= (tmp = -1844717424.917882, tmp));
- assertEquals(0, x >>= (tmp = -462881544.2225325, tmp));
- assertEquals(0, x >>= x);
- assertEquals(-1868450038, x ^= (2426517258.6111603));
- assertEquals(1, x /= x);
- assertEquals(1175936039.4202638, x += (tmp = 1175936038.4202638, tmp));
- assertEquals(-127916015, x ^= ((x/(1841969600.3012052))-(tmp = 1099467723, tmp)));
- assertEquals(395713785658171900, x *= (-3093543726));
- assertEquals(395713787128560900, x += (((((-717204758)*(tmp = -588182129.6898501, tmp))-x)+(tmp = 20638023, tmp))^x));
- assertEquals(-962609355, x |= ((x^(-3118556619.912983))<<((tmp = 876126864, tmp)&x)));
- assertEquals(-962609355, x %= (tmp = -2079049990, tmp));
- assertEquals(-114583755, x -= (((-2806715240)&(((1961136061.0329285)>>>((2087162059)*x))+((tmp = -1890084022.7631018, tmp)%(tmp = 2137514142.358262, tmp))))+(x<<(tmp = 2991240918, tmp))));
- assertEquals(-425721856, x <<= x);
- assertEquals(3778560, x >>>= ((x|(3198503572))>>(1158434541.1099558)));
- assertEquals(3778560, x %= (tmp = -2592585378.9592104, tmp));
- assertEquals(624640, x &= (tmp = 2261638192.9864054, tmp));
- assertEquals(1249280, x += x);
- assertEquals(1048576, x &= ((tmp = -2144301819.9892588, tmp)^((x-x)<<x)));
- assertEquals(2097152, x <<= (x/x));
- assertEquals(5069061551149729, x *= (tmp = 2417116904.8069615, tmp));
- assertEquals(1.4836296666029616e+25, x += ((tmp = 2926833006.7121572, tmp)*x));
- assertEquals(-256, x >>= ((-469330345.3589895)%((x^(((2554170843.4978285)/(2495676674.815263))>>>x))*(-918892963))));
- assertEquals(-134217728, x <<= (x|(((((1687450853.1321645)+(tmp = 2369533014.5803776, tmp))+(tmp = -2613779445, tmp))+(tmp = -2488826226.3733397, tmp))>>(tmp = -220646936.41245174, tmp))));
- assertEquals(704164545131708400, x *= ((-2632786741)+(-2613647956)));
- assertEquals(9216, x >>>= (-1925405359.657349));
- assertEquals(4491403261551.008, x *= (tmp = 487348444.1787118, tmp));
- assertEquals(4490606381829.008, x -= (tmp = 796879722, tmp));
- assertEquals(-60294056, x >>= x);
- assertEquals(-3193966580.494005, x += (tmp = -3133672524.494005, tmp));
- assertEquals(550500358, x >>>= ((tmp = -2779637628.390116, tmp)-((tmp = 29230786.984039664, tmp)%(tmp = -310649504.7704866, tmp))));
- assertEquals(68812544, x >>= (-1347584797));
- assertEquals(1.2120221595741834e-11, x /= ((2791020260)*((((1964870148.6358237)^x)|(-3082869417))-((x^x)&((1234292117.8790703)<<(-1792461937.2469518))))));
- assertEquals(1.2120221595741834e-11, x %= (x-(2780439348)));
- assertEquals(-1421552183, x |= (tmp = -1421552183.5930738, tmp));
- assertEquals(-1420954119, x |= ((((-2547788562.5735893)<<x)%(435385623))>>(x|x)));
- assertEquals(1, x /= x);
- assertEquals(1, x >>= (x>>>(((2975715011.501709)-(tmp = -1473273552.981069, tmp))/(1654883913.042487))));
- assertEquals(-65382, x ^= ((x/((tmp = -2780026200, tmp)<<x))^(((-2683084424)<<x)>>(-1716245874))));
- assertEquals(1530921106, x &= (1530940914));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x /= (tmp = 773741434.1972584, tmp));
- assertEquals(0, x |= x);
- assertEquals(0, x <<= (-67977514.99888301));
- assertEquals(0, x %= (2496550482.524729));
- assertEquals(-0, x /= (tmp = -515040417, tmp));
- assertEquals(0, x <<= (-1673460935.2858837));
- assertEquals(-2638209488, x += (-2638209488));
- assertEquals(-2400951839498683400, x *= (910068685));
- assertEquals(1600582036, x ^= (((-1247602308.4812562)>>(((-2393714444.179732)>>>x)%(-778140635.7165127)))+(-1933914727.2268424)));
- assertEquals(0, x *= ((x-x)>>(-1270234575)));
- assertEquals(0, x >>>= (tmp = 3193676327.493656, tmp));
- assertEquals(0, x ^= (x>>>(1148676785.389884)));
- assertEquals(0, x >>= (tmp = -2269181763.8663893, tmp));
- assertEquals(0, x >>= (3149450221));
- assertEquals(0, x >>= (1069630750));
- assertEquals(-625009654, x ^= ((-2143499112)%(-759244728.6214335)));
- assertEquals(3583943, x >>>= (-2942645558.1204453));
- assertEquals(1791971, x >>= (x/x));
- assertEquals(223996, x >>= x);
- assertEquals(6999, x >>= (tmp = -1051883611.9443719, tmp));
- assertEquals(1459617792, x <<= (-1572314984));
- assertEquals(2622356453.269262, x -= (tmp = -1162738661.2692618, tmp));
- assertEquals(5103676461.269262, x += (2481320008));
- assertEquals(823989684.2692623, x %= (x^(((((1048362966)*((tmp = -2423040747.6233954, tmp)>>>x))*((tmp = 2330818588.4081, tmp)>>(tmp = 103312020.98346841, tmp)))+(tmp = 2264492857.144133, tmp))>>>((tmp = 2523442834, tmp)<<x))));
- assertEquals(0, x >>>= (tmp = -2018700898.531027, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(0, x <<= (tmp = -2489442223, tmp));
- assertEquals(0, x >>= ((3045836220)>>>x));
- assertEquals(-1156905149, x ^= (3138062147));
- assertEquals(-0, x %= x);
- assertEquals(-3118433907.512866, x -= ((tmp = 1338611238, tmp)-(-1779822669.5128663)));
- assertEquals(100679693, x &= (1040565279));
- assertEquals(10136400582574248, x *= x);
- assertEquals(0, x %= x);
- assertEquals(2400318405, x += (2400318405));
- assertEquals(1.0036190808578471, x /= (((tmp = -2313492253.9889445, tmp)|(x-((tmp = -205459123, tmp)>>x)))+x));
- assertEquals(0, x >>>= (tmp = 882343227.1675215, tmp));
- assertEquals(0, x &= ((tmp = 2307828832.2706165, tmp)^((((((1404388047)<<((807879382)-(-2862921873)))-x)*(tmp = -1897734732, tmp))>>(tmp = 1981888881.2306776, tmp))%x)));
- assertEquals(0, x <<= x);
- assertEquals(0, x *= (((x*x)*((((2764801384.171454)%(x>>>x))&(384818815))+(x>>(tmp = -1481683516, tmp))))&x));
- assertEquals(0, x >>= (tmp = -2202536436, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x &= (tmp = 15161124, tmp));
- assertEquals(-1586110900, x ^= (-1586110900));
- assertEquals(-1586127952, x -= ((tmp = 560737212, tmp)%((1349529668)>>>(tmp = -1956656528, tmp))));
- assertEquals(-1174945870, x -= ((1178456190)|x));
- assertEquals(1335167624.3422346, x -= (tmp = -2510113494.3422346, tmp));
- assertEquals(1329952126.3422346, x -= (x>>x));
- assertEquals(1, x >>= x);
- assertEquals(3, x |= (x<<x));
- assertEquals(3, x -= (x-x));
- assertEquals(-1938525669, x |= (tmp = 2356441625.5128202, tmp));
- assertEquals(-1938525669, x ^= ((tmp = -197149141.3622346, tmp)/(2833823156)));
- assertEquals(-2.6292393147661324, x /= (737295254.2254335));
- assertEquals(2925975987.370761, x -= (-2925975990));
- assertEquals(2925975987.370761, x %= (tmp = 3041184582.8197603, tmp));
- assertEquals(-1908068660, x ^= ((tmp = -1380575181, tmp)-(2375164084.8366547)));
- assertEquals(-477017165, x >>= (tmp = 2420877826.353099, tmp));
- assertEquals(-477017165, x %= ((tmp = -2919204062.3683634, tmp)-(tmp = -2263328990, tmp)));
- assertEquals(-2105539936, x &= ((tmp = -1630795440, tmp)-(x&((933423833)>>(-475069901)))));
- assertEquals(-4979480720, x -= (tmp = 2873940784, tmp));
- assertEquals(-4190953472, x -= (x&(tmp = -645918862.9001305, tmp)));
- assertEquals(17564091004468855000, x *= x);
- assertEquals(-857277134, x |= (tmp = 2363948338, tmp));
- assertEquals(1015632515, x -= (-1872909649));
- assertEquals(-1150380043, x ^= (tmp = -2014853770, tmp));
- assertEquals(1607729152, x <<= ((2194449589)+(x|(tmp = -1470075256.4605722, tmp))));
- assertEquals(1608356496, x |= ((((x|(670426524))<<((-2415862218)>>(tmp = 1572561529.9213061, tmp)))^((-1989566800.3681061)|x))&(2170270618.3401785)));
- assertEquals(-1836056576, x <<= (tmp = 2906301296.540217, tmp));
- assertEquals(-2952415961567723500, x *= (tmp = 1608020145, tmp));
- assertEquals(1435500544, x <<= x);
- assertEquals(700928, x >>>= (tmp = 2924829771.1804566, tmp));
- assertEquals(0, x <<= ((x^(2410009094))|(((-164334714.18698573)%(x*x))|(tmp = 2182431441.2575436, tmp))));
- assertEquals(-143321285, x ^= (tmp = -143321285, tmp));
- assertEquals(-2, x >>= x);
- assertEquals(-1, x >>= (x&(1109737404)));
- assertEquals(1, x >>>= x);
- assertEquals(0, x ^= x);
- assertEquals(-2463707358.165766, x += (-2463707358.165766));
- assertEquals(1831259938, x >>= (((((x-(tmp = 1359448920.5452857, tmp))%(tmp = -104541523, tmp))/((3133289055.9780197)*x))>>x)%x));
- assertEquals(1858895646, x ^= ((tmp = 131424376, tmp)>>(tmp = -396761023, tmp)));
- assertEquals(1, x >>= x);
- assertEquals(-1888369021, x |= ((tmp = -2038869285.046599, tmp)^((tmp = -1318286592.4250565, tmp)-(tmp = 2825123496, tmp))));
- assertEquals(1036458508, x <<= ((tmp = 2722401450, tmp)/((tmp = 1090712291, tmp)>>((tmp = -2155694696.9755683, tmp)*(tmp = 1661107340, tmp)))));
- assertEquals(1, x /= (x%((tmp = -1716050484, tmp)+(tmp = -1683833551.797319, tmp))));
- assertEquals(0, x >>= (tmp = -2899315628, tmp));
- assertEquals(0, x |= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x <<= x);
- assertEquals(1546062911, x |= (1546062911));
- assertEquals(1546195271, x += ((tmp = -3210667091, tmp)>>(tmp = 1323121165, tmp)));
- assertEquals(3092390542, x += x);
- assertEquals(-1199626354, x |= (406783756));
- assertEquals(-3650317194584908300, x *= (tmp = 3042878461.625484, tmp));
- assertEquals(-7.650495675092354e+27, x *= (2095844078));
- assertEquals(0, x >>= (tmp = 342617880.3384919, tmp));
- assertEquals(22, x ^= (((tmp = 381409558.9104688, tmp)>>((2823172888.974557)>>x))>>x));
- assertEquals(736383550, x += (736383528));
- assertEquals(0, x %= x);
- assertEquals(0, x += x);
- assertEquals(-1553157831, x -= (1553157831));
- assertEquals(1838556960, x <<= (3158944357.262641));
- assertEquals(5503285699.188747, x *= ((tmp = 2437440276, tmp)/(814308583.8128904)));
- assertEquals(5824889900.188747, x -= (((tmp = 1171445694, tmp)-(tmp = -1584666956, tmp))^(tmp = 1217545373, tmp)));
- assertEquals(747032, x >>>= (-89332085));
- assertEquals(747032, x |= (x^(x^(x>>>x))));
- assertEquals(747032, x >>>= ((-1558482440)*((tmp = -2413907480, tmp)+(3003996862.384156))));
- assertEquals(7.747761349084291e+23, x += ((tmp = 518064022.64624584, tmp)*((tmp = 2001951702, tmp)*x)));
- assertEquals(0, x <<= (2769324707.5640426));
- assertEquals(NaN, x %= (((((((-2458056470.7717686)&x)>>(tmp = -361831232.42602444, tmp))*(2611108609.6727047))>>>x)/(-1713747021.8431413))*(-1143281532)));
- assertEquals(NaN, x %= ((x^((-613836813)*(tmp = -3180432597.0601435, tmp)))%x));
- assertEquals(NaN, x /= ((-1607092857)^x));
- assertEquals(0, x &= (-1190719534));
- assertEquals(0, x >>>= x);
- assertEquals(0, x += (x>>(642177579.1580218)));
- assertEquals(-3129552333, x += (-3129552333));
- assertEquals(1165414963, x &= x);
- assertEquals(2222, x >>= (((tmp = 2606317568, tmp)|x)+(tmp = 1844107136, tmp)));
- assertEquals(NaN, x %= ((x^x)<<(x/(((tmp = -1362148700, tmp)&((tmp = 76371048, tmp)<<x))>>>((x^(-2605741153))>>(((tmp = -2131608159.7634726, tmp)|(((2827792229.8004875)|(((-848439251)+(-2576768890.123433))|((tmp = -2617711776, tmp)-((-199980264)&((tmp = -46967951.76266599, tmp)/(-733253537))))))*(tmp = 1820087608, tmp)))>>>(tmp = -3118359396.4298744, tmp)))))));
- assertEquals(NaN, x /= ((2144871731)*x));
- assertEquals(NaN, x *= x);
- assertEquals(NaN, x %= (tmp = 234811462.08692443, tmp));
- assertEquals(0, x >>>= ((1121416685)|(x^(((tmp = -2905413334, tmp)<<(tmp = -3091554324.030834, tmp))<<x))));
- assertEquals(-55938048, x |= ((tmp = -55938048, tmp)+(x*(tmp = -1518809027.2695136, tmp))));
- assertEquals(-3.3234995678333864e-10, x /= (x*(tmp = -3008876576, tmp)));
- assertEquals(0, x <<= (x/((((((-2168824234.2418427)>>(((tmp = 1976810951, tmp)%x)<<(x*(x>>(x%(3146266192))))))%(tmp = 1756971968.122397, tmp))>>>(-2859440157.8352804))/(-1001406.1919288635))>>>(-1358031926))));
- assertEquals(-0, x *= (tmp = -1756000533, tmp));
- assertEquals(-0, x %= (2522761446.869926));
- assertEquals(0, x >>>= (((1087690535)>>>(2741387979))^x));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= (-819422694.2188396));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x &= (tmp = 86627723, tmp));
- assertEquals(0, x += x);
- assertEquals(0, x %= (tmp = -2317915475, tmp));
- assertEquals(Infinity, x += (((-3072799584)^(-2487458319))/(((tmp = -3050692353, tmp)&x)>>(-777977292.8500206))));
- assertEquals(Infinity, x += x);
- assertEquals(Infinity, x -= (tmp = 484428269, tmp));
- assertEquals(Infinity, x *= x);
- assertEquals(Infinity, x /= (2059586218.2278104));
- assertEquals(Infinity, x *= (tmp = 415918523.8350445, tmp));
- assertEquals(-1800869091, x |= (((-1800869091)>>>(x>>>(tmp = -2832575051, tmp)))>>>x));
- assertEquals(6196126991451132000, x *= ((-1467292383.8458765)+(-1973339154.7911158)));
- assertEquals(6196126992684649000, x += (1233517421));
- assertEquals(1, x /= x);
- assertEquals(-7153809722216516000, x -= (((-2984550787.146106)<<(tmp = 743743974, tmp))*((3155151275)/((-1771412568.8965073)%x))));
- assertEquals(-7153809721471491000, x -= (-745024056));
- assertEquals(5.117699353102001e+37, x *= x);
- assertEquals(0, x >>= x);
- assertEquals(-0, x *= ((-2651785447.666973)<<(-1124902998)));
- assertEquals(-0, x /= (2119202944));
- assertEquals(1042673805.5205957, x -= ((x<<x)-(tmp = 1042673805.5205957, tmp)));
- assertEquals(62, x >>>= (tmp = 2769597912.977452, tmp));
- assertEquals(34, x &= ((tmp = -61541150, tmp)%(x^(-943160469))));
- assertEquals(34, x ^= ((-2625482224.4605474)<<(-2277806338.3461556)));
- assertEquals(536870912, x <<= ((-2373927426.4757633)^x));
- assertEquals(536870912, x &= x);
- assertEquals(512, x >>>= ((-1626769708.310139)<<((tmp = 641796314, tmp)/(721629637.3215691))));
- assertEquals(0, x <<= (-113973033));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x += (-1602711788.2390788));
- assertEquals(NaN, x *= (x%x));
- assertEquals(0, x &= (x<<(x|(x>>((x>>>(x%((1182960050)^(((-220896609)-((((tmp = 1518275435.360103, tmp)/(tmp = -88234820, tmp))^x)/x))>>(3169930777.548236)))))-(tmp = -2912668817.662395, tmp))))));
- assertEquals(0, x *= ((2323969408.7524366)/(((tmp = -3089229853, tmp)>>>((((tmp = -1012580544.5631487, tmp)>>(1138049418.9023373))>>x)&x))*(tmp = 626912001, tmp))));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x /= (x%(-868024322)));
- assertEquals(NaN, x /= (tmp = -1749532322, tmp));
- assertEquals(1861918711, x |= (-2433048585.853014));
- assertEquals(1861918711, x >>= (((102451747)>>>((((241651917.47259736)/((((((((1759022236)^(tmp = -2592022722, tmp))+((-1748044969)>>>(704597925)))/(-1639604842))%((1349846853.7345295)<<(-729695861)))/(x>>((tmp = -2654474404.7365866, tmp)>>x)))>>>(((-480356478)|(x%((tmp = -1668269244.6979945, tmp)+(tmp = -2441424458.565183, tmp))))^((1634981212.7598324)>>>(tmp = 122455570.22000062, tmp))))<<x))*((tmp = -1058636137.5037816, tmp)+((2794083757.138838)&((x/(50081370))&x))))/x))/((tmp = -243106636, tmp)<<((x*((tmp = -648475219.5971704, tmp)>>((tmp = -1568913034, tmp)-((tmp = 911458615, tmp)|x))))>>>(tmp = 2714767933.920696, tmp)))));
- assertEquals(0, x ^= x);
- assertEquals(-2080484602, x |= (((1544771831.4758213)|x)^(-538113039)));
- assertEquals(696451072, x <<= (tmp = -1587032689, tmp));
- assertEquals(-162595645, x += (tmp = -859046717, tmp));
- assertEquals(516546456, x >>>= x);
- assertEquals(623083588, x += ((-1371850352)^(tmp = -1469933252, tmp)));
- assertEquals(92342412, x %= (tmp = -132685294, tmp));
- assertEquals(500272110, x |= ((tmp = 1616032506, tmp)%((tmp = 1589569590.4269853, tmp)|(-972791738.1829333))));
- assertEquals(3247086, x %= (((tmp = 1372216208, tmp)|(-638950076.3387425))&((-2619249161.849716)&(73957896))));
- assertEquals(0, x >>>= (tmp = -1482343462.6911879, tmp));
- assertEquals(1265125662, x ^= (tmp = -3029841634, tmp));
- assertEquals(4941897, x >>>= (-2039728632));
- assertEquals(206857, x &= (tmp = 226962365.45571184, tmp));
- assertEquals(1.0925018562586405e+24, x += ((tmp = 2687424146, tmp)*(((-1998020319)%x)*(-2080331363))));
- assertEquals(-1.755270751212437e+32, x *= (-160665242));
- assertEquals(0, x <<= (3152796521.6427975));
- assertEquals(0, x ^= ((((((tmp = -855001595, tmp)<<(2007525777))-(x-(x-x)))/(3036585090.9701214))&(1827983388))*((tmp = -915604789.0515733, tmp)&(((((tmp = -806628722.7820358, tmp)%x)/(tmp = -2773117447, tmp))|x)<<(((tmp = -2902300974.7300634, tmp)|x)/(-1608133440))))));
- assertEquals(0, x |= ((((((119024954)*(((x^(tmp = 2939514414, tmp))|x)^(x-(tmp = -1597415597.6795669, tmp))))+(((tmp = -182277816.14547157, tmp)<<(((-2983451324.3908825)^(tmp = 1572568307, tmp))+(-1165604960.8619013)))/(x>>((tmp = -2127699399, tmp)>>((x^(((((tmp = -1968667383, tmp)^(tmp = 3120052415.9964113, tmp))|(((x|(((x^((tmp = 2831505153, tmp)<<((-3150506831.547093)+((x%(tmp = 383761651, tmp))%(2856803457)))))+(((tmp = -2426953997, tmp)^(tmp = -2667954801.1010714, tmp))*(tmp = -2707801631, tmp)))&(tmp = 2082935238.794707, tmp)))^((tmp = 697573323.5349133, tmp)-x))%(tmp = 661936357, tmp)))/(-1717944600.261446))>>>((2423776015.0968056)^((-1410322010)|((x<<(tmp = 2935993226, tmp))/(tmp = -1533896392, tmp))))))*(tmp = -596675330, tmp))))))>>>(((2944268153)^(x&(144579050.93126357)))/(-2123810677.2619643)))>>>(1473040195.9009588))*x));
- assertEquals(0, x /= (2877666495));
- assertEquals(2174852514, x -= (tmp = -2174852514, tmp));
- assertEquals(543713128, x >>>= x);
- assertEquals(2978128878.939105, x += (tmp = 2434415750.939105, tmp));
- assertEquals(3529591145844655600, x *= (tmp = 1185170719.3753138, tmp));
- assertEquals(659, x >>>= ((((((x<<(((((-425423078)/(((tmp = 160617689.20550323, tmp)&(-1524740325.5003028))%(tmp = -1869426475, tmp)))<<(((x^(-487449247))>>>(tmp = -1962893666.7754712, tmp))%x))*x)>>((tmp = 623413085, tmp)&(x+(((((-2200726309.083274)-(x-x))+x)&(-1304849509))|((((tmp = -431896184, tmp)>>>(x>>(-1932126133)))<<((1078543321.2196498)*(-10761352)))>>(tmp = -2681391737.5003796, tmp)))))))/x)-(tmp = -1768629117, tmp))/(((((tmp = -2320718566.0664535, tmp)%x)+(-2831503351.995921))>>>(-2695416841.3578796))*(943979723)))<<x)|((652520546.7651662)>>(1045534827.6806792))));
- assertEquals(531, x &= (tmp = -293707149, tmp));
- assertEquals(0, x >>= (tmp = -678056747.5701449, tmp));
- assertEquals(1184651529.8021393, x += (tmp = 1184651529.8021393, tmp));
- assertEquals(1721719611, x |= (tmp = 1645413178, tmp));
- assertEquals(-406880257, x |= (tmp = 2268544460, tmp));
- assertEquals(-4194304, x <<= (tmp = -109701322.43455839, tmp));
- assertEquals(17592186044416, x *= x);
- assertEquals(0, x ^= (x&x));
- assertEquals(0, x <<= (tmp = 1715401127, tmp));
- assertEquals(-1793087394, x |= (tmp = -1793087394.730585, tmp));
- assertEquals(-2, x >>= x);
- assertEquals(263607360.10747814, x += (tmp = 263607362.10747814, tmp));
- assertEquals(1073214955, x |= (893759979.3631718));
- assertEquals(703953930, x -= ((2738450011)%(x^(tmp = 679402836, tmp))));
- assertEquals(1, x >>= (tmp = 2262515165.6670284, tmp));
- assertEquals(0, x >>= (((tmp = 747896494, tmp)^((tmp = -1005070319, tmp)+x))|x));
- assertEquals(0, x >>= ((953612771)>>>(tmp = 3066170923.3875694, tmp)));
- assertEquals(-314941454, x -= (x+(((314941454)%(((tmp = 2200222912.9440064, tmp)>>>(2534128736.805429))>>>(x|((747716234)%(((tmp = -252254528, tmp)%(-1553513480.1875453))&x)))))<<x)));
- assertEquals(-535686958, x &= (-522809126));
- assertEquals(0.5480312086215239, x /= (tmp = -977475278, tmp));
- assertEquals(-1199953459.6090598, x *= ((-2189571393)+((3186862741.37774)>>(tmp = -2193090564.5026345, tmp))));
- assertEquals(-1199953459.6090598, x %= ((tmp = 2986532440, tmp)*(2685122845)));
- assertEquals(-1199953459.6090598, x %= (1951182743.7399902));
- assertEquals(51262285383887820, x *= (-42720228));
- assertEquals(-424776752, x |= x);
- assertEquals(166221344210236600, x *= (tmp = -391314598.6158786, tmp));
- assertEquals(-1883425600, x >>= (((tmp = -1020679296, tmp)^((-1416867718)+(-1412351617)))<<(-2743753169)));
- assertEquals(0, x &= (x/(-2250026610)));
- assertEquals(-1111956501, x ^= (tmp = 3183010795, tmp));
- assertEquals(2012059503, x ^= (tmp = -900369276, tmp));
- assertEquals(15719214, x >>>= (tmp = -3196277049, tmp));
- assertEquals(15719214, x |= x);
- assertEquals(100779035, x -= ((-1245802025)^(-2964289852)));
- assertEquals(0, x >>= x);
- assertEquals(0, x &= (((x<<((2361941389.708063)%x))>>((328256762.09842086)>>>((((tmp = 3094192285, tmp)-(((x>>(tmp = -2920437464, tmp))<<(tmp = -2693021467, tmp))-(x>>>((2410065554)%(x%(tmp = 2487056196.689908, tmp))))))-(tmp = -866314146, tmp))^((1754098471)-((((((-2450740191)-(tmp = 1977885539.6785035, tmp))*((tmp = -1205431332, tmp)>>>x))>>(-870601854))>>(tmp = -301859264, tmp))|((tmp = -2308971516.8301244, tmp)/x))))))&((2307007357)-((tmp = -1518812934, tmp)+(2562270162)))));
- assertEquals(0, x <<= x);
- assertEquals(-1802124619, x |= (-1802124619));
- assertEquals(-1802124619, x %= ((1617132364.306333)+((1678465962.079633)|((516698570)%(((569813606)*(-1800804098.6270027))%((tmp = 1976706935, tmp)-((tmp = -1830228989.5488424, tmp)>>(((x^((tmp = 1015246068.3791624, tmp)>>x))^((-2171682812.246772)-(tmp = -398330350, tmp)))&x))))))));
- assertEquals(904564673.6237984, x -= (tmp = -2706689292.6237984, tmp));
- assertEquals(818237248768128900, x *= x);
- assertEquals(254842325.2585001, x %= (1550087667.9657679));
- assertEquals(-1163919360, x <<= x);
- assertEquals(-3.4644526843674166, x /= ((-446801454)+(x>>>(tmp = -2025151870, tmp))));
- assertEquals(0, x &= ((((((((-1739617728)&(x&(((tmp = -2946470036.552597, tmp)/x)*x)))^(-1130501404))>>>x)/((1870230831)>>>(840301398)))%x)/x)/(-2927537567)));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>>= (x&(x&x)));
- assertEquals(0, x &= ((-579614044)-(-756012505.4048488)));
- assertEquals(-2970367642, x -= (tmp = 2970367642, tmp));
- assertEquals(-415129376, x ^= (tmp = 2847041926.060355, tmp));
- assertEquals(-1505681312, x &= (tmp = -1225184902.9215767, tmp));
- assertEquals(-3174471329.5807734, x += (-1668790017.5807734));
- assertEquals(-Infinity, x /= (x>>x));
- assertEquals(NaN, x -= x);
- assertEquals(0, x ^= (x^(((-1407936301.5682082)<<((x^(((tmp = 3213446217.307076, tmp)|x)|((tmp = 3219810777.3171635, tmp)/(tmp = 1561807400, tmp))))>>>((tmp = 2449910203.0949173, tmp)|((((1954662538.7453175)>>(tmp = -1711636239.9916713, tmp))>>>(tmp = 406219731.214718, tmp))<<(((-907908634.4609842)^((((((tmp = 2408712345, tmp)*(tmp = 1740346634.5154347, tmp))>>(tmp = 715783991, tmp))^(tmp = -655628853.2821262, tmp))%(tmp = 2819143280.434571, tmp))/(-1240412852)))*x)))))/x)));
- assertEquals(0, x >>>= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>>= (((-3198075268.8543105)>>(((((x+((tmp = -133461401.50823164, tmp)-((x&(((((tmp = 2617977319, tmp)>>((tmp = -2704719576.8734636, tmp)|((tmp = -977362542.2423751, tmp)<<(x<<(tmp = 3054487697.1441813, tmp)))))>>>((-1635655471)%x))/(-2079513672))%(tmp = 1993563806, tmp)))<<(tmp = -1310524200.6106496, tmp))))%((((-2558804500.7722936)+(tmp = -1641265491, tmp))<<((tmp = -1309608349, tmp)>>>x))/((tmp = -2306644272, tmp)<<x)))*(-2009396162.3063657))+(267343314.3720045))-(-2212612983.661479)))|x));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x *= x);
- assertEquals(-824822309, x |= (-824822309));
- assertEquals(-807944741, x |= (((-598067403)*((x&(tmp = 2897778389, tmp))>>>(-1322468310.3699632)))|x));
- assertEquals(90004223.44097246, x /= (((tmp = -481122620, tmp)&x)%((tmp = 1109368524, tmp)/(((-3150568522.633032)<<(tmp = 2923396776, tmp))^(x-((x/x)&(x/(-287976185.1049104))))))));
- assertEquals(0.4521931751193329, x /= (tmp = 199039323, tmp));
- assertEquals(1.8110466604491368e-10, x /= (2496860986.492693));
- assertEquals(0, x |= x);
- assertEquals(-1225944576, x += ((tmp = -807700791.631221, tmp)<<((-700782615.4781106)-((((-2954619897)>>>x)<<((tmp = 997657844, tmp)>>>(1227994596)))/((-1234591654.8495834)*((tmp = -191189053.70693636, tmp)+(tmp = -3027659304, tmp)))))));
- assertEquals(-1225811383, x |= (-1866233271));
- assertEquals(3069155913, x >>>= (((x/(-99524153.40911508))%(x>>>((((tmp = 2985975640, tmp)/(tmp = 2781516546.2494454, tmp))&(((2234114508)|(((x/(tmp = -1224195047, tmp))<<x)^(x>>>((537884375.5698513)+x))))^((tmp = -2144817497.5089426, tmp)|(-498079183.8178189))))>>>((x+x)&(-3086080103.6460695)))))<<(((tmp = 2151157136, tmp)*x)/(((x/x)>>>(-1149734628.4364533))-((3025445835.654089)+(tmp = 530902725.91127443, tmp))))));
- assertEquals(-1733702568, x ^= (tmp = 776361489.423534, tmp));
- assertEquals(8981504, x &= ((tmp = 2902581847, tmp)*(x-(-2697760560))));
- assertEquals(1153166.8526612986, x -= ((x/(tmp = -1375025594.5027463, tmp))+((3043576689.1538706)%(x+x))));
- assertEquals(3389855, x |= (x+x));
- assertEquals(-488458393.17759943, x += (-491848248.17759943));
- assertEquals(40982867145206920, x *= ((3132857155)|(tmp = -218356553, tmp)));
- assertEquals(688, x >>= (((((tmp = 403321821, tmp)+((tmp = 2536984658, tmp)%((tmp = 2759309029.8753624, tmp)|(((tmp = 1994203554.7417293, tmp)^((704660500.434877)*(tmp = 1536292958.2691746, tmp)))+(-164139788)))))/((1205950994.1255205)+x))^((((tmp = 975272146.0133443, tmp)-(150107797))/(-1764309514))^((x>>>(x^(x^x)))+(203250124))))>>>(tmp = 1864959239.512323, tmp)));
- assertEquals(10, x >>= ((tmp = 1631996431.9620514, tmp)>>x));
- assertEquals(10, x %= (tmp = 2678904916, tmp));
- assertEquals(335544320, x <<= (tmp = -2759037415.6811256, tmp));
- assertEquals(-153389967, x |= ((tmp = -2411636565, tmp)+(tmp = -2305156154, tmp)));
- assertEquals(-1171, x >>= x);
- assertEquals(813080576, x &= (((tmp = -65428547, tmp)&(tmp = 3163266999, tmp))<<x));
- assertEquals(4346532303, x += ((tmp = -761515569.0707853, tmp)>>>(((tmp = 143240971.0661509, tmp)<<x)*(x^((tmp = -271697192.8471005, tmp)&x)))));
- assertEquals(-863299035, x ^= ((((2663001827.1492147)>>>((x/(((tmp = 482665912, tmp)-(x>>(tmp = 354425840.784659, tmp)))>>((-2012932893)>>>x)))/((tmp = -1354385830.6042836, tmp)>>>(-2149023857))))^((tmp = 585746520, tmp)+(tmp = 756104608, tmp)))^(517529841.184085)));
- assertEquals(-997654012, x &= (((tmp = -404836025.15326166, tmp)+((tmp = 3035650114.0402126, tmp)<<((-1308209196)>>(tmp = 693748480, tmp))))<<(((465774671.4458921)<<x)/(1971108057))));
- assertEquals(-320581507110848260, x *= ((x-(tmp = -2266777911.7123194, tmp))^(tmp = -2810021113.304348, tmp)));
- assertEquals(-320581508271196300, x += ((-1195215841.5355926)|(x-((2715907107.4276557)+(((-843426980)>>(x&(x%(tmp = -1139279208.34768, tmp))))^x)))));
- assertEquals(368031616, x &= x);
- assertEquals(368031616, x %= (tmp = 1211767328, tmp));
- assertEquals(-67505614939510744, x *= (tmp = -183423412.56766033, tmp));
- assertEquals(959424552, x >>= ((tmp = -171120122.5083747, tmp)/x));
- assertEquals(30949179.096774194, x /= (((x-((((x&(tmp = -180770090, tmp))<<(((tmp = -2061363045.419958, tmp)*((655711531)^((1205768703)-(tmp = 2468523718.8679857, tmp))))+(-2746704581)))+((-853685888)*(tmp = -2299124234, tmp)))|(tmp = 2429502966, tmp)))|(((-985794986.0232368)>>>(2890862426))%x))>>(tmp = 1005542138.8415397, tmp)));
- assertEquals(30949179, x |= x);
- assertEquals(30949179, x %= (810126097.6814196));
- assertEquals(120895, x >>= (tmp = 3065886056.1873975, tmp));
- assertEquals(1934320, x <<= (1478650660.7445493));
- assertEquals(0, x >>= (1069658046.2191329));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x %= (x*x));
- assertEquals(NaN, x *= ((((2148513916)+(tmp = -210070225.85489202, tmp))>>(975470028))+((-3060642402)>>x)));
- assertEquals(NaN, x *= (2888778384));
- assertEquals(NaN, x -= (294531300.16350067));
- assertEquals(-465620423, x ^= (tmp = -465620423.5891335, tmp));
- assertEquals(1613303808, x &= (-2530649850.1952305));
- assertEquals(2045458658, x |= (tmp = 432158946.5708574, tmp));
- assertEquals(0, x >>>= (2277328255.770018));
- assertEquals(0, x &= (-64904722.41319156));
- assertEquals(0, x >>= x);
- assertEquals(3109394857.361766, x += (3109394857.361766));
- assertEquals(1519021650, x ^= ((tmp = -2632472653, tmp)|(tmp = 2161964921.8225584, tmp)));
- assertEquals(370854, x >>>= ((1486892931.4564312)-((tmp = 3017755741.9547133, tmp)>>>x)));
- assertEquals(1333145110.39802, x -= ((-1051580495.39802)-(tmp = 281193761, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x |= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>>= x);
- assertEquals(799202788.1455135, x -= (tmp = -799202788.1455135, tmp));
- assertEquals(1539080192, x <<= (x%(((((x-x)|(((((x%(959993901))+(tmp = -2647575570.092733, tmp))/(tmp = -2040600976.5104427, tmp))*(x*(tmp = 2785252760, tmp)))>>(-377867259)))/((x&(1549738240.013423))>>>(tmp = -1502185618, tmp)))*x)%(1159283801.0002391))));
- assertEquals(0, x >>= (-268660225));
- assertEquals(-0, x /= (-2795206270.635887));
- assertEquals(0, x >>>= (1869556260.2489955));
- assertEquals(64202212, x ^= ((((tmp = -942983515.5386059, tmp)*(((1057759788)-x)*(tmp = 2038041858, tmp)))>>x)+(tmp = 64202212, tmp)));
- assertEquals(2021126977, x -= ((tmp = -2009912898, tmp)^((2240062309)%x)));
- assertEquals(4332348265459724000, x *= (tmp = 2143530968, tmp));
- assertEquals(1472, x >>>= ((283380755)<<x));
- assertEquals(-1672370407872, x *= (tmp = -1136121201, tmp));
- assertEquals(338573318, x ^= (tmp = 2329579078.4832354, tmp));
- assertEquals(2377388772.1662374, x -= (tmp = -2038815454.1662374, tmp));
- assertEquals(-1.264761712403516, x /= ((((tmp = -2106209534, tmp)>>((((((tmp = 626190172, tmp)/x)>>>(-824270996.8545206))/((1258369810.9498723)-(tmp = -2947556209, tmp)))^((((366784589.24711144)|(1462064104.828938))-(1571045395.777879))<<(444685689.60103726)))>>(tmp = -2757110357.410516, tmp)))/(x>>>((tmp = 829226010, tmp)>>>(629512715))))|x));
- assertEquals(-2905481691.264762, x -= (2905481690));
- assertEquals(-1710543566.1481905, x -= (-1194938125.1165714));
- assertEquals(-3421087132.296381, x += x);
- assertEquals(-884178944, x <<= ((-1820881235)|x));
- assertEquals(-884178944, x &= (x%(tmp = -2298828530, tmp)));
- assertEquals(1516503040, x <<= ((tmp = -3039882653, tmp)+((tmp = 1956034508, tmp)<<(x>>(tmp = 280388051, tmp)))));
- assertEquals(3033006080, x += x);
- assertEquals(846431222.321887, x %= (x+(-1939718651.1609435)));
- assertEquals(-846431224, x ^= ((-1742116766.54132)/x));
- assertEquals(1157918728, x &= (tmp = 1966568030, tmp));
- assertEquals(1157918728, x >>>= ((((((tmp = -2392096728.184257, tmp)*(x&(-3051259597.301086)))>>>(((tmp = 1712991918.071982, tmp)*(tmp = -714525951, tmp))-((-1784801647)>>((-1270567991)%(((214272558)/(((-3110194570)|(tmp = 2558910020, tmp))&(-1266294955.717899)))*((2654922400.609189)>>>(tmp = 370485018, tmp)))))))*(((tmp = -2621203138.1838865, tmp)%(858913517))*((tmp = -1564229442.2596471, tmp)>>((tmp = 1898557618, tmp)|(-1282356275)))))*(tmp = -1253508468, tmp))+((-361964404.75944185)|x)));
- assertEquals(961668975, x += (-196249753));
- assertEquals(1, x >>= (tmp = 890453053, tmp));
- assertEquals(1, x >>= (((((tmp = 871309275, tmp)/(x>>>((tmp = 2033022083, tmp)&(tmp = -1393761939, tmp))))%((437488665.104565)^(tmp = 2808776860.4572067, tmp)))-((tmp = -359283111.49483967, tmp)<<((tmp = 2985855945, tmp)%(tmp = -596479825.9114966, tmp))))/(-1965528507)));
- assertEquals(0, x >>= ((tmp = -1753776989, tmp)%(tmp = 322622654, tmp)));
- assertEquals(84411424, x ^= (((x|(x|(tmp = -1617122265, tmp)))&(tmp = -313813263, tmp))&(1472888112.0258927)));
- assertEquals(67633184, x &= ((1556833131.0776267)<<(x<<(1501219716.5575724))));
- assertEquals(68002293, x |= (((tmp = 188984203.0350548, tmp)>>>(tmp = 1356052777, tmp))%(x*(tmp = -2944960865, tmp))));
- assertEquals(67108864, x &= (((1046644783.9042064)<<x)+((-2796345632)>>>(((-1913290350.3687286)<<(((((tmp = -2223692353, tmp)>>x)&(x<<(x>>((((tmp = -976850020, tmp)%(tmp = 1379692507, tmp))>>>(1120103052.2077985))>>(tmp = 5592070.612784743, tmp)))))<<(x+((tmp = -3154037212.9764376, tmp)%(((x-(-1961060483.6965141))+(((1920670676)-(2852444470.7530622))/(((1445954602)>>((1353665887)>>(tmp = 111411560.64111042, tmp)))<<x)))+x))))<<((-1773130852.6651905)^((1216129132)>>(1511187313.2680469)))))|((tmp = -1107142147, tmp)|(tmp = -768165441.4956136, tmp))))));
- assertEquals(0, x -= x);
- assertEquals(0, x %= (tmp = -1655707538.0778136, tmp));
- assertEquals(-184120712930843900, x += (x+((tmp = -3174410166, tmp)+((tmp = -301807453, tmp)*(tmp = 610060182.1666535, tmp)))));
- assertEquals(-54598560, x >>= (-1365351357));
- assertEquals(-6763.94449950446, x /= (((-1953016847)<<((673287269.7002038)%(-558739761)))>>>(tmp = 1607754129, tmp)));
- assertEquals(-1, x >>= x);
- assertEquals(1, x >>>= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>= ((-384747983)+((((tmp = -949058352.381772, tmp)>>>(-1920744986))-(-882729639))^((x^((tmp = 2351364046, tmp)<<(((tmp = -3110165747, tmp)^(-1266489735))-((tmp = -371614326, tmp)>>((tmp = -2064968414, tmp)&(-2075036504.617934))))))&(((-2616501739)&(tmp = 2591437335.4029164, tmp))>>x)))));
- assertEquals(0, x >>>= ((tmp = 2946468282, tmp)&((-2741453019)>>x)));
- assertEquals(0, x -= ((x%(-134700915))&(-1955768279)));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x /= (x^(((((((tmp = 3185669685.772061, tmp)>>(tmp = -1973500738, tmp))-(tmp = -87401348.93002152, tmp))>>(tmp = -2813508730, tmp))&(tmp = -778957225, tmp))<<(x-(x&((-2821756608)+(((((tmp = 2475456548, tmp)/(tmp = 997998362, tmp))<<((tmp = -83043634, tmp)|x))%(636120329))%(tmp = -1910213427.7556462, tmp))))))%x)));
- assertEquals(0, x &= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>>= (x%x));
- assertEquals(0, x %= (745221113));
- assertEquals(0, x >>>= ((1467615554.7672596)|x));
- assertEquals(0, x /= (tmp = 735317995, tmp));
- assertEquals(-1513001460, x |= (2781965836));
- assertEquals(-1513001460, x |= (x%(1970577124.3780568)));
- assertEquals(-0, x %= x);
- assertEquals(1864972269, x ^= (-2429995027.840316));
- assertEquals(1226843341, x &= (tmp = -639621923.5135081, tmp));
- assertEquals(1226843339.3171186, x += ((1297620268.272113)/(-771070549)));
- assertEquals(76677708, x >>>= (1009134980));
- assertEquals(0, x ^= x);
- assertEquals(0, x ^= x);
- assertEquals(NaN, x /= x);
- assertEquals(716040787, x |= ((1851586229)-(1135545441.3502865)));
- assertEquals(1385693184, x <<= x);
- assertEquals(1321, x >>= (x^((tmp = -1576632297.0860603, tmp)>>>(405218605))));
- assertEquals(-1319012931, x |= (-1319014243));
- assertEquals(-1319012931, x >>= ((((1689898279.3580785)<<((((x^(x>>>((((tmp = 2635260332, tmp)*(tmp = 2053357650, tmp))*x)*(2856480122.339903))))>>x)&(-2382703000.077593))%(1183918594)))*(tmp = -1670081449, tmp))<<x));
- assertEquals(-528327581.7646315, x %= (tmp = -790685349.2353685, tmp));
- assertEquals(2073431790, x ^= (tmp = 2601800333, tmp));
- assertEquals(-6514722684180, x -= (((tmp = 824141806.0668694, tmp)>>>(((-1865885282.8723454)&(x&(x|((900188006.3757659)>>>(x&x)))))+(2227126244.0526423)))*x));
- assertEquals(1450593, x >>>= ((2157053647)>>(x+(-2934071355.418474))));
- assertEquals(576782336, x <<= ((1054640368.827202)&((tmp = -3182236876.434615, tmp)>>(tmp = 2129856634.0328193, tmp))));
- assertEquals(2950754326, x -= (tmp = -2373971990, tmp));
- assertEquals(738197504, x <<= (1188157369.5988827));
- assertEquals(0, x <<= (x+((tmp = -839533141, tmp)&((((((tmp = -1148768474.7306862, tmp)|(172650299))+(tmp = -2739838654, tmp))/(3132557129))%x)>>>(tmp = -1229961746.2466633, tmp)))));
- assertEquals(0, x %= (tmp = -2974207636, tmp));
- assertEquals(0, x %= ((2323482163)>>>x));
- assertEquals(0, x &= (((x/(x+(x>>((tmp = 55935149, tmp)%x))))|((3109182235)>>>(tmp = 1217127738.8831062, tmp)))+((((tmp = -385114910, tmp)*((((((tmp = -2535158574.634239, tmp)&(x+x))<<(-2821692922.43476))&(-776804130.9457026))>>((-1374832535)^(tmp = 2175402162.701251, tmp)))%(-1646995095)))-(x*(tmp = -921556123, tmp)))^(79224621))));
- assertEquals(128935435, x |= ((tmp = 2279459038, tmp)%(tmp = -537630900.5271742, tmp)));
- assertEquals(128935435, x /= ((((((x<<(2750024311))-((-1332480769.4784315)&(1418160003)))&(1551783357))<<(((((-2870460218.55027)|((-1958752193.7746758)&(2551525625)))>>>((((tmp = -1698256471, tmp)^(((((((((tmp = -830799466, tmp)+x)-(-111590590))+(tmp = -1105568112.3921182, tmp))/((tmp = -3058577907, tmp)|(((-1944923240.2965696)%(-2884545285))<<(tmp = -1993196044.1645615, tmp))))^(x>>(tmp = -2961488181.3795304, tmp)))&x)*x)|(((tmp = 97259132.88922262, tmp)<<((1601451019.343733)&x))*(x|x))))+((((x>>x)<<x)+(-868409202.2512136))/(((tmp = -2893170791, tmp)-((x|(-853641616))%(((tmp = 549313922, tmp)&(-768036601.6759064))%(tmp = -543862220.9338839, tmp))))-((tmp = 1639851636, tmp)+((2164412959)/(-273028039.941242))))))>>>((((-2382311775.753495)^(-2062191030.2406163))>>>(tmp = -1054563031, tmp))/(-862111938.7009578))))%x)+(-3103170117.625942)))%((tmp = -1144062234, tmp)>>x))>>>(tmp = 1216332814.00042, tmp)));
- assertEquals(41.631074722901715, x /= (x&(-2542806180.962227)));
- assertEquals(41.631074722901715, x %= (-14003386.556780577));
- assertEquals(8, x &= (x&((-2231622948)%(tmp = 488279963.9445952, tmp))));
- assertEquals(9.002961614252625e-9, x /= ((53802728.56204891)<<(((867697152.3709695)-(538719895.5707034))&(-631307825.4491808))));
- assertEquals(0, x >>= x);
- assertEquals(-0, x *= (tmp = -785674989, tmp));
- assertEquals(-0, x += x);
- assertEquals(0, x /= (-250703244));
- assertEquals(0, x <<= ((tmp = -661062581.5511999, tmp)|x));
- assertEquals(0, x &= (-1299482308));
- assertEquals(0, x &= ((-399690060)>>>(2448074202.385213)));
- assertEquals(0, x &= (2574341201));
- assertEquals(0, x <<= ((x|(((tmp = 2458873162.645012, tmp)+(tmp = -1999705422.8188977, tmp))<<((x^(tmp = -392530472, tmp))>>>x)))&(((tmp = 2463000826.7781224, tmp)|(tmp = 3020656037, tmp))-x)));
- assertEquals(1397603760, x += ((tmp = -1359413071, tmp)-(tmp = -2757016831, tmp)));
- assertEquals(513823851, x -= (883779909));
- assertEquals(-1765712747, x ^= (2288060670.6797976));
- assertEquals(3117741504918286000, x *= x);
- assertEquals(3117741506284045300, x += (1365759456));
- assertEquals(6035555595.597267, x /= (tmp = 516562470, tmp));
- assertEquals(104203275, x &= (tmp = 376835755.32434213, tmp));
- assertEquals(10858322520725624, x *= x);
- assertEquals(59458951, x >>>= (153765028));
- assertEquals(49370856, x += ((tmp = -1291276092, tmp)>>x));
- assertEquals(0, x %= x);
- assertEquals(0, x += x);
- assertEquals(-1494589645, x -= (1494589645));
- assertEquals(-0, x %= x);
- assertEquals(0, x <<= (x&((2730708043.467806)<<x)));
- assertEquals(0, x /= ((tmp = -1483912394.153527, tmp)>>>((tmp = 1800568769, tmp)^((((((tmp = 1351568510, tmp)>>(tmp = -1337992543.2562337, tmp))>>>(tmp = 2602239360.40513, tmp))*x)%x)+(-2095840128.0700707)))));
- assertEquals(-0, x /= ((2363946613)^(tmp = -2227868069, tmp)));
- assertEquals(0, x &= ((((2634933507)<<(2798775374.140882))>>>x)>>>(((tmp = 1135200853.6396222, tmp)-(tmp = -1529829490.7007523, tmp))-(((((((((x^((x|(2135742668.591568))-(924230444.8390535)))%(tmp = -2459525610.51898, tmp))+(x&((tmp = 1177231743.809653, tmp)/(tmp = 1743270357.2735395, tmp))))|(((tmp = -1894305017, tmp)^((tmp = 1791704240, tmp)&x))%(-1569751461)))>>>(tmp = -2078321944, tmp))|x)*(((x*(tmp = -163239354, tmp))<<((tmp = 2859087562.694203, tmp)&(-657988325.9410558)))^(2508013840)))-((-243572350)+(x%((-1095206140)+((tmp = 3213566608.942816, tmp)*((2256442613)%((tmp = 1723751298, tmp)^(x-((-1145710681.2693722)|x)))))))))+(1556870627)))));
- assertEquals(130883024.97423434, x -= (-130883024.97423434));
- assertEquals(0.046720352789736276, x /= (tmp = 2801413456, tmp));
- assertEquals(1806558189, x |= (tmp = 1806558189.157823, tmp));
- assertEquals(72.40475060062144, x /= (x%((1932591076.531628)>>(1982030182))));
- assertEquals(-1077558321.5975945, x += (tmp = -1077558394.002345, tmp));
- assertEquals(98187, x >>>= x);
- assertEquals(97792, x &= (tmp = -1032487404, tmp));
- assertEquals(709197609, x |= (x^(709179177)));
- assertEquals(11081212, x >>>= (tmp = 1412940006.169063, tmp));
- assertEquals(11081212, x &= x);
- assertEquals(-1920311203, x -= ((tmp = 1931392415, tmp)<<((x%(tmp = -2873576383, tmp))%x)));
- assertEquals(-1920311203, x |= (x&(-993884718.2172024)));
- assertEquals(-4, x >>= (1409411613.0051966));
- assertEquals(-7947632484, x *= ((-2856731734)^((-1181032235.9132767)-((tmp = 780101930, tmp)+((tmp = -1732707132.6253016, tmp)^x)))));
- assertEquals(-2016362769, x ^= (tmp = 2711125619.2455907, tmp));
- assertEquals(-61535, x >>= x);
- assertEquals(-124771649, x ^= (tmp = 124726558, tmp));
- assertEquals(-1, x >>= x);
- assertEquals(-0, x %= (x*x));
- assertEquals(0, x <<= x);
- assertEquals(0, x /= (2444628112));
- assertEquals(0, x <<= ((-38968517.72504854)<<x));
- assertEquals(-1504619917, x |= (tmp = 2790347379, tmp));
- assertEquals(-1504619917, x &= x);
- assertEquals(2790347379, x >>>= ((1825218368)<<(-1843582593.2843356)));
- assertEquals(7786038495492170000, x *= x);
- assertEquals(-11011696, x |= (((tmp = 2931644407.4936504, tmp)-(3077095016.001658))%(tmp = -1731851949, tmp)));
- assertEquals(-107866, x %= ((-697845074.1661191)>>(772708134)));
- assertEquals(356779149, x ^= (-356884949.503757));
- assertEquals(0, x %= x);
- assertEquals(0, x *= ((tmp = 1542291783, tmp)^x));
- assertEquals(0, x += ((tmp = 1105314644.002441, tmp)&x));
- assertEquals(-1005882993, x ^= (-1005882993.0899806));
- assertEquals(-1301065066, x += (tmp = -295182073, tmp));
- assertEquals(-1454702592, x <<= ((-2440858737.390277)&(-1363565201.7888322)));
- assertEquals(-201539012492525570, x *= ((((tmp = -1416268089, tmp)|x)-(tmp = 1669129769, tmp))&(x<<((x/(-2614041678.7423654))%x))));
- assertEquals(-2.1995276811535986e+25, x *= (x/(-1846667987.154371)));
- assertEquals(0, x |= ((x*(((x>>>((tmp = 1044173034, tmp)>>>((x<<((tmp = -2906412863, tmp)%((tmp = -437401503, tmp)<<(((((x|(2167319070))<<((tmp = 2766179640.1840167, tmp)&(-2372076054)))*(tmp = -241617431.06416297, tmp))*((((((tmp = 2570465382.5574293, tmp)>>>(x/((-2851324509.354545)%x)))>>(((x+((tmp = -614687945, tmp)^x))^((((tmp = 1653437743, tmp)>>x)/(tmp = 3072995069, tmp))>>x))*(((((-290508242)>>((tmp = 2969511554, tmp)<<(tmp = 158176292.95642304, tmp)))<<(32376015))+(tmp = 2391895870.4562025, tmp))*x)))&((((x/(tmp = 365292078.53605413, tmp))>>x)/(1167322811.0008812))|(((tmp = 2487970377.365221, tmp)^x)<<((tmp = 2342607988.711308, tmp)/(((2276081555.340126)-(((tmp = -2571071930, tmp)>>(tmp = -248468735.76550984, tmp))>>>(tmp = -2862254985.608489, tmp)))^(-1312017395))))))<<x)&(2762717852.949236)))+((((-2492896493)&x)<<(-2756272781.4642315))/x)))))*(2405395452))))>>((-1433975206)/((tmp = -2064757738.6740267, tmp)<<((((tmp = -1563531255, tmp)-(-589277532.2110934))<<x)^(2249328237.0923448)))))-x))-(-225624231)));
- assertEquals(0, x *= (tmp = 1657982666.2188392, tmp));
- assertEquals(86443387, x |= (tmp = 86443387.25165462, tmp));
- assertEquals(86443387, x %= (-1341731981.702294));
- assertEquals(172886774, x <<= ((-1799840391)&(1011948481.310498)));
- assertEquals(-1115684864, x <<= x);
- assertEquals(-2098253702059525600, x *= (1880686715.1865616));
- assertEquals(-2098253700213206300, x -= (tmp = -1846319435.0583687, tmp));
- assertEquals(570692096, x &= (((tmp = -1572055366.64332, tmp)%(tmp = 1720120910, tmp))%((x-(912386952.5959761))*(tmp = -1146251719.4027123, tmp))));
- assertEquals(603979776, x <<= ((-329752233.8144052)&(tmp = -368636559, tmp)));
- assertEquals(603979776, x <<= x);
- assertEquals(364791569817010200, x *= x);
- assertEquals(0, x &= ((2074587775.983799)/(tmp = 438856632.76449287, tmp)));
- assertEquals(0, x &= (((1509671758)*(tmp = -935801537.7325008, tmp))>>>(((tmp = -1752877566, tmp)<<x)%(tmp = -517163766, tmp))));
- assertEquals(-2031730599, x ^= ((2264285273)&(tmp = -1762662949.014101, tmp)));
- assertEquals(-843578945, x %= (-1188151654));
- assertEquals(-2147483648, x <<= x);
- assertEquals(-2147483648, x >>= (tmp = -3165079200.229641, tmp));
- assertEquals(-44086313.1323726, x %= ((x%(-254466243.48728585))-((x>>(-457411829.1063688))-((-2606923436.9333453)/x))));
- assertEquals(-44086313, x |= x);
- assertEquals(1037812, x >>>= ((tmp = 342497258.9786743, tmp)+(1652928385.8150895)));
- assertEquals(-2371695599678100, x *= (tmp = -2285284425, tmp));
- assertEquals(-2371697387004653, x += (tmp = -1787326553.0542095, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= ((x^(tmp = 544039787, tmp))>>>x));
- assertEquals(0, x &= ((x%(((((((tmp = -424572417.1088555, tmp)|(-2381863189))/(tmp = -2007482475.1809125, tmp))&(((((tmp = 311016073, tmp)>>(tmp = -1548839845, tmp))+((-2557740399.7947464)<<(2399113209)))&x)>>>x))%(-297180308.7721617))-(tmp = 860906293, tmp))^x))%(-2740622304)));
- assertEquals(4971841192462909000, x += ((tmp = -2723203837.572612, tmp)+((((-2909100706)+(-951999374))|(-3116735764))*(3087123539.422669))));
- assertEquals(-460, x >>= (1081807537.557404));
- assertEquals(2354165127.3906384, x += (tmp = 2354165587.3906384, tmp));
- assertEquals(357.8680960002211, x /= ((((x<<(((x&x)+(1113841407))|((x/(tmp = 384533564, tmp))>>>(-605853882))))%x)&((tmp = 2050375842, tmp)>>>x))>>(((2745147573)^x)<<(x-(900043292)))));
- assertEquals(0, x *= (x>>>(-295974954.5058532)));
- assertEquals(0, x *= ((-2448592125.815531)*(tmp = -94957474.8986013, tmp)));
- assertEquals(0, x &= ((x>>x)^(tmp = -1335129180, tmp)));
- assertEquals(395092065, x |= ((3081659156)^(tmp = -1608334475, tmp)));
- assertEquals(395092065, x &= x);
- assertEquals(-413337639, x += (x^(tmp = -664996071.3641524, tmp)));
- assertEquals(-1604423637896759800, x *= (x>>>(tmp = 1242912352.955432, tmp)));
- assertEquals(0, x &= ((((((tmp = 651293313, tmp)|(((2541604468.635497)>>>(tmp = 758815817.7145422, tmp))>>>((-1948795647)/x)))&x)/((tmp = -3161497100, tmp)+(782910972.3648237)))>>>x)%(834206255.5560443)));
- assertEquals(0, x >>>= (tmp = 125945571, tmp));
- assertEquals(NaN, x -= (x%x));
- assertEquals(NaN, x %= (tmp = 282259853, tmp));
- assertEquals(NaN, x += (tmp = -2081332383, tmp));
- assertEquals(0, x >>>= (((x>>(-2298589097.7522116))|((((x>>>(x-(tmp = 755218194, tmp)))|x)%x)-(tmp = 2206031927, tmp)))>>>((((x&(x-x))^(tmp = 2836686653, tmp))*((x<<(tmp = -1624140906.4099245, tmp))>>>((2942895486)|((x>>>x)>>>(-1586571476)))))|((781668993)+(-1857786909)))));
- assertEquals(0, x &= (tmp = -708084218.9248881, tmp));
- assertEquals(0, x %= (1645913394.5625715));
- assertEquals(0, x <<= ((x^((tmp = 1185413900, tmp)*((-2441179733.997965)*(tmp = 2554099020.066989, tmp))))%((1704286567.29923)/x)));
- assertEquals(0, x += x);
- assertEquals(0, x *= x);
- assertEquals(0, x |= (x>>>(139138112.141927)));
- assertEquals(0, x >>>= (tmp = 2142326564, tmp));
- assertEquals(0, x |= x);
- assertEquals(-0, x /= ((((x+(2817799428))|x)%((1050079768)-(x>>>((1452893834.8981247)|((((tmp = -1737187310.889149, tmp)/(tmp = -362842139, tmp))%(1234225406))%(((x|x)*((-1055695643.739629)-((x-x)*(945954197.676585))))-(tmp = 786185315.346615, tmp)))))))<<(-173891691)));
- assertEquals(0, x &= (-2842855092.319309));
- assertEquals(0, x &= ((-3188403836.570895)/x));
- assertEquals(0, x *= (x+x));
- assertEquals(NaN, x /= (x>>>(((tmp = 391037497.68871593, tmp)/((192754032)*(1382659402.5745282)))/((((-2187364928)>>>x)>>(tmp = 2563448665.7594023, tmp))^(tmp = 1500866009.7632217, tmp)))));
- assertEquals(NaN, x /= ((tmp = -935036555.2500343, tmp)-(x/(((x&(x^(tmp = -3001352832.5034075, tmp)))^x)/((1122547613)>>x)))));
- assertEquals(0, x >>= (tmp = -2951766379.0809536, tmp));
- assertEquals(-632945188, x ^= (-632945188.7188203));
- assertEquals(-632945188, x %= ((((((tmp = -3181527314.82724, tmp)&(2280175415))>>(x^(x|x)))^(tmp = -524233678.52970886, tmp))*x)|((tmp = 1782882786, tmp)>>>(tmp = -592607219, tmp))));
- assertEquals(404189184, x <<= ((tmp = -2761472127, tmp)^(36616299.88780403)));
- assertEquals(872651572, x ^= (tmp = 739568436.6252247, tmp));
- assertEquals(13, x >>>= ((tmp = -1033843418.865577, tmp)%(x%(1247263629.0445533))));
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>= (3189175317));
- assertEquals(0, x &= (((2391973519.6142406)^((-2950058736.191456)|(x*x)))>>(tmp = 343822384.294345, tmp)));
- assertEquals(0, x >>>= (tmp = -2306246544, tmp));
- assertEquals(-1572339598, x ^= ((tmp = 2991380083.337327, tmp)&(tmp = -1361507970, tmp)));
- assertEquals(649, x >>>= ((1961407923.4950056)>>(x-(-872821523.7513013))));
- assertEquals(649, x ^= (((x&(tmp = -702931788, tmp))^(((x>>x)|(((tmp = 2710759269, tmp)/(x>>(x*((((((tmp = -2428445134.9555864, tmp)+(-1859938743))%(x<<x))*((236868604)+((tmp = -3066688385, tmp)/(787503572.8839133))))/(tmp = 3215629315, tmp))>>(-1315823020)))))%(1461368627.1293125)))>>>(tmp = -2921804417.5735087, tmp)))/(x>>>(((tmp = 2175260691.824617, tmp)/((-582958935.7628009)-((((((x>>x)|(2590503723.4810824))^(tmp = -1994324549, tmp))-(-684683327))/(tmp = -3133419531, tmp))|(tmp = -328974092.05095506, tmp))))>>(-447624639.4518213)))));
- assertEquals(649, x %= ((((1854382717)|(((x+(tmp = 2568081234, tmp))-x)+((tmp = 1043086140, tmp)<<((tmp = 2979118595.0496006, tmp)+((x&(2669577199.852803))/(-2567808445.101112))))))<<((((tmp = -1471092047, tmp)&((-3099138855.21041)-((tmp = -798574377.526715, tmp)&((2255586141)<<(-1069867774)))))>>>(((x*(tmp = -2810255707.781517, tmp))/x)*(2706435744.054121)))^(394262253)))^((844325548.0612085)/(tmp = 1434691648, tmp))));
- assertEquals(823215943.1924392, x += (tmp = 823215294.1924392, tmp));
- assertEquals(536872706, x &= ((-334612686)%((1303605874)|x)));
- assertEquals(-30666374.413486242, x += ((tmp = -567539080.4134862, tmp)%(tmp = -1655555936.3195171, tmp)));
- assertEquals(-56438727096752984, x *= (tmp = 1840410814, tmp));
- assertEquals(-33200107.984488487, x %= (((tmp = 3007206509, tmp)-(3079337725.6659536))%(1819565202.5011497)));
- assertEquals(-1214493182, x ^= (-3060193769));
- assertEquals(-1214493179.1335113, x -= ((-3218099496.595745)/(1122662554)));
- assertEquals(-1214493179, x >>= ((-375364195)<<(((tmp = 619439637.8754326, tmp)>>(-1830023279.9486575))&(tmp = -1106180387.2448823, tmp))));
- assertEquals(-303623295, x >>= (-2109241374.3349872));
- assertEquals(-0, x %= x);
- assertEquals(0, x |= x);
- assertEquals(1917126206, x -= (-1917126206));
- assertEquals(2659779928, x -= (tmp = -742653722, tmp));
- assertEquals(-1635187368, x >>= ((tmp = -674385169, tmp)*((9848362.783326745)|(x*(55220544.00989556)))));
- assertEquals(-1981113695, x ^= ((tmp = 392404985, tmp)>>(((x<<((2006207061)<<(tmp = 2558988218, tmp)))*((((tmp = 1789304307.1153054, tmp)/(2538061546))<<(tmp = 556026116, tmp))&((tmp = 1076457999.6424632, tmp)*(tmp = -1822378633.2489474, tmp))))%(((((-1117046924)&((-69013651)%(x&(((-2320327696)/(x&x))-(tmp = 2458222544, tmp)))))>>((-3092360983.0037227)/(-3171415636)))*(((tmp = 2520431213, tmp)<<(1066492762.6149663))+((tmp = 1272200889, tmp)^((1687693123.2295754)+x))))-(-1096823395)))));
- assertEquals(-990556848, x >>= x);
- assertEquals(981202869119695100, x *= x);
- assertEquals(981202869119695100, x -= (x/x));
- assertEquals(0, x ^= (x>>x));
- assertEquals(NaN, x %= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x *= ((((2980512718)>>>x)<<((x^(-1111233869))>>((2531466092.6036797)>>>(((tmp = -1791229364, tmp)*(-2210950307.206208))%((tmp = -806645443, tmp)<<((((((((tmp = 112334634.26187229, tmp)%(x|((((2154021796.1166573)+x)&((-1047293079.9686966)^(tmp = -1894127139, tmp)))+(tmp = 1910946653.2314827, tmp))))^(293142672.5016146))-x)<<(-1593533039.8718698))+x)>>(x<<(((46359706.50393462)&(tmp = 272146661, tmp))|(tmp = 2117690168, tmp))))%(tmp = -1784737092.4924843, tmp)))))))-(1465796246)));
- assertEquals(0, x &= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= (x+(-1612418456)));
- assertEquals(0, x &= ((tmp = -843964311, tmp)/x));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x *= x);
- assertEquals(NaN, x += (x>>>(54020240)));
- assertEquals(489206868, x |= (489206868));
- assertEquals(489206868, x &= x);
- assertEquals(489206848, x &= ((tmp = -1699133906.2361684, tmp)>>(tmp = 2658633814, tmp)));
- assertEquals(489206848, x |= x);
- assertEquals(1910559006, x -= (tmp = -1421352158, tmp));
- assertEquals(1, x >>= x);
- assertEquals(0, x -= x);
- assertEquals(0, x %= (x^(tmp = 2745376003.2927403, tmp)));
- assertEquals(0, x %= (((tmp = 3199743302.1063356, tmp)^((-1905944176)&(x>>>(187247029.5209098))))<<((x*((-1394648387)*(1252234289)))-(3140049815))));
- assertEquals(0, x <<= (-2567872355));
- assertEquals(0, x %= (tmp = 1057707555.8604916, tmp));
- assertEquals(0, x %= ((tmp = -1877857405.0228279, tmp)>>>(((tmp = 423831184, tmp)*((tmp = -2106757468.324615, tmp)%(tmp = -1197717524.6540637, tmp)))>>(tmp = -93746263.46774769, tmp))));
- assertEquals(0, x |= x);
- assertEquals(-0, x *= ((tmp = 1317609776.6323466, tmp)*(tmp = -26959885.89325118, tmp)));
- assertEquals(0, x >>= (-1288116122.0091262));
- assertEquals(0, x &= ((370818172.92511404)%((tmp = -528319853.54781747, tmp)*(x/((tmp = -2839758076, tmp)^(x+(((-1258213460.041857)<<(tmp = 302017800.72064054, tmp))|((((tmp = -624254210, tmp)^((-338165065.97507)|((623392964)-x)))>>>x)%(tmp = 2767629843.0643625, tmp)))))))));
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x |= ((-2001549164.1988192)*x));
- assertEquals(0, x -= x);
- assertEquals(0, x *= (((((165836842.14390492)*(tmp = -3220002961, tmp))|(-2840620221.747431))%((x/(tmp = 3153915610, tmp))>>>(tmp = 2018941558, tmp)))>>>x));
- assertEquals(-0, x *= (-231994402.93764925));
- assertEquals(0, x <<= x);
- assertEquals(0, x %= (tmp = 2702385056.1149964, tmp));
- assertEquals(0, x <<= (tmp = 378459323, tmp));
- assertEquals(0, x >>>= ((x&(x&(((-1014963013)<<(x&((tmp = -3110294840, tmp)|(x+(x<<(1129643420))))))+(1093795819.1853619))))+((((tmp = -2295103369.697398, tmp)&(((370501313.43019223)>>>(2465439579))/x))-x)>>x)));
- assertEquals(0, x /= ((tmp = 1779625847, tmp)+(tmp = -662459654.6908865, tmp)));
- assertEquals(0, x -= x);
- assertEquals(0, x %= ((tmp = 2723291421, tmp)|(277246502.4027958)));
- assertEquals(0, x ^= (((-2936270162)>>>((((tmp = -2019015609.1648235, tmp)|(47218153))*(-823685284))+x))&(x<<(x*(x|(((tmp = -941955398, tmp)^(tmp = -2365238993.5300865, tmp))-(778674685)))))));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= (-175235975.8858137));
- assertEquals(-2684493800.1062117, x += (tmp = -2684493800.1062117, tmp));
- assertEquals(-1290806265.6063132, x -= (-1393687534.4998984));
- assertEquals(-1290806265, x >>= (((x>>(tmp = -1710112056.4935386, tmp))*(586227650.2860553))<<(tmp = -2918251533.6052856, tmp)));
- assertEquals(23470008, x >>>= x);
- assertEquals(1668734969, x |= ((-295560682.9663689)^(x|((((tmp = -1183847364, tmp)&(3135327694))+(1679127747.1406744))-((-1895825528)%((tmp = -3180115006, tmp)+((tmp = 2373812187, tmp)|x)))))));
- assertEquals(1744306169, x |= (1188503928.5009093));
- assertEquals(1744306169, x %= (tmp = -2723982401.4997177, tmp));
- assertEquals(3488612338, x += x);
- assertEquals(3488612337, x += (((x/(-325849204))>>x)|(-1820624550.9149108)));
- assertEquals(-1511119305, x ^= (tmp = 1778506182.2952862, tmp));
- assertEquals(-12211415, x %= (x^(tmp = -54943035, tmp)));
- assertEquals(-12211415, x %= ((-1267051884)%(-643566443.0122576)));
- assertEquals(-30.84976063258681, x /= (((1052047194)>>>x)&(1495698235.5117269)));
- assertEquals(-61.69952126517362, x += x);
- assertEquals(-244, x <<= (x^(x+(tmp = -2822258210.076373, tmp))));
- assertEquals(-6652, x &= ((tmp = 2593685093, tmp)>>((((2047688852.4609032)<<((x*(-611076291))*x))^(-2665364024.817528))>>>(165267874))));
- assertEquals(0, x -= x);
- assertEquals(0, x /= (2454186758));
- assertEquals(0, x &= (tmp = -2226895206, tmp));
- assertEquals(0, x += x);
- assertEquals(-21390701, x += ((-1369004846.0816503)>>(tmp = -2661552634.039692, tmp)));
- assertEquals(-0.012568536912921919, x /= (1701924507.856429));
- assertEquals(7.09517966608176e-11, x /= (tmp = -177141911.8955555, tmp));
- assertEquals(0, x >>= (tmp = 231535697, tmp));
- assertEquals(1383687797, x ^= (tmp = -2911279499.568808, tmp));
- assertEquals(1383687797, x %= (tmp = -2258636646.5294995, tmp));
- assertEquals(1319, x >>= ((tmp = -2549411892.8426056, tmp)/(((((1532476676)^(153720871.82640445))+x)/(((2988190456.3206205)&(tmp = -2920873674, tmp))-(((((tmp = -1044518167.0581458, tmp)>>x)-((((tmp = -194701879.13505793, tmp)&(498352051))&((tmp = -2167339635.6529818, tmp)^(((x>>(tmp = 700159851, tmp))*(tmp = 2874921158, tmp))/x)))-((2856128689)|((-1876321441)>>>(2110732915)))))^((((tmp = -193379494.18825436, tmp)/(-3055182489.533142))<<x)+((tmp = -2286109605, tmp)>>(tmp = 698475484.3987849, tmp))))^(3182231653.500364))))|(((tmp = -194670835, tmp)>>>((786780139)%(((2114171416.2305853)^(1703145352.8143656))/x)))>>>((tmp = -3029462067, tmp)>>((67647572.02624655)&(x*(-2394283060))))))));
- assertEquals(13903855, x |= ((tmp = -2515306586, tmp)>>>x));
- assertEquals(54311, x >>>= ((-2413722658)-((tmp = -2159787584, tmp)^(tmp = 949937622.9744623, tmp))));
- assertEquals(108622, x += x);
- assertEquals(1250717187, x ^= ((tmp = 842692148, tmp)+(((2649331689.694273)<<x)-(tmp = -2992181273, tmp))));
- assertEquals(4536777, x %= (tmp = 73304730, tmp));
- assertEquals(0, x -= x);
- assertEquals(-580081499, x ^= ((tmp = -580081499.0170684, tmp)^(x%(tmp = -1542730817.88261, tmp))));
- assertEquals(-1382738784, x <<= x);
- assertEquals(-1382738784, x <<= x);
- assertEquals(2912228512, x >>>= (x*(x>>>x)));
- assertEquals(-1076374105, x |= (2589443367));
- assertEquals(-0.2818750938197037, x /= (((tmp = -1559525732.9603848, tmp)|(-477068917.5483327))>>>((-688616257)*((((tmp = -1192490153.1226473, tmp)*(-502280624.0265591))<<(-442688727.4881985))%(x+(((((tmp = -2948836853.831935, tmp)-(tmp = -2850398330.910424, tmp))>>>(x>>>(-1947835558)))^x)+(x*x)))))));
- assertEquals(2032826546, x |= (tmp = 2032826546.819327, tmp));
- assertEquals(3408404827.14316, x += (tmp = 1375578281.1431599, tmp));
- assertEquals(258183922.14315987, x %= (tmp = 350024545, tmp));
- assertEquals(479694848, x <<= (tmp = -481187157, tmp));
- assertEquals(-2147483648, x <<= (((tmp = -2956588045.472398, tmp)>>>(((tmp = -1838455399.1775856, tmp)&(((((tmp = -637547, tmp)/x)&(x^((-44876328.1767962)+(((-2059598286)-(1071496688))%(tmp = -1492254402, tmp)))))-(x%x))*(x|x)))>>(1226250760)))<<x));
- assertEquals(-2288163338.9020815, x -= (140679690.9020816));
- assertEquals(4954833118513997000, x *= (-2165419327.4906025));
- assertEquals(1578331238, x ^= (-2410854298.2270393));
- assertEquals(-810627292, x += (-2388958530));
- assertEquals(-810627292, x ^= ((1495296640.4087524)/(tmp = 1561790291, tmp)));
- assertEquals(657116606535253200, x *= x);
- assertEquals(0.675840332689047, x %= (((-1816548473)^(((tmp = -151918689.19451094, tmp)|(1819911186.535233))/((((((1514297447)+(tmp = 856485190.9684253, tmp))&(((1809369464.4363992)<<(493538496))*x))+((x*(x>>(x&(tmp = 222293461, tmp))))>>>(((784519621)|x)^((-580766922)>>(tmp = -947264116, tmp)))))>>>((((2794210354.22964)>>>(((2896952532.0183973)*((x+(tmp = -1813175940, tmp))<<(tmp = -1302618293, tmp)))&x))>>(x-(((x|((1456466890.1952953)*x))^(-169979758.19158387))-(x-x))))>>x))&(tmp = 2671604078.3026733, tmp))))/(-1701675745)));
- assertEquals(0.675840332689047, x %= ((tmp = 2421871143, tmp)^x));
- assertEquals(NaN, x %= ((((tmp = 1175526323.433271, tmp)+(tmp = 2813009575.952405, tmp))%((tmp = -3112133516.3303423, tmp)&x))&((((((-424329392)^(tmp = 1430146361, tmp))+x)-(1533557337.268306))%((tmp = -3117619446, tmp)-(-3127129232)))>>>x)));
- assertEquals(NaN, x += x);
- assertEquals(0, x >>>= ((1710641057.7325037)%(104961723.56541145)));
- assertEquals(0, x <<= (tmp = -970072906, tmp));
- assertEquals(0, x *= (87768668));
- assertEquals(-1464968122, x ^= (tmp = -1464968122, tmp));
- assertEquals(-1467983895, x ^= ((tmp = -1204896021, tmp)>>>(((91792661)&(x>>>(((-2364345606)>>>x)*x)))+x)));
- assertEquals(2.991581508270506, x /= (-490704963.5591147));
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>= ((tmp = 639854873, tmp)%(tmp = 743486160.3597239, tmp)));
- assertEquals(0, x <<= (tmp = 1045577245.3403939, tmp));
- assertEquals(0, x >>= ((tmp = -1932462290, tmp)|(tmp = 1629217987, tmp)));
- assertEquals(517617438, x ^= ((tmp = 2737789043, tmp)%(tmp = -2220171604.135681, tmp)));
- assertEquals(126371, x >>>= ((tmp = 205210223.69909227, tmp)-(tmp = 598118404, tmp)));
- assertEquals(918548455, x |= ((918228734.8363427)+(x+x)));
- assertEquals(918548455, x |= ((tmp = 599828198, tmp)>>((tmp = -851081330, tmp)|(tmp = -1152596996.8443217, tmp))));
- assertEquals(918548443.7739062, x -= ((tmp = 1497642976.2260938, tmp)%(x>>(tmp = -548469702.5849569, tmp))));
- assertEquals(0.7739062309265137, x %= (x&x));
- assertEquals(2317939163.8239403, x *= (tmp = 2995116296, tmp));
- assertEquals(1014415360, x <<= (-279972114));
- assertEquals(0, x &= ((296810932)/(x*(tmp = -2750499950, tmp))));
- assertEquals(0, x *= (x%((126285451.05086231)>>>(x*(tmp = -2789790532, tmp)))));
- assertEquals(0, x >>>= ((975695102.5771483)%(x-((-1011726540)-((tmp = 2223194882, tmp)/x)))));
- assertEquals(-1747794584, x |= (-1747794584.3839395));
- assertEquals(-543544679, x %= (tmp = -1204249905, tmp));
- assertEquals(-543544679, x %= (-881024001));
- assertEquals(1, x /= x);
- assertEquals(-1879376393, x |= ((tmp = 161643764, tmp)|(tmp = 2281346499.9084272, tmp)));
- assertEquals(1.321124264431369, x /= (-1422558379.7061746));
- assertEquals(1, x >>>= (x&(tmp = -963118950.4710281, tmp)));
- assertEquals(3, x ^= ((x+x)/x));
- assertEquals(1, x /= x);
- assertEquals(1, x &= (2090796073));
- assertEquals(-1284301873, x ^= (((-11041168.146357536)+(tmp = -1273260707.8134556, tmp))+x));
- assertEquals(292559045, x &= (x&((-2401110739)^((tmp = 630802904, tmp)^(((1012634447.0346229)+x)%((tmp = -1240091095, tmp)%(x/(-1483936527))))))));
- assertEquals(0, x %= x);
- assertEquals(0, x /= (tmp = 613145428.3653506, tmp));
- assertEquals(0, x /= ((x-(tmp = 3116638456, tmp))*(-973300716)));
- assertEquals(0, x %= (tmp = -1794741286.0464535, tmp));
- assertEquals(0, x &= x);
- assertEquals(0, x >>= (-551370105.0746605));
- assertEquals(-1471996874, x ^= ((2822970422.2331414)-x));
- assertEquals(-277914313, x |= (tmp = -818980601.2544096, tmp));
- assertEquals(-34, x >>= x);
- assertEquals(305422768, x -= (-305422802));
- assertEquals(-2406146240, x += (tmp = -2711569008, tmp));
- assertEquals(1073745408, x &= (tmp = -3046625618, tmp));
- assertEquals(1073745408, x <<= ((-1234108306.7646303)<<((-233519302)|x)));
- assertEquals(1073745408, x %= (tmp = 1898831268, tmp));
- assertEquals(1073745408, x <<= (((tmp = 3089406038, tmp)/x)&(-2960027680)));
- assertEquals(65536, x >>>= (2858188366));
- assertEquals(128, x >>>= ((-2640257239.857275)%((tmp = -3185405235.3177376, tmp)*x)));
- assertEquals(128, x >>>= x);
- assertEquals(128, x -= (x&(x-(tmp = -247588018, tmp))));
- assertEquals(81616906825.07776, x *= (tmp = 637632084.57092, tmp));
- assertEquals(78860097686.07776, x -= (((1507215684)^((709254783)+(((x<<x)*((-2890828152.667641)%(2537817529.2041526)))^x)))+(3114024487)));
- assertEquals(-2920545695.721283, x += (((tmp = -2555437435, tmp)>>>x)-((2920546109.72129)+x)));
- assertEquals(-2879412281.721283, x += ((-1662428756)>>>(tmp = -1928491386.6926208, tmp)));
- assertEquals(67403845, x &= (tmp = 2921644117, tmp));
- assertEquals(16850961, x >>>= (((-1039328365)>>>(tmp = -768615112, tmp))<<((1037261855)*(tmp = -2906902831.4797926, tmp))));
- assertEquals(0, x ^= x);
- assertEquals(0, x *= ((-2729056530)/((-1776175111)%(1493002300.4604707))));
- assertEquals(0, x *= (tmp = 370696035.22912216, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x |= ((((((tmp = -1541196993, tmp)^x)/(854730380.1799632))/(2879117705.492209))+((((-2892068577)^(-2460614446.1044483))>>>((743413943)<<(-1285280084.4220598)))/(tmp = -1719994579.5141463, tmp)))%(((((tmp = 2522797851.088227, tmp)<<(tmp = 2257160597.1538725, tmp))/(-680406007))&((x>>>(tmp = -260350730, tmp))^(tmp = 1920522110.852598, tmp)))>>(-697620442))));
- assertEquals(0, x &= x);
- assertEquals(-591399642.958673, x += (x-(tmp = 591399642.958673, tmp)));
- assertEquals(27, x >>>= (tmp = -726721317.2109983, tmp));
- assertEquals(-2043736843, x -= (2043736870));
- assertEquals(-3991674, x >>= (tmp = 1098126089, tmp));
- assertEquals(-997919, x >>= ((x%(((x*(((-1497329257.1781685)%(2334511329.2690516))/(-3072526140.6635056)))+(-1843998852))-(tmp = 240300314.34070587, tmp)))+(714080860.6032693)));
- assertEquals(-0, x %= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x >>= (tmp = 538348328.5363884, tmp));
- assertEquals(0, x *= (800317515));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= (984205514));
- assertEquals(857282491, x += (tmp = 857282491, tmp));
- assertEquals(587792897, x &= (tmp = 2951307845.164059, tmp));
- assertEquals(595301269, x |= (tmp = 24285588.90314555, tmp));
- assertEquals(1190602538, x += x);
- assertEquals(0, x -= x);
- assertEquals(-442423060, x |= ((x^((x-(tmp = 2342497475.637024, tmp))%(-1900074414.7678084)))|((tmp = 1932380130, tmp)%(x%(2291727569.817062)))));
- assertEquals(-442423060, x %= (((tmp = 703479475.545413, tmp)>>(x-x))<<(2435723056.753845)));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= x);
- assertEquals(-1265317851, x |= (tmp = -1265317851, tmp));
- assertEquals(-2, x >>= (-2015895906.8256726));
- assertEquals(-0, x %= x);
- assertEquals(-0, x %= (((1219237746)+(284683029))*(((tmp = 2288119628, tmp)|(-404658161.2563329))*(-265228691.74142504))));
- assertEquals(1039509109, x -= (-1039509109));
- assertEquals(2079018218, x += x);
- assertEquals(-1979.9362673719077, x /= ((3219723500)>>x));
- assertEquals(-62, x >>= ((x/(326466691))*(tmp = -607654070, tmp)));
- assertEquals(-45, x |= (tmp = -2954888429.549882, tmp));
- assertEquals(-1180929712, x &= (3114037588.570232));
- assertEquals(815550480, x &= (-2302684143.3378315));
- assertEquals(815550480, x %= (-2177479570));
- assertEquals(815550480, x %= (tmp = 2895822167, tmp));
- assertEquals(815550480, x %= (-1247621230.5438688));
- assertEquals(283929811, x -= ((tmp = 251831053.17096448, tmp)|((tmp = 1140463506.004994, tmp)+(tmp = -743224673.546309, tmp))));
- assertEquals(1825767424, x <<= (((tmp = 1732353599, tmp)^(tmp = 658726044, tmp))>>>((-2827889370.932477)%(tmp = 1950139204.3291233, tmp))));
- assertEquals(1828450414, x |= (tmp = 1618538606, tmp));
- assertEquals(0, x <<= (-2411670689.045702));
- assertEquals(0, x <<= (-27744888.428537607));
- assertEquals(-0, x /= (tmp = -1597552450, tmp));
- assertEquals(0, x >>>= (((2165722776.7220936)>>>(tmp = 1233069931, tmp))>>>(-1120420811)));
- assertEquals(-0, x *= ((tmp = -1505252656, tmp)>>((((3035637099.6156535)&((467761577.7669761)>>(-361034537)))^(tmp = -2347994840.6541123, tmp))*(tmp = -2191739821, tmp))));
- assertEquals(0, x &= (795727404.0738752));
- assertEquals(-0, x *= (tmp = -3125944685.3991394, tmp));
- assertEquals(-0, x *= (x&x));
- assertEquals(0, x >>= ((tmp = -2045709233, tmp)^x));
- assertEquals(NaN, x /= (x>>(x/(3102894071))));
- assertEquals(NaN, x += ((tmp = 2149079756.8941655, tmp)-(tmp = 810121645.305179, tmp)));
- assertEquals(0, x >>>= (-859842989));
- assertEquals(0, x >>>= (tmp = 2530531143.9369526, tmp));
- assertEquals(0, x >>= (((-932981419.6254237)|(tmp = 1591591715, tmp))>>>(x+((3149795006)>>>(tmp = 613352154, tmp)))));
- assertEquals(-4294967295, x -= ((((-2289331668)%(-282648480.0078714))>>(-1373720705.5142756))>>>((tmp = 15511563.517014384, tmp)/(360279080))));
- assertEquals(1, x &= x);
- assertEquals(0, x >>= (x^(-2791872557.5190563)));
- assertEquals(0, x &= ((tmp = 336466956.7847167, tmp)>>((1235728252.053619)|(x<<((1828176636.13488)%x)))));
- assertEquals(-0, x *= (-364042830.8894656));
- assertEquals(0, x >>>= x);
- assertEquals(-1675298680, x |= ((2323049541.321387)+(296619075)));
- assertEquals(-0, x %= x);
- assertEquals(-1583048579.4420977, x += (-1583048579.4420977));
- assertEquals(0, x -= x);
- assertEquals(-2, x ^= ((603171992.0545617)/(((-271888695.718297)%(tmp = -400159585, tmp))^((((tmp = 1536123971, tmp)-(tmp = -2310418666.6243773, tmp))|((tmp = 2242779597.1219435, tmp)<<(tmp = 1758127684.4745512, tmp)))/x))));
- assertEquals(-2, x &= (x&x));
- assertEquals(0, x &= ((tmp = -1098806007.4049063, tmp)/(((2862384059.3229523)/((((tmp = -92960842, tmp)-(x>>(tmp = 1244068344.2269042, tmp)))&x)*(tmp = -1919148313, tmp)))<<(-2486665929))));
- assertEquals(0, x &= x);
- assertEquals(-1441272634.582818, x -= (1441272634.582818));
- assertEquals(-3, x >>= (tmp = 3186393693.7727594, tmp));
- assertEquals(-1206855850, x ^= (((tmp = 607979495.303539, tmp)-(tmp = -2480131951, tmp))^(x*((tmp = 1324153477, tmp)/((1248126288)+(x|(1917331780.0741704)))))));
- assertEquals(-1206855853, x ^= (x>>>(653288765.1749961)));
- assertEquals(-1206857725, x &= (3149461539.6019173));
- assertEquals(3088109571, x >>>= (x*(x<<(tmp = 1543540084, tmp))));
- assertEquals(536903680, x &= (tmp = 644851760, tmp));
- assertEquals(536903674.312194, x += (((-3183290076)-((tmp = 40738191.12097299, tmp)-x))/((x>>>(3151371851.9408646))^(tmp = 472698205.22445416, tmp))));
- assertEquals(2127424750.0506563, x -= (tmp = -1590521075.7384624, tmp));
- assertEquals(2127424750.0506563, x %= (tmp = 3027273433.361373, tmp));
- assertEquals(0, x >>= (x>>(1445204441.702043)));
- assertEquals(NaN, x %= (x<<x));
- assertEquals(0, x ^= ((tmp = -2903841152.136344, tmp)-(x%(2938662860))));
- assertEquals(0, x <<= (x<<x));
- assertEquals(0, x >>>= (tmp = -979481631.33442, tmp));
- assertEquals(0, x >>= x);
- assertEquals(0, x &= (((x%((((((tmp = 1657446354.6820035, tmp)>>(-1916527001.2992697))/x)>>(tmp = 1450467955, tmp))&(277676820))+(x/(-945587805))))/((tmp = -690095354, tmp)^x))+(tmp = -2651195021, tmp)));
- assertEquals(0, x <<= (752343428.2934296));
- assertEquals(0, x /= (tmp = 3022310299, tmp));
- assertEquals(0, x >>= (x%((388245402)>>>x)));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x %= ((tmp = 1205123529.8649468, tmp)>>>(-2848300932)));
- assertEquals(0, x >>= ((x>>>x)<<(tmp = 487841938, tmp)));
- assertEquals(0, x *= (((273436000.9463471)|(tmp = 141134074.27978027, tmp))^(tmp = 1220326800.7885802, tmp)));
- assertEquals(1525600768, x |= (((x^(-2674777396))-(tmp = 1966360716.3434916, tmp))<<(794782595.9340223)));
- assertEquals(761927595, x %= (tmp = -763673173, tmp));
- assertEquals(1.1353588586934338, x /= ((x&((-1897159300.4789193)*(-348338328.0939896)))&(978680905.6470605)));
- assertEquals(8.631173314966319e-10, x /= (1315416592));
- assertEquals(0, x >>= ((tmp = -2581239435, tmp)-((-628818404.1122074)<<x)));
- assertEquals(0, x -= x);
- assertEquals(0, x *= (2925158236));
- assertEquals(0, x /= (x+(tmp = 1405531594.0181243, tmp)));
- assertEquals(0, x *= (2712022631.230831));
- assertEquals(0, x >>= (tmp = 80518779.81608999, tmp));
- assertEquals(1953477932.8046472, x += (tmp = 1953477932.8046472, tmp));
- assertEquals(1953477932, x >>= (tmp = 3025539936, tmp));
- assertEquals(1953477932, x -= ((-2675119685.8812313)>>(x/(-1808264410.9754841))));
- assertEquals(1292620430, x += ((-660857502)%((((tmp = -698782819, tmp)%(tmp = 2847304199, tmp))<<(-2423443217.1315413))+x)));
- assertEquals(78895, x >>>= x);
- assertEquals(2, x >>= x);
- assertEquals(2, x <<= (tmp = 1313641888.8301702, tmp));
- assertEquals(1857416935.2532766, x += (tmp = 1857416933.2532766, tmp));
- assertEquals(-1677721600, x <<= (tmp = -2482476902, tmp));
- assertEquals(309226853.62854385, x -= (tmp = -1986948453.6285439, tmp));
- assertEquals(33965156, x &= (2409088742));
- assertEquals(Infinity, x /= (x-(x<<((x/(tmp = -3106546671.536726, tmp))/((tmp = 2695710176, tmp)-((((-2102442864)&(857636911.7079853))/x)%(-65640292)))))));
- assertEquals(1270005091, x |= (tmp = 1270005091.0081215, tmp));
- assertEquals(1270005091, x %= (tmp = -1833876598.2761571, tmp));
- assertEquals(158750636, x >>>= x);
- assertEquals(-1000809106.0879555, x -= (tmp = 1159559742.0879555, tmp));
- assertEquals(72400936, x &= ((2448271389.3097963)%(tmp = 1517733861, tmp)));
- assertEquals(282816, x >>= x);
- assertEquals(282816, x %= (tmp = 3192677386, tmp));
- assertEquals(0.00021521351827207216, x /= (1314118194.2040696));
- assertEquals(Infinity, x /= (((tmp = 2822091386.1977024, tmp)&x)%(tmp = -3155658210, tmp)));
- assertEquals(NaN, x %= (-359319199));
- assertEquals(0, x >>>= (((tmp = -2651558483, tmp)-(x<<(tmp = 2537675226.941645, tmp)))<<(tmp = 667468049.0240343, tmp)));
- assertEquals(-0, x *= (tmp = -2827980482.12998, tmp));
- assertEquals(-0, x %= (((tmp = -689972329.3533998, tmp)>>>x)|(tmp = -7488144, tmp)));
- assertEquals(0, x >>>= x);
- assertEquals(0, x |= x);
- assertEquals(-2410373675.2262926, x -= (2410373675.2262926));
- assertEquals(1840423, x >>= ((-1081642113)^x));
- assertEquals(-4829451429403412, x *= (-2624098606.35485));
- assertEquals(-94552231, x %= (tmp = -97015883, tmp));
- assertEquals(-94433287, x ^= (((tmp = -2297735280, tmp)&(((tmp = 2261074987.7072973, tmp)%((((2565078998)^(-2573247878))|x)|(((tmp = -2120919004.7239416, tmp)>>(tmp = -579224101, tmp))>>>(1905808441))))*(x|(3149383322))))>>(542664972)));
- assertEquals(0, x ^= (x<<(tmp = -3112569312, tmp)));
- assertEquals(0, x <<= (-2141934818.7052917));
- assertEquals(0, x >>= (tmp = -2539525922, tmp));
- assertEquals(-434467613, x ^= (tmp = -434467613, tmp));
- assertEquals(-274792709, x |= (1233452601.462551));
- assertEquals(-274726917, x |= (-2130333750));
- assertEquals(-272629761, x |= (-1516071602.5622227));
- assertEquals(-272629761, x |= ((tmp = 3012131694, tmp)&((tmp = -2595342375.8674774, tmp)-((tmp = -2710765792, tmp)>>>((x-(tmp = 2397845540, tmp))+(2496667307))))));
- assertEquals(-4194305, x |= (1343705633.165825));
- assertEquals(4190207, x >>>= ((tmp = 276587830, tmp)*((tmp = -1517753936, tmp)>>x)));
- assertEquals(0, x >>= (x|((2247486919)-((-1664642412.4710495)*((((tmp = -358185292.17083216, tmp)-(tmp = -1472193444, tmp))*(tmp = 2699733752, tmp))&((x|(x<<(1137610148.1318119)))>>(((375089690.8764564)*x)&(tmp = 859788933.9560187, tmp))))))));
- assertEquals(0, x %= (3080673960));
- assertEquals(0, x >>>= (1328846190.1963305));
- assertEquals(1249447579, x |= (-3045519717.580775));
- assertEquals(-0.8743931060971377, x /= (-1428931187));
- assertEquals(1, x |= ((tmp = -1756877535.7557893, tmp)/((-142900015.93200803)<<(1414557031.347334))));
- assertEquals(759627265, x ^= (759627264.0514802));
- assertEquals(741823, x >>= (1106391210));
- assertEquals(610451, x &= ((x>>>((919849416)+((tmp = -427708986, tmp)^((x%x)|(tmp = -2853100288.932063, tmp)))))*x));
- assertEquals(372650423401, x *= x);
- assertEquals(410404493, x >>>= ((((-1425086765)>>>x)>>((2813118707.914771)>>(-424850240)))^x));
- assertEquals(120511585729013, x *= ((tmp = -1889454669, tmp)>>>x));
- assertEquals(120513295294304.22, x -= (tmp = -1709565291.2115698, tmp));
- assertEquals(6164, x >>>= ((2244715719.397763)^(tmp = -741235818.6903033, tmp)));
- assertEquals(937572790.468221, x -= (tmp = -937566626.468221, tmp));
- assertEquals(937572790, x |= ((2129102867.156146)*(x%x)));
- assertEquals(32, x &= ((2700124055.3712993)>>>((1977241506)>>>(-2915605511))));
- assertEquals(32, x %= (tmp = -2513825862, tmp));
- assertEquals(0, x <<= (-1379604802));
- assertEquals(0, x >>>= (tmp = -1033248759, tmp));
- assertEquals(-1151517050, x ^= (3143450246));
- assertEquals(-180577, x |= ((738373819.4081701)^(-357134176)));
- assertEquals(-0, x %= x);
- assertEquals(-2086887759, x |= (tmp = 2208079537, tmp));
- assertEquals(-2, x >>= (1460216478.7305799));
- assertEquals(-2, x %= ((-1979700249.0593133)^(-3156454032.4790583)));
- assertEquals(-256, x <<= ((1810316926)>>>(tmp = 414362256, tmp)));
- assertEquals(-1, x >>= (((((((-1616428585.595561)*((tmp = 2574896242.9045777, tmp)|(86659152.37838173)))>>(((tmp = 2476869361, tmp)&((x+((tmp = -2445847462.1974697, tmp)>>(tmp = -1960643509.5255682, tmp)))+(x|(((((2231574372.778028)|(tmp = 1824767560, tmp))>>>((1108035230.2692142)|(tmp = 2354035815, tmp)))/((tmp = -2602922032, tmp)>>(-925080304.7681987)))-x))))-(x>>x)))>>>((tmp = 751425805.8402164, tmp)|(tmp = 1165240270.3437088, tmp)))-x)*(2870745939))-(x>>>((tmp = 2986532631.405425, tmp)>>>(((tmp = 2547448699, tmp)+(((((x<<(((((-2756908638.4197435)>>>(3134770084))-(-1147872642.3756688))%(x*(tmp = -282198341.6600039, tmp)))+(-770969864.2055655)))+((-2725270341)^x))/(-3093925722))>>(x&x))>>((tmp = -2705768192, tmp)>>>(((tmp = 577253091.6042917, tmp)/(((x&(((((x+x)>>>(-1000588972))/(x&(717414336)))^(tmp = 428782104.21504414, tmp))>>>(1084724288.953223)))%(tmp = -2130932217.4562194, tmp))&x))-(-286367389)))))+((x>>(tmp = 2001277117, tmp))>>((tmp = 1028512592, tmp)^((tmp = 2055148650, tmp)+((tmp = 1490798399, tmp)/(tmp = -2077566434.2678986, tmp))))))))));
- assertEquals(-1, x |= (tmp = 1542129482, tmp));
- assertEquals(-671816743, x &= (tmp = -671816743.9111726, tmp));
- assertEquals(-1840333080, x -= (1168516337));
- assertEquals(-1755382023, x |= ((((tmp = 2625163636.0142937, tmp)>>>((tmp = 1534304735, tmp)^x))-(tmp = -1959666777.9995313, tmp))%x));
- assertEquals(-1750421896, x += (x>>>(tmp = -1364828055.1003118, tmp)));
- assertEquals(-72864007, x %= (tmp = 239651127, tmp));
- assertEquals(-72863956, x -= (((tmp = -1103261657.626319, tmp)*((tmp = 2789506613, tmp)+((tmp = 2294239314, tmp)>>>(2588428607.5454817))))>>x));
- assertEquals(-170337477, x -= (tmp = 97473521, tmp));
- assertEquals(-170337477, x |= (((tmp = 246292300.58998203, tmp)/(((tmp = -2664407492, tmp)|((-2416228818)^(tmp = 909802077, tmp)))%(tmp = 532643021.68109465, tmp)))/(tmp = 1015597843.8295637, tmp)));
- assertEquals(1, x >>>= (((tmp = -2247554641.7422867, tmp)/(1186555294))%(tmp = -785511772.3124621, tmp)));
- assertEquals(1188939891.668705, x -= (tmp = -1188939890.668705, tmp));
- assertEquals(1188939891, x &= x);
- assertEquals(1188413555, x &= (((tmp = -372965330.5709038, tmp)%(((tmp = 3108909487, tmp)|(x^(-1056955571.9951684)))^(-1549217484.009048)))/(x>>>(1403428437.9368362))));
- assertEquals(-0.7343692094664643, x /= (-1618278026.4758227));
- assertEquals(0, x -= x);
- assertEquals(0, x &= (-2701762139.7500515));
- assertEquals(0, x >>>= (((-1692761485.2299166)^x)+(tmp = -1221349575.938864, tmp)));
- assertEquals(0, x <<= ((2148160230)<<x));
- assertEquals(0, x <<= (((x<<(-740907931.38363))&(tmp = -930960051.6095045, tmp))>>(x/((tmp = -1921545150.1239789, tmp)/(-3015379806)))));
- assertEquals(0, x <<= x);
- assertEquals(NaN, x /= (x|x));
- assertEquals(0, x >>= (tmp = -2265988773, tmp));
- assertEquals(-0, x *= (((x<<(-928153614))<<(-989694208))^(2544757713.481016)));
- assertEquals(0, x >>= ((tmp = 578009959.5299993, tmp)>>x));
- assertEquals(0, x /= ((((tmp = 412689800.0431709, tmp)&(1630886276))*(tmp = 2028783080.7296097, tmp))/x));
- assertEquals(0, x |= ((((x*(-2197198786))>>((2719887264.761987)<<(tmp = 2253246512, tmp)))-(tmp = -150703768.07045603, tmp))/(((-3160098146)%(((((1486098047.843547)>>(((tmp = -593773744.1144242, tmp)&(x<<(2651087978)))|((-680492758.930413)>>(tmp = 88363052.13662052, tmp))))<<x)<<(tmp = 2232672341, tmp))/((x<<x)&(((((348589117.64135563)<<(-1010050456.3097556))^(x/(tmp = -2282328795, tmp)))-(tmp = 1653716293, tmp))-((3157124731)/((tmp = 3007369535.341745, tmp)%(tmp = -2246556917, tmp)))))))+x)));
- assertEquals(0, x >>= ((1935211663.5568764)>>(x-(tmp = 2116580032, tmp))));
- assertEquals(-1725272693, x ^= (tmp = -1725272693, tmp));
- assertEquals(313683, x >>>= (-1782632531.2877684));
- assertEquals(0.009772287443565642, x /= (tmp = 32099240, tmp));
- assertEquals(-647945916.9902277, x += (-647945917));
- assertEquals(3647021380, x >>>= ((((((((2470411371.688199)<<x)>>x)-(x>>>((tmp = 1750747780, tmp)/x)))-x)<<(tmp = -2666186351.695101, tmp))^(((tmp = 2749205312.6666174, tmp)%x)&(2069802830.360536)))<<(tmp = 6051917.9244532585, tmp)));
- assertEquals(-647939220, x |= ((x>>>((tmp = -2980404582.794245, tmp)>>>(-996846982)))^x));
- assertEquals(-572178450, x |= ((-800571300.3277931)+(tmp = 2084365671, tmp)));
- assertEquals(1172311208, x &= (x&((tmp = -1207487657.8953774, tmp)^x)));
- assertEquals(12176516458994, x += ((((tmp = -1534997221, tmp)%(412142731))*((tmp = 2958726303, tmp)>>(1489169839)))+(((-574726407.2051775)>>>(((1772885017)<<(947804536.9958035))>>(-2406844737)))>>x)));
- assertEquals(-1480065024, x <<= x);
- assertEquals(-1736999042.227129, x += (tmp = -256934018.22712898, tmp));
- assertEquals(-1338699394, x ^= ((((((x%(((tmp = -2551168455.222048, tmp)|(3213507293.930222))/((-1559278033)>>((tmp = 3107774495.3698573, tmp)-(2456375180.8660913)))))*((x*(tmp = 1088820004.8562922, tmp))+((tmp = 1850986704.9836102, tmp)%(tmp = -1226590364, tmp))))*(1786192008))&(((2193303940.310299)%(tmp = 1041726867.0602217, tmp))|((2210722848)/((-1293401295.6714435)&((tmp = 3052430315, tmp)|x)))))>>>(tmp = -2028014470.1524236, tmp))+(((1695818039.0383925)<<((1669068145)*(-2746592133.899276)))<<(tmp = 519092169, tmp))));
- assertEquals(-334674849, x >>= (1170377794));
- assertEquals(-10214, x >>= ((tmp = 1074704264.3712895, tmp)>>>((tmp = -1200860192, tmp)^((tmp = 539325023.4101218, tmp)*((tmp = -588989295, tmp)|x)))));
- assertEquals(1384169472, x &= (1384171140));
- assertEquals(1384169472, x >>>= ((tmp = -2161405973.830981, tmp)*(tmp = 2054628644, tmp)));
- assertEquals(1610140972, x |= (527961388));
- assertEquals(1073273198, x += ((tmp = -259650225.71344328, tmp)&(tmp = -344359694, tmp)));
- assertEquals(65507, x >>= ((x<<((tmp = 2925070713.5245204, tmp)%(x+((tmp = -1229447799, tmp)/(((x/(x|(((-2337139694)|((((((2996268529.7965417)&x)%(((tmp = -1088587413, tmp)>>(-1384104418.90339))>>((tmp = -1643984822.3946526, tmp)+x)))%(((1118125268.4540217)-((((-1975051668.6652594)-(-704573232))+((tmp = 1674952373, tmp)/(tmp = 1321895696.0062659, tmp)))*(tmp = 1820002533.2021284, tmp)))>>>(tmp = -583960746.9993203, tmp)))|((tmp = -2577675508.550925, tmp)&x))/(tmp = 1459790066, tmp)))/(((((1051712301.7804044)&(tmp = -2726396354, tmp))^(tmp = 263937254.18934345, tmp))+(((x^x)*(((tmp = -2289491571, tmp)+x)%(-2239181148)))&x))>>(tmp = -1743418186.3030887, tmp)))))/(tmp = 1475718622, tmp))<<x)))))|(x&((((tmp = -2934707420, tmp)<<x)/x)^(1022527598.7386684)))));
- assertEquals(2047, x >>= (x-(tmp = 2300626270, tmp)));
- assertEquals(8384512, x <<= (tmp = -1917680820, tmp));
- assertEquals(0, x <<= (2393691134));
- assertEquals(0, x >>= x);
- assertEquals(649995936.5853252, x -= (tmp = -649995936.5853252, tmp));
- assertEquals(649995936, x &= x);
- assertEquals(-0.33672017582945424, x /= (tmp = -1930374188, tmp));
- assertEquals(-0.33672017582945424, x += (x&((1208055031)^(-2761287670.968586))));
- assertEquals(0, x |= x);
- assertEquals(0, x <<= ((-2038368978)/x));
- assertEquals(0, x >>= (x&((tmp = 2481378057.738218, tmp)&(x+(1172701643)))));
- assertEquals(0, x <<= ((x*(((((((tmp = 70690601.3046323, tmp)&(((((((((((x+(x+(x^(3118107461))))<<(264682213.41888392))&(tmp = -709415381.8623683, tmp))%(((((-1840054964)>>>(tmp = -405893120.89603686, tmp))|((-625507229)^(3128979265)))>>(x>>((tmp = -2480442390, tmp)*((x>>(tmp = -421414980.88330936, tmp))>>>((tmp = 1850868592, tmp)&(-2948543832.879225))))))|((2986545185)&((tmp = -1947550706, tmp)%(((tmp = 2590238422.1414256, tmp)/(((tmp = -361038812, tmp)>>x)|(((tmp = 1798444068, tmp)|((x&((tmp = -3104542069, tmp)-x))*((tmp = -1158658918, tmp)+((tmp = 2777031040.5552707, tmp)<<(-2816019335.9008327)))))<<x)))/(((2287795988.231702)/x)/(((-2588712925)>>>(2521189250))*((tmp = -2533527920, tmp)+(tmp = 1762281307.2162101, tmp)))))))))/x)/(tmp = 1047121955.5357032, tmp))|(((-121292251)<<(x^(x-(tmp = 1420006180, tmp))))%((-2278606219)>>>(((tmp = -1412487726, tmp)&(((((tmp = 253596554.16016424, tmp)/(tmp = 2083376247.0079951, tmp))^(x^((1549116789.8449988)>>>((((-1844170084)^(tmp = 1886066422, tmp))&x)<<(34918329)))))^(tmp = -440805555.3369155, tmp))-x))%(-1936512969)))))+(2911511178.4035435))|(1012059391))|(x>>>(tmp = -2551794626.158037, tmp)))+((2926596072.210515)/(tmp = -280299595.0450909, tmp))))&((tmp = 1501086971, tmp)^(tmp = 2114076983, tmp)))-((-1679390574.1466925)-(941349044)))-((x>>x)>>((-2600539474.2033434)+(tmp = 2567056503.9079475, tmp))))*(tmp = 1285896052, tmp))%(((tmp = 1191465410.7595167, tmp)>>((tmp = -2857472754, tmp)%x))>>>(((tmp = 1960819627.6552541, tmp)&(-2651207221.127376))*((((-687312743)+((x>>x)<<x))|((((((1549588195)*((tmp = 2733091019, tmp)^((527322540)<<(x>>x))))%(tmp = -2063962943, tmp))*x)*(734060600))&(-3049417708)))+(((((1084267726)+((x|x)^((tmp = -1917070472.4858549, tmp)%((690016078.9375831)*x))))%((((((tmp = -2091172769, tmp)%(2532365378))>>>(-871354260))/(tmp = 254167019.07825458, tmp))&(1330216175.9871218))>>(tmp = 1931099207, tmp)))^(-1116448185.2618852))>>((961660080.8135855)/x)))))))>>>(-1486048007.7053368)));
- assertEquals(0, x >>= x);
- assertEquals(0, x %= (tmp = -1202200444.6506357, tmp));
- assertEquals(-0, x *= (-527500796.4145117));
- assertEquals(0, x >>= (tmp = -2082822707, tmp));
- assertEquals(0, x *= ((-1882398459.290778)>>>x));
- assertEquals(0, x &= (x/(tmp = -1569332286.392817, tmp)));
- assertEquals(-390169607, x |= (-390169607.11600184));
- assertEquals(-780339214, x += x);
- assertEquals(-780339214, x %= (2765959073));
- assertEquals(-5954, x >>= (tmp = -1900007055, tmp));
- assertEquals(743563420, x &= ((((-1520146483.5367205)|(-2075330284.3762321))-(tmp = -2263151872, tmp))%(-1264641939.957402)));
- assertEquals(1487126840, x += (x>>>(((x+((tmp = -1263274491, tmp)>>>x))&(470419048.0490037))%(tmp = -2642587112, tmp))));
- assertEquals(Infinity, x /= (x^x));
- assertEquals(0, x ^= ((tmp = -1436368543, tmp)+(x/(tmp = -1125415374.3297129, tmp))));
- assertEquals(0, x += x);
- assertEquals(0, x <<= x);
- assertEquals(0, x &= (tmp = 3101147204.2905564, tmp));
- assertEquals(0, x &= (tmp = 2914487586.606511, tmp));
- assertEquals(0, x += x);
- assertEquals(0, x -= (((-1738542908.6138556)&(((x+x)-(tmp = -2801153969, tmp))%(tmp = -1206684064.1477358, tmp)))>>((-2575546469.271897)|(tmp = -2573119106, tmp))));
- assertEquals(-1468808707, x ^= (tmp = -1468808707, tmp));
- assertEquals(1357349882, x <<= (tmp = -2808501087.7003627, tmp));
- assertEquals(-572025862, x |= ((((tmp = -2415486246.573399, tmp)/((tmp = -707895732.4593301, tmp)&x))%((-1960091005.0425267)*(972618070.9166157)))-(1649962343)));
- assertEquals(327213586796843100, x *= (x%(1337884626)));
- assertEquals(42991616, x &= (-2905576654.1280055));
- assertEquals(-26049289585042860, x *= (-605915571.6557121));
- assertEquals(597809748, x >>= ((362850791.077795)/(tmp = 1222777657.4401796, tmp)));
- assertEquals(597809748, x |= x);
- assertEquals(770065246, x -= ((-711227660)|(tmp = -508554506, tmp)));
- assertEquals(593000483097040500, x *= x);
- assertEquals(0, x %= x);
- assertEquals(0, x <<= (317862995.456813));
- assertEquals(0, x >>= ((tmp = 2518385735, tmp)+((-2973864605.267604)/(-930953312.718833))));
- assertEquals(1227822411, x ^= (x^(1227822411.8553264)));
- assertEquals(1090520320, x &= (x+((((-2100097959)>>(x/(tmp = -2002285068, tmp)))/(-364207954.9242482))-((tmp = 2771293106.7927113, tmp)-(tmp = -847237774, tmp)))));
- assertEquals(1090520320, x >>= (((((2439492849)<<((-2932672756.2578926)*((743648426.7224461)+((2942284935)<<((x/(((tmp = 886289462.6565771, tmp)+(-459458622.7475352))>>(tmp = -785521448.4979162, tmp)))|(tmp = -11630282.877367258, tmp))))))-(tmp = -647511106.9602091, tmp))^x)&x));
- assertEquals(115944291.48829031, x %= (243644007.12792742));
- assertEquals(1, x /= x);
- assertEquals(0, x >>>= ((tmp = -819782567, tmp)%(tmp = 2774793208.1994505, tmp)));
- assertEquals(0, x >>= (tmp = 721096000.2409859, tmp));
- assertEquals(0, x &= ((x%x)%x));
- assertEquals(-0, x *= ((-1670466344)<<x));
- assertEquals(0, x >>= (-677240844.904707));
- assertEquals(NaN, x %= (((((-1575993236.6126876)/(-2846264078.9581823))^((((-2220459664)-(((-1809496020)>>>(tmp = -3015964803.4566207, tmp))&x))/(tmp = -3081895596.0486784, tmp))>>>(x&x)))%(x^(-1338943139)))^(x-((((2074140963.2841332)^(tmp = 1878485274, tmp))%(((x/(-2568856967.6491556))^x)<<((x+x)^((((2139002721)|(x<<(-1356174045.840464)))>>x)-(tmp = 2305062176, tmp)))))>>>(((((x<<(tmp = -1663280319.078543, tmp))-((1498355849.4158854)-((-1321681257)>>>(tmp = -1321415088.6152222, tmp))))^(-2266278142.1584673))+(858538943))&((((x-((x|(((tmp = -1576599651, tmp)+((tmp = 1595319586, tmp)&(-2736785205.9203863)))>>((x+((-1856237826)+x))<<(tmp = -1590561854.3540869, tmp))))^(((-41283672.55606127)&(tmp = 2971132248, tmp))+x)))/(-849371349.1667476))%(x*((-1705070934.6892798)>>>x)))<<((2418200640)*x)))))));
- assertEquals(0, x >>>= (tmp = 664214199.5283061, tmp));
- assertEquals(0, x <<= ((-2827299151)<<(1815817649)));
- assertEquals(1405772596, x |= (tmp = 1405772596, tmp));
- assertEquals(-1483422104, x <<= (-2791499935.6822596));
- assertEquals(-45271, x >>= (1740128943.4254808));
- assertEquals(-45271, x <<= ((2072269957)-((tmp = -2553664811.4472017, tmp)*(tmp = -2502730352, tmp))));
- assertEquals(1192951471.6745887, x -= (-1192996742.6745887));
- assertEquals(-353370112, x <<= (tmp = -1410280844, tmp));
- assertEquals(0, x ^= (x%((2754092728)*(-1017564599.1094015))));
- assertEquals(-2662096003.2397957, x -= (tmp = 2662096003.2397957, tmp));
- assertEquals(-2587094028.50764, x -= (tmp = -75001974.7321558, tmp));
- assertEquals(6693055512339889000, x *= x);
- assertEquals(897526784, x %= (x-((tmp = 897526813, tmp)%(-1525574090))));
- assertEquals(7011928, x >>= ((-440899641.344357)%x));
- assertEquals(8382047686388683, x += (x*(1195398423.8538609)));
- assertEquals(16764095372777366, x += x);
- assertEquals(16764096859576696, x -= (tmp = -1486799329.7207344, tmp));
- assertEquals(16764099774187724, x += (2914611029));
- assertEquals(16764102926624664, x -= (-3152436939.724612));
- assertEquals(-538220648, x |= x);
- assertEquals(269110324, x /= (((-2114698894.6014318)/(tmp = 767687453, tmp))>>(623601568.1558858)));
- assertEquals(256, x >>= x);
- assertEquals(-293446891, x += (x+(-293447403)));
- assertEquals(119, x >>>= ((1759400753)>>(2481263470.4489403)));
- assertEquals(14, x >>= (762849027.89693));
- assertEquals(16, x += (x&(x>>(1104537666.1510491))));
- assertEquals(-12499808227.980995, x *= (tmp = -781238014.2488122, tmp));
- assertEquals(1, x /= x);
- assertEquals(1, x &= x);
- assertEquals(0, x >>>= ((tmp = 1513381008, tmp)|(tmp = 1593208075.7259543, tmp)));
- assertEquals(0, x &= (-788154636.2843091));
- assertEquals(-0, x /= (tmp = -2124830879, tmp));
- assertEquals(0, x &= (934237436));
- assertEquals(0, x |= x);
- assertEquals(-79370942.97651315, x += (-79370942.97651315));
- assertEquals(-79370942.97651315, x %= ((tmp = -2683255523, tmp)<<(tmp = 2323123280.287587, tmp)));
- assertEquals(-79370942, x |= x);
- assertEquals(0.05861647801688159, x /= (-1354072177.061561));
- assertEquals(0, x <<= (((((((tmp = 1989257036, tmp)&(tmp = 1565496213.6578887, tmp))&x)&(tmp = -2798643735.905287, tmp))&(2354854813.43784))%(tmp = 1118124748, tmp))<<((tmp = 2453617740, tmp)*(((tmp = 1762604500.492329, tmp)<<(-2865619363))%(((2474193854.640994)|((tmp = 1425847419.6256948, tmp)|(((-1271669386)%((x|((tmp = -2059795445.3607287, tmp)+x))*(x*x)))>>>(tmp = -2997360849.0750895, tmp))))/(tmp = 2326894252, tmp))))));
- assertEquals(0, x >>>= ((-671325215)/((-727408755.8793397)>>(tmp = 315457854, tmp))));
- assertEquals(0, x >>= (x&x));
- assertEquals(0, x <<= ((x/x)>>>(((((x&x)-((x*(((tmp = -2689062497.0087833, tmp)^x)/((-1465906334.9701924)<<(tmp = -349000262, tmp))))*x))%(1630399442.5429945))*x)+((tmp = 605234630, tmp)%(tmp = 2325750892.5065155, tmp)))));
- assertEquals(0, x |= (x%((x>>(((((tmp = 1622100459, tmp)<<x)&((((((tmp = 2411490075, tmp)<<x)|x)>>((x<<x)-(-2133780459)))/x)&(x+x)))%(x/((((tmp = 580125125.5035453, tmp)>>>(-470336002.1246581))|((tmp = 871348531, tmp)*x))>>(2866448831.23781))))-((2352334552)-(-562797641.6467373))))-(x^(tmp = -681731388, tmp)))));
- assertEquals(0, x <<= (tmp = -1358347010.3729038, tmp));
- assertEquals(-260967814, x |= ((tmp = -260967814.45976686, tmp)%(tmp = 1126020255.1772437, tmp)));
- assertEquals(NaN, x %= ((((tmp = 3176388281, tmp)<<(tmp = 611228283.2600244, tmp))>>>((tmp = 3068009824, tmp)+(tmp = 2482705111, tmp)))>>>((tmp = -750778285.2580311, tmp)>>>x)));
- assertEquals(0, x <<= (x>>>x));
- assertEquals(0, x /= (1238919162));
- assertEquals(0, x >>= (x^x));
- assertEquals(0, x &= (-2137844801));
- assertEquals(0, x >>>= (x^(x*(-1774217252))));
- assertEquals(0, x >>= x);
- assertEquals(0, x |= x);
- assertEquals(0, x &= (x<<(tmp = 2791377560, tmp)));
- assertEquals(-1330674638.8117397, x += (tmp = -1330674638.8117397, tmp));
- assertEquals(353, x >>>= (-212202857.4320326));
- assertEquals(353, x ^= ((((x+(tmp = 1448262278, tmp))-(-3141272537))>>(tmp = 1116596587.7832575, tmp))>>>((x-(((tmp = 303953098, tmp)>>>((tmp = 691514425, tmp)/((176223098)*(((2876180016)%(-1805235275.892374))|x))))<<(((tmp = 528736141.838547, tmp)^(2556817082))*(2898381286.2846575))))|((-1445518239)&(tmp = 389789481.9604758, tmp)))));
- assertEquals(0, x >>>= (-227376461.14343977));
- assertEquals(0, x <<= (tmp = -2575967504, tmp));
- assertEquals(0, x <<= (x^((-2668391896)>>((x+(tmp = 598697235.9205595, tmp))+((((-2105306785)|((-1174912319.794015)>>>(x-((148979923)%((((tmp = -2459140558.4436393, tmp)|(1265905916.494016))^(tmp = 1213922357.2230597, tmp))|(1028030636))))))%x)+(((tmp = 1393280827.0135512, tmp)^((tmp = 1210906638, tmp)+(-1572777641.1396031)))<<x))))));
- assertEquals(0, x *= (tmp = 2134187165, tmp));
- assertEquals(-1084549964, x -= (tmp = 1084549964, tmp));
- assertEquals(-2045706240, x &= ((tmp = -1250758905.7889671, tmp)*(x+(((x<<(x/(tmp = -738983664.845448, tmp)))>>>x)&(tmp = 2197525295, tmp)))));
- assertEquals(-2045706240, x ^= (((522049712.14743733)>>(tmp = -2695628092, tmp))>>>(tmp = -2603972068, tmp)));
- assertEquals(2249261056, x >>>= x);
- assertEquals(-33291, x |= ((((1891467762)<<(184547486.213719))-((458875403.50689447)^(((x&(x*x))|x)%(-3127945140))))|(-100765232)));
- assertEquals(-33291, x %= (1460486884.1367688));
- assertEquals(-1, x >>= (tmp = -2667341441, tmp));
- assertEquals(-3.6289151568259606e-10, x /= (tmp = 2755644474.4072013, tmp));
- assertEquals(-3.6289151568259606e-10, x %= (tmp = 1186700893.0751028, tmp));
- assertEquals(0, x <<= (tmp = -1199872107.9612694, tmp));
- assertEquals(371216449, x ^= ((tmp = 371324611.1357789, tmp)&(x-(x|((tmp = -518410357, tmp)>>((tmp = 687379733, tmp)/x))))));
- assertEquals(0.3561383159088311, x /= (((((x%(((((-2293101242)%((((495316779)/x)-((-3198854939.8857965)>>>((tmp = -288916023, tmp)-(x^(tmp = -2504080119.431858, tmp)))))^(-1201674989)))-((2965433901)*(405932927)))/((1974547923)|(tmp = 534069372, tmp)))-(x-((x+(-1258297330))%x))))<<(((-2648166176.4947824)^(-3043930615))&(1550481610)))<<(tmp = -3118264986.743822, tmp))<<x)|x));
- assertEquals(-46272499.15029934, x -= (tmp = 46272499.50643766, tmp));
- assertEquals(-6, x >>= ((tmp = -731454087.0621192, tmp)>>>x));
- assertEquals(-2.7207928474520667e-9, x /= (((x<<(x|((tmp = -1650731700.9540024, tmp)/(tmp = -677823292, tmp))))^((((((1972576122.928667)>>x)%(2952412902.115453))<<((-2888879343)+(tmp = -425663504, tmp)))>>>(((((tmp = 1089969932, tmp)>>>(x|((-2088509661)/(1131470551))))>>>x)+x)|(tmp = 955695979.7982506, tmp)))|(((((tmp = 826954002.6188571, tmp)^(2016485728))|((x/(((x<<(tmp = 2493217141, tmp))/(-2259979800.997408))-(tmp = -427592173.41389966, tmp)))%(((-471172918)/x)>>>((383234436.16425097)&(tmp = 1664411146.5308032, tmp)))))*(tmp = 1863669754.7545495, tmp))*(x>>(2062197604)))))>>>((x-(2624545856))*(tmp = 1025803102, tmp))));
- assertEquals(0, x >>= ((tmp = 1068702028, tmp)*(296106770)));
- assertEquals(0, x ^= (x/x));
- assertEquals(85359536, x ^= (((x|(((tmp = 740629227, tmp)<<(-1107397366))%((tmp = 2315368172, tmp)>>(((-2269513683)|(-2698795048))+(-396757976)))))*(929482738.803125))^(((-1415213955.4198723)-(tmp = -2885808324, tmp))>>>((tmp = -472842353.85736656, tmp)&(tmp = 1684231312.4497018, tmp)))));
- assertEquals(2075131904, x <<= x);
- assertEquals(123, x >>>= (x>>>(tmp = 754093009, tmp)));
- assertEquals(0, x >>= ((-2690948145)/((1988638799)+x)));
- assertEquals(0, x >>>= (tmp = -798849903.2467625, tmp));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x *= (2431863540.4609756));
- assertEquals(484934656, x |= ((-2322193663)*(tmp = -2754666771, tmp)));
- assertEquals(-82505091404694530, x *= (tmp = -170136513, tmp));
- assertEquals(-82505090515370620, x += ((-148762237)&(tmp = 889417717, tmp)));
- assertEquals(-908221124, x %= (tmp = -2346393300, tmp));
- assertEquals(-1242515799, x ^= (2083328917));
- assertEquals(-1126056310271520600, x *= ((((tmp = -3065605442, tmp)<<(-3012703413))|x)^(-2081329316.4781387)));
- assertEquals(-1126056309941068000, x += ((((tmp = 1886925157, tmp)&((tmp = -163003119.31722307, tmp)/((tmp = 2094816076, tmp)>>((tmp = -706947027, tmp)^x))))^((1819889650.5261197)<<(-1641091933)))>>x));
- assertEquals(-1864360191, x |= (((x/x)|x)|x));
- assertEquals(-1864360191, x &= x);
- assertEquals(-3728720382, x += x);
- assertEquals(1042663165, x ^= (535165183.4230335));
- assertEquals(2644530017.8833704, x += (1601866852.8833704));
- assertEquals(-574949401, x |= ((tmp = 943193254.5210983, tmp)^((x%(tmp = -2645213497, tmp))*(-1904818769))));
- assertEquals(1763223578, x ^= ((x^(tmp = -2244359016, tmp))^(tmp = 320955522, tmp)));
- assertEquals(-1.9640961474334235, x /= (tmp = -897727731.0502782, tmp));
- assertEquals(1, x >>>= (x-(-3183031393.8967886)));
- assertEquals(1, x &= (tmp = 1732572051.4196641, tmp));
- assertEquals(1, x >>= (-1642797568));
- assertEquals(-2339115203.3140306, x += (-2339115204.3140306));
- assertEquals(1955852093, x ^= (((((-1469402389)/(-2648643333.1454573))>>>x)<<(x/x))>>x));
- assertEquals(-965322519, x ^= (3001399252));
- assertEquals(-2139727840, x &= (tmp = 2298411812.964484, tmp));
- assertEquals(2103328, x &= (tmp = -2488723009, tmp));
- assertEquals(1799011007, x |= (tmp = -2498057537.226923, tmp));
- assertEquals(1799011007, x |= ((-308193085)>>>x));
- assertEquals(1799011007, x |= x);
- assertEquals(818879107, x ^= (1542823996.423564));
- assertEquals(-2601416919234843600, x *= ((-2357923057.076759)-x));
- assertEquals(-2601416920481796600, x -= (x|(tmp = -3048039765, tmp)));
- assertEquals(-33690112, x <<= x);
- assertEquals(1039491072, x &= (tmp = 1039491474.3389125, tmp));
- assertEquals(126891, x >>= (-3079837011.6151257));
- assertEquals(-163191923097543, x *= (((tmp = -2847221258.4048786, tmp)*(x-(tmp = 1527622853.5925639, tmp)))^x));
- assertEquals(753616551, x ^= (-946895202));
- assertEquals(-347691264, x <<= (tmp = -433184408.33790135, tmp));
- assertEquals(0, x <<= (x|(tmp = -1911731462.6835637, tmp)));
- assertEquals(-0, x *= (tmp = -2616154415.1662617, tmp));
- assertEquals(0, x >>= x);
- assertEquals(0, x -= x);
- assertEquals(0, x *= (2272504250.501526));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x >>>= (2475346113));
- assertEquals(NaN, x /= (((x+(-2646140897))&(((tmp = 1039073714.142481, tmp)-x)*x))|(x*(((-1277822905.773948)>>(tmp = 2035512354.2400663, tmp))*(77938193.80013895)))));
- assertEquals(0, x ^= (x<<(tmp = 2491934268, tmp)));
- assertEquals(0, x &= (tmp = 569878335.4607931, tmp));
- assertEquals(-88575883, x ^= ((453890820.8012209)-((1569189876)%((-1280613677.7083852)^(-1902514249.29567)))));
- assertEquals(-88575883, x %= (tmp = 257947563.19206762, tmp));
- assertEquals(-88575881.7863678, x -= ((tmp = 1257547359.029678, tmp)/(x^(tmp = 948265672.821815, tmp))));
- assertEquals(-169, x >>= (tmp = -2530523309.6703596, tmp));
- assertEquals(-1, x >>= x);
- assertEquals(-1, x |= x);
- assertEquals(131071, x >>>= (-673590289));
- assertEquals(1117196836, x -= (-1117065765));
- assertEquals(3092236000.7125187, x -= (-1975039164.7125185));
- assertEquals(1, x /= x);
- assertEquals(-1599945863, x ^= (tmp = 2695021432.453696, tmp));
- assertEquals(940543782, x ^= (tmp = 2561494111, tmp));
- assertEquals(891400321673221800, x *= (tmp = 947749949.2662871, tmp));
- assertEquals(-1509927296, x >>= ((tmp = 1113290009, tmp)-x));
- assertEquals(-23, x >>= (tmp = 3216989626.7370152, tmp));
- assertEquals(-0, x %= x);
- assertEquals(0, x <<= (431687857.15246475));
- assertEquals(-0, x /= (tmp = -1924652745.081665, tmp));
- assertEquals(0, x <<= (1312950547.2179976));
- assertEquals(0, x %= ((tmp = 2110842937.8580878, tmp)|(x<<x)));
- assertEquals(0, x >>>= ((((-386879000)-((tmp = -2334036143.9396124, tmp)/((tmp = 965101904.2841234, tmp)<<(((3029227182.8426695)<<((tmp = -464466927, tmp)>>((((((tmp = 849594477.4111787, tmp)^(x&((513950657.6663146)%(x>>>x))))-((2898589263)|x))+(tmp = 2842171258.621288, tmp))>>>(tmp = -3158746843, tmp))<<(tmp = -2891369879, tmp))))-(x-(x&(tmp = -1707413686.2706504, tmp)))))))-(-2860419051))*(-1708418923)));
- assertEquals(-328055783, x += ((((2857010474.8010874)|((tmp = -1415997622.320347, tmp)-(-1706423374)))%(tmp = 824357977.1339042, tmp))^(x>>(x|x))));
- assertEquals(-168539902503779140, x *= ((tmp = -1057687018, tmp)<<((1408752963)-(2030056734))));
- assertEquals(-Infinity, x /= ((x-(2232683614.320658))*(((tmp = 195551174, tmp)*((((739595970)>>>(tmp = -2218890946.8788786, tmp))>>>(((tmp = -240716255.22407627, tmp)&(((((1598029916.3478878)|((tmp = -881749732, tmp)+(x>>x)))^(4443059))<<(((tmp = 2453020763, tmp)+((x>>>(tmp = -1904203813, tmp))&(-355424604.49235344)))<<(tmp = 2814696070, tmp)))%((tmp = -250266444, tmp)>>>(((((2710614972)&(((tmp = 910572052.6994087, tmp)^(tmp = -1028443184.3220406, tmp))/((-2718010521)^(tmp = 676361106, tmp))))|x)^(-1326539884))>>(-1573782639.7129154)))))/(tmp = 1923172768, tmp)))>>>(tmp = -2858780232.4886074, tmp)))/((((((-2060319376.353397)%x)>>(tmp = -3122570085.9065285, tmp))/(tmp = -1499018723.8064275, tmp))*((-655257391)<<x))>>x))));
- assertEquals(NaN, x += ((3059633304)%((((tmp = 2538190083, tmp)*((tmp = -2386800763.356364, tmp)/x))&(1341370996))%(-2929765076.078223))));
- assertEquals(NaN, x %= ((x&(347774821))>>>(462318570.2578629)));
- assertEquals(NaN, x *= ((2829810152.071517)*(tmp = 768565684.6892327, tmp)));
- assertEquals(NaN, x -= x);
- assertEquals(0, x >>>= (x&(tmp = 1786182552, tmp)));
- assertEquals(973967377, x ^= ((tmp = 2115869489.836838, tmp)&(994956497)));
- assertEquals(985246427.4230617, x += (11279050.423061728));
- assertEquals(985246427, x &= x);
- assertEquals(0, x >>= ((tmp = 1090502660.1867907, tmp)>>((-1599370623.5747645)-(tmp = -1321550958, tmp))));
- assertEquals(0, x %= (tmp = -2386531950.018572, tmp));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x >>>= (tmp = -1535987507.682257, tmp));
- assertEquals(-0, x /= (-2570639987));
- assertEquals(-542895632, x |= (tmp = -542895632, tmp));
- assertEquals(-33930977, x >>= (tmp = -861198108.1147206, tmp));
- assertEquals(-0, x %= x);
- assertEquals(0, x ^= (x*(-608154714.1872904)));
- assertEquals(-140011520, x |= ((tmp = 377418995, tmp)<<((1989575902)>>(tmp = -2558458031.066773, tmp))));
- assertEquals(-140026048, x -= ((((tmp = 1465272774.7540011, tmp)<<((2164701398)<<(tmp = -818119264, tmp)))>>((tmp = -1490486001, tmp)>>(664410099.6412607)))>>(x>>>(((tmp = -2438272073.2205153, tmp)%(tmp = 2142162105.4572072, tmp))-(tmp = 2259040711.6543813, tmp)))));
- assertEquals(39214588236996610, x *= (x<<(-401696127.06632423)));
- assertEquals(1, x /= x);
- assertEquals(0, x %= x);
- assertEquals(0, x *= ((tmp = -1709874807.176726, tmp)&(-2786424611)));
- assertEquals(-1320474063.3408537, x += (tmp = -1320474063.3408537, tmp));
- assertEquals(88, x >>>= (tmp = -3179247911.7094674, tmp));
- assertEquals(1606348131, x += ((tmp = 1555621121.5726175, tmp)|(-3026277110.9493155)));
- assertEquals(200793516, x >>>= x);
- assertEquals(-2952688672.1074514, x -= (tmp = 3153482188.1074514, tmp));
- assertEquals(1342278624, x >>>= ((x>>>((tmp = 1264475713, tmp)-(-913041544)))>>>((tmp = 2008379930, tmp)%(tmp = 3105129336, tmp))));
- assertEquals(0, x ^= x);
- assertEquals(0, x /= (tmp = 788363717, tmp));
- assertEquals(430466213, x -= (tmp = -430466213, tmp));
- assertEquals(164757385222499550, x *= (tmp = 382741735, tmp));
- assertEquals(164757385222499550, x %= (((tmp = 1974063648, tmp)%((806015603)>>>x))*((tmp = 2836795324, tmp)<<(tmp = -1785878767, tmp))));
- assertEquals(-190957725.86956096, x /= (x^((-2939333300.066044)-(x|(-2085991826)))));
- assertEquals(-190957725.86956096, x %= (tmp = -948386352, tmp));
- assertEquals(0.6457336106922105, x /= (-295722141));
- assertEquals(0, x |= ((415991250)&((x>>(tmp = -3188277823, tmp))<<(511898664.1008285))));
- assertEquals(0, x &= ((793238922)|x));
- assertEquals(-1576701979, x ^= (2718265317));
- assertEquals(-49271937, x >>= x);
- assertEquals(-49271937, x |= x);
- assertEquals(-49271937, x &= x);
- assertEquals(775316382, x -= (-824588319));
- assertEquals(912498176, x <<= (tmp = -2223542776.836312, tmp));
- assertEquals(0, x -= (x&((tmp = 1999412385.1074471, tmp)/(-1628205254))));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= (-768730139.7749677));
- assertEquals(-1861304245, x |= (((5128483)^(((tmp = -1768372004, tmp)/(x^(tmp = 1310002444.757094, tmp)))*((tmp = 188242683.09898067, tmp)^(tmp = -2263757432, tmp))))^((tmp = 2223246327, tmp)*((tmp = -2360528979, tmp)-((tmp = 2442334308, tmp)>>(458302081))))));
- assertEquals(1, x /= x);
- assertEquals(2, x += x);
- assertEquals(1, x /= x);
- assertEquals(0, x ^= x);
- assertEquals(-0, x *= (-1852374359.3930533));
- assertEquals(0, x <<= (tmp = 1223645195.148961, tmp));
- assertEquals(1789655087, x |= ((-2505312209.770559)>>x));
- assertEquals(-65568768, x <<= x);
- assertEquals(4229398528, x >>>= x);
- assertEquals(-8408187, x |= (-3029781627));
- assertEquals(-8408187, x |= (((2322165037)-((tmp = -1424506897.362995, tmp)%x))&x));
- assertEquals(-7884926, x += (x>>>(x|(2738095820))));
- assertEquals(-7884926, x %= (576507013));
- assertEquals(751801768, x ^= (tmp = -750241238, tmp));
- assertEquals(-1986010067668600800, x *= (tmp = -2641667195, tmp));
- assertEquals(1921196240, x ^= (x%(-1954178308)));
- assertEquals(847388880, x ^= ((tmp = 1632856124, tmp)&((tmp = -1536309755, tmp)<<(tmp = -3158362800, tmp))));
- assertEquals(-469662000.6651099, x += (tmp = -1317050880.6651099, tmp));
- assertEquals(-812358332, x ^= ((-2832480471)>>>(2016495937)));
- assertEquals(21, x ^= (((tmp = 1815603134.2513008, tmp)/((tmp = 147415927, tmp)%(-1059701742)))+x));
- assertEquals(-2844409139.792712, x += (tmp = -2844409160.792712, tmp));
- assertEquals(177070, x >>>= x);
- assertEquals(0, x %= x);
- assertEquals(0, x >>= x);
- assertEquals(1459126376, x ^= (tmp = -2835840920, tmp));
- assertEquals(1459126376, x %= (-1462864282));
- assertEquals(0, x >>>= (tmp = 2922724319, tmp));
- assertEquals(338995506, x ^= (338995506.6411549));
- assertEquals(336896258, x &= (2635904967));
- assertEquals(336634112, x -= (x&(tmp = 1659656287, tmp)));
- assertEquals(NaN, x %= (x-x));
- assertEquals(NaN, x /= (tmp = -674606200, tmp));
- assertEquals(NaN, x %= ((x|(2788108542))/(x+(tmp = 600941473, tmp))));
- assertEquals(0, x >>>= ((-1858251597.3970242)>>>x));
- assertEquals(1951294747, x |= (tmp = 1951294747, tmp));
- assertEquals(1951294747, x &= x);
- assertEquals(-153190625, x |= (-1500095737));
- assertEquals(23467367587890624, x *= x);
- assertEquals(346531290.1813514, x /= (((((-513617734.11148167)|x)/((tmp = -2042982150.1170752, tmp)%((x%((x%x)>>>(((-1369980151)&(((922678983)%(x&(tmp = -855337708, tmp)))-((tmp = -2717183760, tmp)>>>((1939904985.4701347)%(((tmp = -2473316858, tmp)&((tmp = -599556221.9046664, tmp)>>((tmp = -6352213, tmp)/x)))&x)))))%x)))/((tmp = -1842773812.8648412, tmp)>>>(((x>>>(tmp = 499774063, tmp))<<(((tmp = -1353532660.5755146, tmp)*(-3070956509))>>(((-905883994.0188017)>>(tmp = -16637173, tmp))<<((tmp = 471668537, tmp)*((tmp = -232036004.26637793, tmp)/x)))))&(tmp = 85227224, tmp))))))>>>(x|(-2528471983)))-((tmp = 1531574803, tmp)+((x>>>x)-(2889291290.158888)))));
- assertEquals(-94.42225749399837, x /= (((tmp = 2381634642.1432824, tmp)>>(tmp = -2637618935, tmp))|(2307200473)));
- assertEquals(-47, x >>= (1524333345.141235));
- assertEquals(-2.8699253616435082e-8, x /= (1637673252));
- assertEquals(0, x |= x);
- assertEquals(1083427040, x += ((-2012055268)<<(tmp = -2192382589.6911573, tmp)));
- assertEquals(1083427040, x %= (x*x));
- assertEquals(2694039776, x += ((((-1740065704.9004602)<<(-736392934))%(2781638048.424092))>>>(x&x)));
- assertEquals(-1600927520, x |= ((tmp = 2904430054.869525, tmp)*(((1054051883.4751332)*x)*((-939020743)-(tmp = 1636935481.1834455, tmp)))));
- assertEquals(-1600927520, x -= (x%x));
- assertEquals(3037584978216498700, x *= (tmp = -1897390694, tmp));
- assertEquals(372598954.1823988, x %= (tmp = 1553763703.5082102, tmp));
- assertEquals(-1476395008, x <<= ((x>>((tmp = 282496335.49494267, tmp)^((-1948623419.6947453)|((((((tmp = -1203306995, tmp)-(-5554612.355098486))>>>(1867254951.4836824))>>x)|(-695777865))/((-59122652.19377303)<<(-609999229.7448442))))))>>(x/(tmp = -1207010654.9993455, tmp))));
- assertEquals(-2.2540185787941605, x /= (((tmp = 1364159859.9199843, tmp)*x)>>x));
- assertEquals(-2, x |= x);
- assertEquals(2241824008, x *= ((3174055292.962967)>>(((-2379151623.602476)>>(tmp = -1423760236, tmp))>>(tmp = -522536019.2225733, tmp))));
- assertEquals(-2138158385, x ^= ((x>>((((1316131966.9180691)-((x*x)>>x))>>>x)>>((-2712430284)|(((((x<<(-616185937.6090865))-(((x-(tmp = 2957048661, tmp))<<(tmp = 617564839.888214, tmp))/(x%((tmp = -447175647.9393475, tmp)<<(2203298493.460617)))))-((x&((x<<(914944265))^(((-1294901094)*((tmp = 2512344795, tmp)+((((tmp = -1227572518, tmp)%(1831277766.4920158))*((x|x)^(tmp = 2515415182.6718826, tmp)))*x)))-(961485129))))>>>(tmp = 2079018304, tmp)))>>(tmp = 734028202, tmp))^(554858721.6149715)))))-((tmp = 1312985279.5114603, tmp)^(tmp = 2450817476.179955, tmp))));
- assertEquals(2.759030298237921, x /= (x|(tmp = -775901745.3688724, tmp)));
- assertEquals(8, x <<= x);
- assertEquals(NaN, x %= (((x&((1792031228.831834)>>(-1174912501)))%(((-2351757750)+(tmp = -2610099430, tmp))*(-2811655968)))*(x&(tmp = -1881632878, tmp))));
- assertEquals(0, x &= ((x*(616116645.7508612))^(2789436828.536846)));
- assertEquals(0, x *= x);
- assertEquals(35097452, x ^= ((tmp = 1023684579, tmp)%(((x|((tmp = -757953041, tmp)+(772988909)))+(tmp = -2934577578, tmp))>>>((tmp = -1973224283, tmp)>>>((x*(2244818063.270375))|(x-(-716709285)))))));
- assertEquals(0.015207441433418992, x /= (2307913014.4056892));
- assertEquals(-5865042.942076175, x -= (5865042.957283616));
- assertEquals(-67719.94207617454, x %= (((1464126615.2493973)+(398302030.0108756))>>>x));
- assertEquals(4294899577, x >>>= (x<<x));
- assertEquals(-1, x >>= (tmp = 607447902, tmp));
- assertEquals(-1, x >>= (3081219749.9119744));
- assertEquals(6.53694303504065e-10, x /= (tmp = -1529767040.4034374, tmp));
- assertEquals(6.53694303504065e-10, x %= ((tmp = 899070650.7190754, tmp)&(tmp = -1101166301, tmp)));
- assertEquals(6.53694303504065e-10, x %= (tmp = -2207346460, tmp));
- assertEquals(NaN, x %= (((x&x)>>x)%(((-10980184)+x)&(tmp = -1473044870.4729445, tmp))));
- assertEquals(NaN, x -= x);
- assertEquals(-1755985426, x ^= (tmp = 2538981870, tmp));
- assertEquals(-13842, x %= ((((-2258237411.3816605)+(-1325704332.0531585))<<((tmp = -877665450.1877053, tmp)>>(((((2420989037)+(2084279990.6278818))*(-327869571.9348242))+x)^x)))>>>x));
- assertEquals(1, x /= x);
- assertEquals(1, x >>= ((2241312290)^(2859250114)));
- assertEquals(0, x >>= x);
- assertEquals(-1615631756, x |= (-1615631756.1469975));
- assertEquals(-1615631756, x |= x);
- assertEquals(-627245056, x <<= ((x*(tmp = -1308330685.5971081, tmp))|(tmp = 1479586158, tmp)));
- assertEquals(-627245056, x |= x);
- assertEquals(1786953888, x ^= (-1340096352.1839824));
- assertEquals(1668014353, x -= (tmp = 118939535, tmp));
- assertEquals(1, x /= x);
- assertEquals(-645681, x ^= ((-1322356629)>>(tmp = 1829870283, tmp)));
- assertEquals(-1322354688, x <<= (-794779253));
- assertEquals(-4310084378.672725, x += (-2987729690.6727247));
- assertEquals(-8620168757.34545, x += x);
- assertEquals(-8720421, x |= (tmp = -748107877.6417065, tmp));
- assertEquals(-1508858270, x ^= (1500137913));
- assertEquals(-0.825735756765112, x /= (1827289490.1767085));
- assertEquals(1253449509.1742642, x += (((tmp = 1253449509.9576545, tmp)-(((tmp = 2860243975, tmp)+(367947569.85976696))>>(((((530960315)>>>((((x%(tmp = -2203199228, tmp))<<(x*(((tmp = -117302283, tmp)/(x-((2579576936)%(-1225024012))))&(tmp = -2857767500.1967726, tmp))))/((x/((tmp = -166066119, tmp)<<x))|x))>>>x))|(((2771852372)>>(((tmp = -3103692094.1463976, tmp)-(tmp = 2867208546.069278, tmp))>>>(702718610.1963737)))|(tmp = 2680447361, tmp)))>>x)>>(-2006613979.051014))))^((-1665626277.9339101)/(x<<(tmp = 342268763, tmp)))));
- assertEquals(1693336701.1742642, x += (tmp = 439887192, tmp));
- assertEquals(0.8479581831275719, x /= ((1171383583)+(((x&x)>>>(51482548.618915915))-(tmp = -825572595.1031849, tmp))));
- assertEquals(28, x |= ((tmp = -2355932919.6737213, tmp)>>(tmp = -2395605638, tmp)));
- assertEquals(0, x %= x);
- assertEquals(0, x -= x);
- assertEquals(0, x <<= (x^((tmp = 2793423893.484949, tmp)*(1585074754.3250475))));
- assertEquals(0, x >>= (x/(x-((957719861.9175875)&(1288527195)))));
- assertEquals(0, x >>>= ((-1429196921.4432657)/x));
- assertEquals(-852424225.734199, x -= (tmp = 852424225.734199, tmp));
- assertEquals(-46674433, x |= ((tmp = -2335242963, tmp)*((2135206646.2614377)>>(tmp = 505649511.8292929, tmp))));
- assertEquals(2944662357, x += (tmp = 2991336790, tmp));
- assertEquals(1404, x >>>= (849155189.1503456));
- assertEquals(-846755170, x ^= (tmp = -846753822.4471285, tmp));
- assertEquals(52615, x >>>= ((-517068110)+x));
- assertEquals(1475021859.9916897, x += (tmp = 1474969244.9916897, tmp));
- assertEquals(0, x %= x);
- assertEquals(0, x %= ((539583595.8244679)*(tmp = 1469751690.9193692, tmp)));
- assertEquals(0, x &= (807524227.2057163));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x -= (x^((tmp = -362481588, tmp)%(2611296227))));
- assertEquals(NaN, x *= x);
- assertEquals(0, x >>= ((-2519875630.999908)<<x));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x += (((tmp = 2485209575, tmp)>>(tmp = 2326979823, tmp))%(x-(((-1296334640.7476478)&x)<<x))));
- assertEquals(0, x >>= (((tmp = 1370704131, tmp)^((((tmp = 793217372.7587746, tmp)>>(((-1455696484.109328)|(((((-2186284424.5379324)<<(tmp = 3052914152.254852, tmp))-(x>>(tmp = 3121403408, tmp)))+((778194280)-(((((tmp = 2398957652, tmp)-(x+(((-2592019996.937958)>>((tmp = 1648537981, tmp)>>x))<<(-677436594))))<<(39366669.09012544))|((tmp = 3133808408.9582872, tmp)-(-2987527245.010673)))*x)))|((tmp = -2178662629, tmp)<<x)))^(((tmp = 909652440.3570575, tmp)%(-2572839902.6852217))%(-1879408081))))*(tmp = -2910988598, tmp))&(((x^x)>>(2822040993))|((x*x)^(((1072489842.6785052)|(x-(((464054192.7390214)^x)<<(tmp = -2754448095, tmp))))*((tmp = -1544182396, tmp)/(tmp = -3198554481, tmp)))))))^(tmp = 1946162396.9841106, tmp)));
- assertEquals(371272192, x |= (((x^((x-(x/x))&(tmp = 2370429394, tmp)))-(tmp = -403692829, tmp))*(tmp = 2808636109, tmp)));
- assertEquals(929786482, x |= ((729966239.8987448)^(x-((tmp = 120127779, tmp)^((tmp = -3088531385, tmp)>>>((x+((tmp = 2364833601, tmp)>>>(((599149090.6666714)>>(tmp = 2838821032, tmp))%(tmp = -662846011, tmp))))-(tmp = 1168491221.1813436, tmp)))))));
- assertEquals(-681121542, x += ((-1610909505.998718)^((tmp = -957338882, tmp)>>>(tmp = 1935594133.6531684, tmp))));
- assertEquals(-2147483648, x <<= ((tmp = 15161708, tmp)|(2453975670)));
- assertEquals(-2147483648, x >>= x);
- assertEquals(0, x <<= (2080486058));
- assertEquals(0, x &= (((x&(tmp = -767821326, tmp))/((tmp = 1877040536, tmp)>>>(tmp = 2378603217.75597, tmp)))*(-1601799835)));
- assertEquals(0, x %= (-1820240383));
- assertEquals(1621233920, x ^= ((tmp = 820230232, tmp)*(1727283900)));
- assertEquals(1621233920, x |= (x>>>x));
- assertEquals(1621233931, x += ((tmp = 794966194.9011587, tmp)>>(tmp = -597737830.5450518, tmp)));
- assertEquals(1621276543, x |= (((x^((2354444886)+(tmp = 685142845.4708651, tmp)))-(tmp = 790204976.9120214, tmp))>>>((((tmp = -2792921939, tmp)/(((((tmp = -80705524, tmp)<<x)-(((((((tmp = 1951577216.379527, tmp)>>>x)%((-529882150)>>>(tmp = -1682409624, tmp)))<<((-42043756.29025769)-(-1803729173.6855814)))/(2937202170.118023))*(tmp = -1998098798.5722106, tmp))*(tmp = -2996229463.904228, tmp)))&x)>>>(-301330643)))/(-2858859382.0050273))-(tmp = 1571854256.0740635, tmp))));
- assertEquals(810638271, x >>>= (x/(1553632833)));
- assertEquals(810638271, x <<= (tmp = -1467397440, tmp));
- assertEquals(-2147483648, x <<= x);
- assertEquals(871068871, x ^= (tmp = 3018552519, tmp));
- assertEquals(-1073743881, x |= ((tmp = 2294122324.020989, tmp)|(tmp = -1799706842.4493146, tmp)));
- assertEquals(-77816868, x += (((-2225296403)&x)>>(tmp = -2667103424.445239, tmp)));
- assertEquals(-1215889, x >>= (tmp = 1876107590.8391647, tmp));
- assertEquals(-2431778, x += x);
- assertEquals(4292535518, x >>>= (((x>>(-1825580683))/x)%x));
- assertEquals(4292802560, x -= (x|(1492864090)));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x %= (tmp = 2173121205, tmp));
- assertEquals(0, x *= (x>>x));
- assertEquals(1565261471, x |= ((1565261471.323931)>>>x));
- assertEquals(0, x -= x);
- assertEquals(-86980804, x |= (-86980804));
- assertEquals(-698956484, x -= (((((2754713793.1746016)*(((((-1514587465.0698888)>>(tmp = -1307050817, tmp))/(tmp = 2368054667.438519, tmp))*(-1908125943.5714772))<<(x>>>(-357164827.4932244))))+(1257487617))<<(2954979945))&(612330472)));
- assertEquals(-1073741824, x <<= x);
- assertEquals(54497747, x ^= (-1019244077.098908));
- assertEquals(54501375, x |= (((tmp = 1944912427, tmp)>>>x)%x));
- assertEquals(0, x -= x);
- assertEquals(0, x -= x);
- assertEquals(-0, x *= (-1748215388));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>>= (((tmp = 988769112, tmp)%(tmp = -3133658477, tmp))<<x));
- assertEquals(0, x %= (1685221089.2950323));
- assertEquals(0, x >>>= (x+((793467168)-(tmp = 135877882, tmp))));
- assertEquals(0, x %= ((tmp = -2406801984, tmp)%(tmp = -987618172, tmp)));
- assertEquals(0, x *= ((-2943444887.953456)|(tmp = -2327469738.4544783, tmp)));
- assertEquals(0, x >>= x);
- assertEquals(-145484729.70167828, x += (tmp = -145484729.70167828, tmp));
- assertEquals(1140855872, x &= (x^(tmp = 3151437967.965556, tmp)));
- assertEquals(1486808408, x += (tmp = 345952536, tmp));
- assertEquals(107846582.36594129, x %= (-1378961825.6340587));
- assertEquals(-642031616, x <<= (x+x));
- assertEquals(151747770.95108718, x *= (x/(tmp = 2716379907, tmp)));
- assertEquals(192723456, x <<= (tmp = -1731167384, tmp));
- assertEquals(2151208003, x -= ((-2151208003)+x));
- assertEquals(1, x /= x);
- assertEquals(1, x |= x);
- assertEquals(1996766603, x |= (1996766602));
- assertEquals(895606123, x ^= (tmp = 1113972960.966081, tmp));
- assertEquals(-1500036886, x ^= (tmp = 2482412929, tmp));
- assertEquals(-1542644247, x ^= (x>>>((tmp = 51449105, tmp)>>>(((-2057313176)*x)/(-1768119916)))));
- assertEquals(-1496074063273093600, x *= ((tmp = 786152274, tmp)^(387292498)));
- assertEquals(-794329073, x %= (((tmp = -2314637675.617696, tmp)*((((x*(411053423.29070306))-(2889448433.4240828))/((-970630131)/(tmp = -2886607600.7423067, tmp)))<<(tmp = 1263617112.9362245, tmp)))|(2816980223.8209996)));
- assertEquals(2468008436047106600, x *= (tmp = -3107035257.725115, tmp));
- assertEquals(3040956928, x >>>= ((tmp = 1514372119.1787262, tmp)*(3169809008)));
- assertEquals(-19, x >>= (tmp = -266966022.10604453, tmp));
- assertEquals(-1.6505580654964654e-8, x /= ((-3143841480)>>(x-x)));
- assertEquals(-2.2420284729165577e-7, x *= (x*((((703414102.2523813)%(tmp = 2989948152, tmp))-((-1583401827.2949386)^((tmp = -1916731338, tmp)%((331500653.3566053)|(((tmp = 29865940, tmp)+((tmp = -2294889418.6764183, tmp)<<(tmp = -1558629267.255229, tmp)))>>>(x*(x+x)))))))|((988977957)&(-2986790281)))));
- assertEquals(0, x ^= (x/(tmp = 781117823.345541, tmp)));
- assertEquals(NaN, x *= (((x^((((tmp = -2969290335, tmp)+(((((tmp = -175387021, tmp)&(tmp = -1080807973, tmp))<<(tmp = -2395571076.6876855, tmp))|((tmp = -1775289899.4106793, tmp)^x))|(-2963463918)))*(tmp = -1761443911, tmp))^(tmp = 847135725, tmp)))<<((146689636)<<x))%x));
- assertEquals(0, x ^= x);
- assertEquals(1720182184, x -= (((tmp = 3184020508, tmp)|((-489485703)+(tmp = -2644503573, tmp)))&(tmp = 2575055579.6375213, tmp)));
- assertEquals(1720182184, x >>= (x<<(-45408034)));
- assertEquals(5.759243187540471e+27, x *= (((x&(1456298805))+(x<<(106573181)))*((566861317.2877743)+(2262937360.3733215))));
- assertEquals(5.759243187540471e+27, x -= (tmp = -1365873935, tmp));
- assertEquals(0, x <<= x);
- assertEquals(0, x >>= (1960073319.3465362));
- assertEquals(0, x <<= x);
- assertEquals(560463904, x += ((tmp = 1844076589.9286406, tmp)&((((((-691675777.5800121)|(-745631201))|x)+(tmp = 1504458593.2843904, tmp))-x)<<x)));
- assertEquals(-513210271, x -= (x|(1052702623.7761713)));
- assertEquals(3781757025, x >>>= ((-1346666404.362477)*(tmp = 2798191459, tmp)));
- assertEquals(1080100929, x &= (1122097879.882534));
- assertEquals(1276833905.8093092, x *= ((1276833905.8093092)/x));
- assertEquals(1276833905.8093092, x %= (1796226525.7152414));
- assertEquals(1276833905, x <<= (((tmp = -491205007.83412814, tmp)*(tmp = 1496201476.496839, tmp))>>(x+((tmp = -854043282.114594, tmp)-((x|(tmp = -807842056, tmp))*x)))));
- assertEquals(1276833905, x %= (((-1870099318)>>>(((tmp = -2689717222, tmp)/(248095232))/(tmp = 1036728800.5566598, tmp)))&(((((857866837)>>(tmp = 3034825801.740485, tmp))|(-1676371984))>>>(x<<x))%((-3035366571.0221004)*(1578324367.8819473)))));
- assertEquals(1, x /= x);
- assertEquals(2819223656.189109, x += (2819223655.189109));
- assertEquals(-1475743640, x >>= (((tmp = 2586723314.38089, tmp)/(x&(tmp = -697978283.9961061, tmp)))<<(x%((-1167534676)>>(x^((tmp = -284763535, tmp)*((x%x)&((((tmp = 2916973220.726839, tmp)%x)/(tmp = -1338421209.0621986, tmp))|((tmp = -834710536.803335, tmp)%x)))))))));
- assertEquals(-3267683406, x -= (tmp = 1791939766, tmp));
- assertEquals(-2090420900700614100, x *= (639725653));
- assertEquals(-1540353536, x %= ((-1800269105)<<((((x&(((tmp = 1135087416.3945065, tmp)^(613708290))>>x))>>(tmp = -1234604858.7683473, tmp))^(2404822882.7666225))>>>((tmp = -287205516, tmp)-((1648853730.1462333)^((x+(x%((tmp = 359176339, tmp)%((2856479172)<<(tmp = -1995209313, tmp)))))^(((tmp = 2857919171.839304, tmp)>>>(tmp = 2779498870, tmp))>>x)))))));
- assertEquals(-2093767030, x ^= (654554250.498078));
- assertEquals(1, x >>>= ((tmp = -166296226.12181997, tmp)^(x/x)));
- assertEquals(-1487427474, x -= ((x<<x)|(1487427475.4063978)));
- assertEquals(-1487427470.562726, x += ((-1226399959.8267038)/((tmp = 2172365551, tmp)<<x)));
- assertEquals(-3457859227618939400, x *= (tmp = 2324724597.3686075, tmp));
- assertEquals(396221312, x >>= (-1354035390));
- assertEquals(0, x %= x);
- assertEquals(0, x &= (tmp = 2733387603, tmp));
- assertEquals(1485905453, x |= ((((tmp = -1321532329.304437, tmp)&((((tmp = 1817382709.4180388, tmp)%(((tmp = 2089156555.7749293, tmp)-(-1555460267))|(tmp = 717392475.9986715, tmp)))%(tmp = 1976713214, tmp))^x))>>>x)+(tmp = -2812404197.002721, tmp)));
- assertEquals(1485905453, x |= x);
- assertEquals(-997658264, x <<= (-1409757949.6038744));
- assertEquals(-997657290, x -= ((-2041106361)>>(tmp = -2014750507, tmp)));
- assertEquals(-2138512124, x &= (tmp = 2565597060, tmp));
- assertEquals(8422400, x &= ((-2819342693.5172367)*(tmp = 1441722560, tmp)));
- assertEquals(111816531.81703067, x -= (-103394131.81703067));
- assertEquals(59606682.673836395, x *= ((tmp = -1451690098, tmp)/(x-(2835050651.717734))));
- assertEquals(-119213365.34767279, x *= (x|((-2656365050)/((-66180492)+(tmp = 284225706.32323086, tmp)))));
- assertEquals(-232839, x >>= (1694344809.435083));
- assertEquals(-1, x >>= x);
- assertEquals(1, x *= x);
- assertEquals(1, x |= x);
- assertEquals(0, x >>= (tmp = 397239268, tmp));
- assertEquals(-1525784563, x -= (tmp = 1525784563, tmp));
- assertEquals(-153.62740888512675, x /= (((tmp = -2040622579.5354173, tmp)*(tmp = -1149025861.549324, tmp))%(((tmp = 2981701364.0073133, tmp)*(tmp = 2993366361, tmp))|(x|(tmp = 1800299979, tmp)))));
- assertEquals(-1671795135, x &= (-1671795135.6173766));
- assertEquals(-4253, x |= ((((x*((1533721762.8796673)<<((tmp = 1026164775.0081646, tmp)<<x)))<<(((x-((((x>>((((((tmp = -481536070.7067797, tmp)&(tmp = 1663121016, tmp))>>>(-2974733313.5449667))+(tmp = -493019653, tmp))>>x)&(tmp = 879307404.8600142, tmp)))>>>x)%(x-(tmp = -1806412445.788453, tmp)))%x))<<(x<<(x+x)))+x))>>((tmp = -332473688.28477216, tmp)<<((tmp = 1701065928, tmp)+(((((tmp = -2407330783, tmp)+x)-((tmp = 584100783, tmp)%(tmp = -3077106506, tmp)))^x)>>x))))<<x));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>>= (1578470476.6074834));
- assertEquals(0, x >>>= (974609751));
- assertEquals(-120, x += (x-((tmp = -245718438.0842378, tmp)>>>(tmp = -1870354951, tmp))));
- assertEquals(-6.134465505515781e-8, x /= (1956160645));
- assertEquals(-0, x %= x);
- assertEquals(0, x *= (tmp = -399718472.70049024, tmp));
- assertEquals(-1803198769.8413258, x += (-1803198769.8413258));
- assertEquals(988624943, x ^= ((((tmp = 320776739.5608537, tmp)*(((tmp = -983452570.3150327, tmp)^x)&(tmp = -3181597938, tmp)))-(tmp = -1367913740.9036021, tmp))/(((tmp = -535854933.2943456, tmp)-(717666905.8122432))>>>(((((x^(tmp = 380453258.60062766, tmp))^(tmp = -1242333929, tmp))/((tmp = 1072416261, tmp)+(((2090466933)*(x*(tmp = -386283072, tmp)))|((tmp = 789259942, tmp)<<(tmp = -1475723636.1901488, tmp)))))>>>x)%((x>>(tmp = -1243048658.3818703, tmp))|((((((tmp = -619553509, tmp)|x)/(878117279.285609))|((x<<(x>>>(tmp = -749568437.7390883, tmp)))*x))/(tmp = 1674804407, tmp))-(x*(tmp = 1528620873, tmp))))))));
- assertEquals(988625135, x |= (x>>>(tmp = 2402222006, tmp)));
- assertEquals(988625135, x %= (-2691094165.990094));
- assertEquals(0, x %= x);
- assertEquals(-0, x *= (tmp = -1409904262, tmp));
- assertEquals(-0, x /= ((1176483512.8626208)<<x));
- assertEquals(0, x &= ((((1677892713.6240005)^(tmp = 2575724881, tmp))^(tmp = -2935655281.208194, tmp))*(216675668)));
- assertEquals(0, x >>= (tmp = -1296960457, tmp));
- assertEquals(0, x |= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x <<= (x>>(-3127984289.9112387)));
- assertEquals(0, x %= ((tmp = 190018725.45957255, tmp)<<((x>>>x)/x)));
- assertEquals(0, x /= (1185681972));
- assertEquals(0, x &= ((tmp = -1285574617, tmp)>>x));
- assertEquals(0, x >>>= ((tmp = 2498246277.2054763, tmp)+(((tmp = 924534435, tmp)&x)>>(tmp = 1379755429, tmp))));
- assertEquals(0, x -= x);
- assertEquals(0, x /= (3093439341));
- assertEquals(0, x *= (x>>>x));
- assertEquals(0, x &= (tmp = 551328367, tmp));
- assertEquals(-0, x /= (-3153411714.834353));
- assertEquals(1217585288, x ^= (tmp = -3077382008.637764, tmp));
- assertEquals(-639702017, x |= ((tmp = -640922633, tmp)%(tmp = -879654762, tmp)));
- assertEquals(-1645297680, x <<= (tmp = 1418982820.8182912, tmp));
- assertEquals(-1.4059558868398736, x /= (1170234212.4674253));
- assertEquals(-2650856935.66554, x *= (1885448157));
- assertEquals(1326259953.26931, x *= (((x>>(x|(-496195134.78045774)))+((2029515886)%(tmp = 1148955580, tmp)))/(tmp = -1760016519, tmp)));
- assertEquals(0, x &= (((((-273334205)+(tmp = 797224093.682485, tmp))/x)>>>((((tmp = -887577414, tmp)/x)+x)%(tmp = 720417467, tmp)))^(((x-(tmp = -309071035, tmp))>>(-3123114729.33889))/x)));
- assertEquals(0, x ^= x);
- assertEquals(0, x %= ((tmp = -2243857462, tmp)/((((((2642220700.6673346)&x)*(tmp = 1454878837, tmp))|((-25825087.30002737)%(851535616.3479034)))<<(tmp = -697581582, tmp))%(tmp = 2248990486, tmp))));
- assertEquals(0, x >>= (((x|(((tmp = -220437911, tmp)&((((255690498)*(((2993252642)>>>(tmp = 300426048.0338713, tmp))>>x))&((-364232989)+(x<<(-1824069275))))%(x+(tmp = 2696406059.026349, tmp))))+((tmp = 2911683270, tmp)/(tmp = 2718991915, tmp))))*(x/(((tmp = -982851060.0744538, tmp)^((-2903383954)<<((-85365803.80553412)^x)))%(1489258330.5730634))))>>>x));
- assertEquals(0.7805921633088815, x += (((-1886920875)/(-2417294156.5304217))%(tmp = -1176793645.8923106, tmp)));
- assertEquals(0, x <<= x);
- assertEquals(-2215008905, x -= (2215008905));
- assertEquals(1931542900, x &= (-215923724.72133207));
- assertEquals(907191462, x ^= (-3133954606.357727));
- assertEquals(453595731, x >>>= (((tmp = 2726241550, tmp)/(tmp = -332682163, tmp))*((((tmp = 2500467531, tmp)>>>(((x<<(tmp = -1847200310.4863105, tmp))/x)^x))+x)<<(191688342.22953415))));
- assertEquals(-0.21671182880645923, x /= ((((-1169180683.1316955)%x)>>>(1650525418))^((2198033206.797462)&((-6913973.910871983)%(1758398541.8440342)))));
- assertEquals(-375102237.1603561, x += (tmp = -375102236.9436443, tmp));
- assertEquals(1, x &= (((84374105.89811504)|((tmp = -2480295008.926951, tmp)>>((605043461)>>(tmp = -2495122811, tmp))))>>(-2129266088)));
- assertEquals(1, x |= x);
- assertEquals(0.0000024171579540208214, x /= (((-2600416098)>>(-2076954196))^x));
- assertEquals(0.0000024171579540208214, x %= (tmp = -2632420148.815531, tmp));
- assertEquals(1809220936.0126908, x -= (-1809220936.0126884));
- assertEquals(1682452118.2686126, x += (((2358977542)<<(x/(tmp = -2862107929, tmp)))+(x+(x%((-3101674407)/(((x*((x>>(tmp = 630458691.3736696, tmp))>>>(tmp = -852137742, tmp)))/x)-((-1875892391.1022017)&(tmp = -1027359748.9533749, tmp))))))));
- assertEquals(1682452118, x <<= (((tmp = -80832958.07816291, tmp)>>x)%(x-((x^(x<<(tmp = -156565345, tmp)))|((tmp = -1208807363.727137, tmp)/(tmp = 2614737513.304538, tmp))))));
- assertEquals(6572078, x >>= (-1573364824));
- assertEquals(13144156, x += x);
- assertEquals(1731678184, x ^= ((tmp = 593370804.9985657, tmp)|(-3124896848.53273)));
- assertEquals(845545, x >>>= (tmp = -605637621.2299933, tmp));
- assertEquals(-1383361088, x ^= (tmp = -1383632087, tmp));
- assertEquals(-82545896480031520, x += ((x+(1023183845.7316296))*((((tmp = 576673669, tmp)>>(((-584800080.1625061)/(2388147521.9174623))+((((x>>>(-905032341.5830328))^(tmp = -2170356357, tmp))-x)+((136459319)+(-1799824119.689473)))))|x)&(tmp = -2688743506.0257063, tmp))));
- assertEquals(-895206176, x |= x);
- assertEquals(-0, x %= x);
- assertEquals(1791306023, x ^= ((tmp = -3219480856, tmp)+(tmp = 715819582.0181161, tmp)));
- assertEquals(1791306023, x &= x);
- assertEquals(2725167636753240600, x *= (1521330025));
- assertEquals(-281190679, x |= (tmp = -1422045975.798171, tmp));
- assertEquals(-281190679, x += (x%x));
- assertEquals(-2342097426.906673, x -= (tmp = 2060906747.906673, tmp));
- assertEquals(-4651462701.906673, x -= (2309365275));
- assertEquals(1878, x >>>= (2544974549.345834));
- assertEquals(1964, x += (x&((1067649861)>>(182139255.7513579))));
- assertEquals(2209, x += (x>>(tmp = -1775039165, tmp)));
- assertEquals(0, x -= x);
- assertEquals(-0, x /= (tmp = -1634697185, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(0, x >>>= ((tmp = 3075747652, tmp)&(tmp = 819236484, tmp)));
- assertEquals(0, x /= ((1276203810.476657)%(-2434960500.784484)));
- assertEquals(0, x >>>= (tmp = -503633649, tmp));
- assertEquals(-982731931, x |= (-982731931));
- assertEquals(-1965463862, x += x);
- assertEquals(-0.221469672913716, x %= ((tmp = -1742292120, tmp)/x));
- assertEquals(-0.221469672913716, x %= (-2021391941.1839576));
- assertEquals(0, x <<= (((((tmp = -2802447851, tmp)>>((2534456072.6518855)&x))%(tmp = 2841162496.610816, tmp))<<((89341820)/(2565367990.0552235)))>>(tmp = 2700250984.4830647, tmp)));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>= ((tmp = -636189745, tmp)>>>(x/(((tmp = 2634252476, tmp)%(2026595795))>>(tmp = -2048078394.743723, tmp)))));
- assertEquals(NaN, x %= ((x%((((x%((tmp = -2583207106, tmp)&x))|(190357769))<<(tmp = 595856931.2599536, tmp))%x))*((-2433186614.6715775)<<((2856869562.1088696)^(tmp = 1112328003, tmp)))));
- assertEquals(1621713910, x |= (tmp = 1621713910.0282416, tmp));
- assertEquals(3243427820, x += x);
- assertEquals(0, x *= (x&(x-x)));
- assertEquals(0, x >>>= (((2871235439)<<((x+((tmp = -1319445828.9659343, tmp)+(tmp = 1595655077.959171, tmp)))>>(tmp = -86333903, tmp)))-(x/(2907174373.268768))));
- assertEquals(0, x >>= (-1091774077.2173789));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x *= (tmp = 1976023677.7015994, tmp));
- assertEquals(NaN, x -= (-3013707698));
- assertEquals(NaN, x += ((x+(((tmp = -3119865782.9691515, tmp)<<(1327383504.0158405))^(((-143382411.7239611)>>>((-2157016781)+(((-335815848)/x)<<(tmp = 1953515427, tmp))))&(-2715729178))))/(413738158.2334299)));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x += (-845480493));
- assertEquals(-789816013, x |= (tmp = -789816013.129916, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= (3032573320));
- assertEquals(47630, x ^= ((1086705488)%((x^(tmp = -1610832418, tmp))>>>(tmp = 1136352558, tmp))));
- assertEquals(47630, x >>= (tmp = 1035320352.4269229, tmp));
- assertEquals(47630, x >>= ((((x^x)<<(x*((((x&((-1657468419)*((tmp = -674435523, tmp)&((tmp = 2992300334, tmp)|x))))*((tmp = -489509378.31950426, tmp)*(tmp = 2276316053, tmp)))>>>x)<<x)))%(tmp = -1209988989, tmp))/(tmp = -2080515253.3541622, tmp)));
- assertEquals(3192518951.8129544, x += (3192471321.8129544));
- assertEquals(648116457.8129544, x %= (-2544402494));
- assertEquals(0, x -= x);
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x /= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x *= (tmp = 30051865, tmp));
- assertEquals(0, x ^= ((x&(((x&x)>>>(((((((x+(2319551861.0414495))>>>(tmp = -3099624461, tmp))^((((tmp = 1574312763, tmp)|x)>>>((-2723797246)&(tmp = -1993956152, tmp)))|(-1830179045)))|(((((((-2545698704.3662167)>>>x)-(((-79478653)|x)%(x+(x>>((tmp = 2386405508.2180576, tmp)/x)))))>>((((-1947911815.2808042)*((x+(368522081.2884482))-(tmp = 2452991210, tmp)))>>(343556643.1123545))>>((((tmp = 1869261547.537739, tmp)>>(3193214755))|x)&(x*(2027025120)))))<<((-1149196187)>>>(814378291.8374172)))+((((((((-160721403)/(2079201480.2186408))+((x|((((tmp = -299595483.16805863, tmp)>>>((x|((x+x)/(-2359032023.9366207)))<<(tmp = -3095108545, tmp)))>>((tmp = -1547963617.9087071, tmp)*(x>>x)))&((tmp = -1568186648.7499216, tmp)+(((2646528453)^(-2004832723.0506048))>>>(tmp = -3188715603.921877, tmp)))))+(tmp = 1578824724, tmp)))^x)^x)/(tmp = -985331362, tmp))|(tmp = 445135036, tmp))<<(tmp = -73386074.43413758, tmp)))+(((-1674995105.9837937)-(tmp = 1392915573, tmp))>>x)))%(tmp = 1215953864, tmp))&((tmp = -439264643.5238693, tmp)>>>x))+(((tmp = 2311895902, tmp)|(1604405793.6399229))&(tmp = -565192829, tmp))))-x))>>(-2455985321)));
- assertEquals(0, x %= ((1177798817)>>(tmp = 2081394163.5420477, tmp)));
- assertEquals(0, x >>>= ((x^(tmp = -41947528.33954811, tmp))>>(x>>>((tmp = 1367644771, tmp)+x))));
- assertEquals(0, x %= ((x+((tmp = 163275724, tmp)<<((tmp = -514460883.3040788, tmp)+x)))|(tmp = -287112073.2482593, tmp)));
- assertEquals(0, x &= (3067975906));
- assertEquals(201342051, x |= (tmp = 201342051, tmp));
- assertEquals(0, x %= (((((-2580351108.8990865)<<(tmp = 2675329316, tmp))&((1338398946)%((-1548041558)+((x>>(-1568233868.7366815))|((x>>((tmp = -1064582207, tmp)/(-1062237014)))>>(tmp = 854123209, tmp))))))<<(((989032887)*(1842748656))%(tmp = -1566983130, tmp)))-x));
- assertEquals(-0, x /= (tmp = -828519512.617768, tmp));
- assertEquals(0, x &= ((((1449608518)+(-1829731972))*(1828894311))*(((tmp = -1121326205.614264, tmp)^(-2057547855))<<(tmp = -2758835896, tmp))));
- assertEquals(NaN, x %= ((tmp = -2138671333, tmp)%x));
- assertEquals(0, x &= x);
- assertEquals(665568613.0328879, x += (665568613.0328879));
- assertEquals(317, x >>= (2627267349.735873));
- assertEquals(0, x -= x);
- assertEquals(0, x &= (((tmp = 3030611035, tmp)*(((tmp = 476143340.933007, tmp)>>(x-(2238302130.2331467)))|(x|x)))%(tmp = 320526262, tmp)));
- assertEquals(0, x <<= (tmp = 729401206, tmp));
- assertEquals(0, x >>>= (1721412276));
- assertEquals(217629949.3530736, x += ((tmp = 217629949.3530736, tmp)%((-931931100.601475)%(x^(tmp = -2149340123.548764, tmp)))));
- assertEquals(217629949.3530736, x %= (tmp = 2275384959.4243402, tmp));
- assertEquals(0, x >>>= (1112677437.5524077));
- assertEquals(0, x *= (500256656.7476063));
- assertEquals(0, x >>>= x);
- assertEquals(0, x -= x);
- assertEquals(0, x -= x);
- assertEquals(0, x &= (-1076968794));
- assertEquals(0, x /= (tmp = 1774420931.0082943, tmp));
- assertEquals(0, x |= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x %= (-2978890122.943079));
- assertEquals(-0, x /= (tmp = -2954608787, tmp));
- assertEquals(-800048201, x ^= ((tmp = -800048201.7227018, tmp)>>>((-2016227566.1480863)/(tmp = -2263395521, tmp))));
- assertEquals(3333, x >>>= (-2038839052));
- assertEquals(487957736.625432, x += (487954403.625432));
- assertEquals(-1650983426, x |= (2643918270));
- assertEquals(-1861867448, x &= (tmp = -251254199.12813115, tmp));
- assertEquals(-7.934314690172143e-18, x %= ((((x^(-703896560.6519544))>>(tmp = -1853262409, tmp))/(tmp = -1168012152.177894, tmp))/(tmp = 837616075.1097361, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x &= (tmp = -2328150260.5399947, tmp));
- assertEquals(-1954860020, x |= (tmp = 2340107276, tmp));
- assertEquals(-1954860020, x >>= ((tmp = 159177341, tmp)*(x&(-705832619))));
- assertEquals(-1954895727, x -= (x>>>((-1443742544.7183702)^((((tmp = 869581714.0137681, tmp)+x)^((x%(tmp = -1036566362.5189383, tmp))^(x%x)))>>x))));
- assertEquals(1.0241361338078498, x /= (tmp = -1908824093.2692068, tmp));
- assertEquals(16777216, x <<= (x*(((-1925197281)^(tmp = -1392300089.4750946, tmp))|x)));
- assertEquals(-225882765524992, x *= (tmp = -13463662, tmp));
- assertEquals(-1845493760, x |= x);
- assertEquals(-1845493760, x %= (tmp = 3181618519.786825, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x /= (x>>>x));
- assertEquals(NaN, x %= (((((tmp = -521176477, tmp)>>(((tmp = 370693623, tmp)/(((tmp = -1181033022.4136918, tmp)>>(x|(x*(2601660441))))+(tmp = -1696992780, tmp)))|(x|(-1197454193.198036))))>>>(((2512453418.3855605)+((((((tmp = 799501914, tmp)&(((1788580469.7069902)*(((((1476778529.5109258)<<(tmp = -1873387738.3541565, tmp))-((tmp = -521988584.7945764, tmp)*(-1598785351.3914914)))&(-1899161721.8061454))&((x/x)*(690506460))))>>>((tmp = 2255896398.840741, tmp)>>((tmp = -1331486014.6180065, tmp)+(-1159698058.534132)))))*((1112115365.2633948)&((x>>((x>>(-784426389.4693215))&(-492064338.97227573)))>>x)))^((x-((tmp = 2986028023, tmp)>>(tmp = 2347380320.00517, tmp)))*(tmp = -1463851121, tmp)))*(tmp = -1059437133, tmp))%(x-(tmp = 1238739493.7636225, tmp))))^(2029235174)))*(-1923899530))>>>x));
- assertEquals(0, x >>>= (2848792983.510682));
- assertEquals(0, x >>= (((tmp = 3042817032.705198, tmp)>>>x)&((((tmp = -829389221, tmp)-((2669682285.8576303)+(tmp = 1812236814.3082042, tmp)))^x)%((tmp = -2401726554, tmp)^((tmp = 2464685683, tmp)|(-2685039620.224061))))));
- assertEquals(2069649722, x |= (2069649722.311271));
- assertEquals(NaN, x %= (((((-68757739.39282179)&(-1382816369))/(3122326124))<<(x-(-507995800.3369653)))<<(((-1962768567.343907)+((tmp = 1357057125, tmp)/x))^(tmp = 1997617124, tmp))));
- assertEquals(NaN, x += x);
- assertEquals(0, x >>= (26895919));
- assertEquals(0, x >>>= x);
- assertEquals(0, x %= (tmp = 1092448030, tmp));
- assertEquals(0, x <<= (tmp = -477672441.46258235, tmp));
- assertEquals(0, x /= (2113701907));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x /= x);
- assertEquals(1341078673, x |= (-2953888623));
- assertEquals(1341078673, x &= x);
- assertEquals(0, x %= x);
- assertEquals(414817852.151006, x -= (-414817852.151006));
- assertEquals(1006632960, x <<= ((((((126465614.8316778)+(x-(2511803375)))+(tmp = 1620717148.352402, tmp))*x)/(tmp = -3013745105.5275207, tmp))-((tmp = -418034061.6865432, tmp)/(-300492911))));
- assertEquals(1055624813, x |= (tmp = 921407085, tmp));
- assertEquals(-3, x |= ((((tmp = 1382397819.7507677, tmp)+(tmp = -111851147.7289567, tmp))+x)/((tmp = 247980405.7238742, tmp)^(tmp = -592156399.8577058, tmp))));
- assertEquals(35161, x &= (((((((-2973570544.725141)*(tmp = -1244715638, tmp))+x)<<(x/((x>>>(-2143371615.073137))/(226072236))))%((x-(tmp = 1971392936, tmp))^(tmp = 2653103658, tmp)))%((tmp = 2828319571.7066674, tmp)>>((1528970502)^((tmp = -55869558, tmp)%x))))>>(889380585.6738582)));
- assertEquals(0, x ^= x);
- assertEquals(0, x *= (2749718750));
- assertEquals(0, x >>>= ((((-1633495402.6252813)*(tmp = 2943656739.1108646, tmp))+(tmp = 977432165, tmp))&((tmp = -2338132019, tmp)*(408176349.8061733))));
- assertEquals(-1778794752, x -= (((tmp = -1391412154.5199084, tmp)-((-3172342474)|x))&(1854366052)));
- assertEquals(-1778794752, x %= (tmp = 2024807296.6901965, tmp));
- assertEquals(-1114410.466337204, x %= ((tmp = -240344444.24487805, tmp)%(-47661164)));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>= (x>>x));
- assertEquals(0, x *= x);
- assertEquals(0, x /= ((-3134902611)|(tmp = -3131158951, tmp)));
- assertEquals(-0, x /= (((tmp = 1430247610.634234, tmp)&x)+((tmp = -2047191110.8623483, tmp)-((((x%((((x/(tmp = -2599234213, tmp))|(tmp = 2650380060, tmp))|x)+x))>>>x)&(-1961373866))<<x))));
- assertEquals(-718394682, x -= ((x|(tmp = 1764417670.8577194, tmp))%(1046022988)));
- assertEquals(3576572614, x >>>= (((tmp = 2480472883.078992, tmp)<<x)>>((2035208402.8039393)&(tmp = 492980449, tmp))));
- assertEquals(434034142, x %= (x&((x>>>(311110449.48751545))|(-243530647))));
- assertEquals(524703439.3065736, x += (((tmp = 1392771723.3065736, tmp)%(x&x))%(tmp = -2199704930, tmp)));
- assertEquals(373686272, x &= (x<<((tmp = 2103372351.9456532, tmp)%(tmp = -1367109519, tmp))));
- assertEquals(373686272, x >>= x);
- assertEquals(-0.12245430020241108, x /= (tmp = -3051638622.5907507, tmp));
- assertEquals(1, x /= x);
- assertEquals(1, x %= (3095983855));
- assertEquals(-1454736871, x ^= (x*(tmp = -1454736872, tmp)));
- assertEquals(-1454736866, x ^= (((724989405.7338341)|(tmp = -2834298786.384371, tmp))>>>(tmp = -2029602148.1758833, tmp)));
- assertEquals(-1454736866, x &= x);
- assertEquals(-197394432, x <<= (tmp = -1562128975, tmp));
- assertEquals(251658240, x <<= (tmp = 2126510950, tmp));
- assertEquals(3295700610.703306, x -= (tmp = -3044042370.703306, tmp));
- assertEquals(-51152917, x |= ((949179883.1784958)|(((tmp = -2046168220, tmp)>>(x/x))/(((835064313)*(tmp = 2197600689, tmp))^(((tmp = 2717104216, tmp)&x)<<(-1402661995.3845913))))));
- assertEquals(-1549204421, x ^= ((((tmp = -481013711, tmp)>>>((tmp = 119589341.80209589, tmp)%(-995489985.2905662)))-(635717011))^(x+(x*x))));
- assertEquals(-1078356672.3999934, x += (470847748.6000067));
- assertEquals(1484987268.4638166, x += (tmp = 2563343940.86381, tmp));
- assertEquals(277020804, x &= (tmp = 2532819117, tmp));
- assertEquals(-2097118208, x <<= (x>>>x));
- assertEquals(-2147483648, x <<= (tmp = 761285045, tmp));
- assertEquals(2147483648, x >>>= x);
- assertEquals(-935909870282997800, x *= ((-2583300643)|x));
- assertEquals(-370753566.54721737, x %= (-1084543510.4524941));
- assertEquals(-177, x >>= (-946264747.6588805));
- assertEquals(-416077682, x ^= (tmp = 416077761, tmp));
- assertEquals(NaN, x %= ((((tmp = 779607408, tmp)*(((tmp = -3007128117, tmp)*(851442866.6153773))+x))&(1283388806))/(-876363553)));
- assertEquals(NaN, x %= (x/(tmp = -1668413939.652408, tmp)));
- assertEquals(-1726405921, x ^= (tmp = -1726405921, tmp));
- assertEquals(-1, x >>= ((3031008213.807012)>>x));
- assertEquals(4294967295, x >>>= ((x>>>x)&(tmp = 2788082290, tmp)));
- assertEquals(8544111670008449000, x *= (tmp = 1989331020.0417833, tmp));
- assertEquals(268435456, x <<= (tmp = 3121736017.2098465, tmp));
- assertEquals(-2.1011176170964474e+26, x -= (((tmp = 1392503299, tmp)*(tmp = 1446108825.1572113, tmp))*(x^(tmp = 372776014.213725, tmp))));
- assertEquals(0, x |= x);
- assertEquals(0, x >>= ((-112413907.70074797)*(-702798603)));
- assertEquals(1829518838, x |= (tmp = -2465448458, tmp));
- assertEquals(57172463, x >>= ((tmp = 2979642955.241792, tmp)%(tmp = -2464398693.291434, tmp)));
- assertEquals(114344926, x += x);
- assertEquals(113279134, x &= (2397742238.6877637));
- assertEquals(54, x >>= (1908522709.6377516));
- assertEquals(-2.966982919573829e-7, x /= (tmp = -182003070, tmp));
- assertEquals(0, x <<= (-1078417156));
- assertEquals(-147831390, x ^= (((-147831390)>>>x)+x));
- assertEquals(0, x -= x);
- assertEquals(-242221450.44696307, x -= (tmp = 242221450.44696307, tmp));
- assertEquals(-484442900, x <<= (((tmp = -2033947265.088614, tmp)&x)/(x^(tmp = -2893953848, tmp))));
- assertEquals(-3227648, x <<= (x<<((tmp = -193993010, tmp)*((983187830)|(3146465242.2783365)))));
- assertEquals(-6455296, x += x);
- assertEquals(-1771542585, x -= (x^(tmp = -1767335879, tmp)));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>>= ((((tmp = -1612864670.4532743, tmp)*(tmp = 786265765.210487, tmp))*((((tmp = -893735877.3250401, tmp)*((x^(tmp = -2804782464.233885, tmp))<<x))&(x-x))^x))<<x));
- assertEquals(0, x -= (x>>>(-1648118674.380736)));
- assertEquals(0, x >>= ((tmp = -2706058813.0028524, tmp)>>(2745047169)));
- assertEquals(0, x += x);
- assertEquals(0, x %= (-898267735.137356));
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>= ((265527509)/((tmp = 2190845136.7048635, tmp)+((x>>x)>>>((x%(x-x))&((((-2080184609.8989801)&((-327231633)>>>((tmp = 864849136, tmp)%(((-524363239)*(((((tmp = 2245852565.3713694, tmp)&(1918365.8978698254))>>>(tmp = -2463081769, tmp))-(((2438244059.471446)|((((-135303645.38470244)*(-861663832.2253196))%(tmp = 1273185196.0261836, tmp))|((2261539338.832875)%((320267076.2363237)+x))))>>(tmp = -2731398821, tmp)))/(tmp = -1947938611, tmp)))^x))))>>(tmp = 833666235, tmp))|x))))));
- assertEquals(-1116704570, x ^= (-1116704570));
- assertEquals(1379561710, x ^= (tmp = -280362968.19654894, tmp));
- assertEquals(-1673822208, x <<= x);
- assertEquals(-1673822208, x |= (x<<(tmp = 1389479193.9038138, tmp)));
- assertEquals(2559712, x >>>= (-2703763734.0354066));
- assertEquals(2593499, x ^= (x>>>((tmp = 148668150.03291285, tmp)^(tmp = -1580360304, tmp))));
- assertEquals(2070393855, x |= (tmp = -2227002907, tmp));
- assertEquals(304197770, x &= (tmp = 2453257354, tmp));
- assertEquals(304197770, x <<= ((-669331453.8814087)-(x^(x^(tmp = 33804899.98928583, tmp)))));
- assertEquals(297068, x >>= x);
- assertEquals(Infinity, x /= (x-x));
- assertEquals(NaN, x %= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x %= ((tmp = 1723087085, tmp)%(2859382131.304421)));
- assertEquals(0, x %= (((tmp = 2935439763, tmp)<<(-3163992768.637094))%(tmp = 67176733, tmp)));
- assertEquals(0, x &= (tmp = 2480771277, tmp));
- assertEquals(0, x >>>= (x+(tmp = -3168690063, tmp)));
- assertEquals(0, x *= ((tmp = -1915275449.1806245, tmp)>>>((tmp = -1644482094.1822858, tmp)/(tmp = -432927173, tmp))));
- assertEquals(0, x += (((2766509428.071809)/(x/((942453848.5423365)/(((tmp = -1284574492, tmp)&((tmp = 760186450.7301528, tmp)-(2464974117.358138)))/((x/(x|(672536969)))*(x>>(-1272232579)))))))>>(x*(-3175565978))));
- assertEquals(-1277710521, x -= (1277710521));
- assertEquals(-1277710521, x >>= (((tmp = -2349135858, tmp)-x)-x));
- assertEquals(-1277710521, x >>= ((tmp = 2135645051, tmp)*(tmp = -2468555366, tmp)));
- assertEquals(-155971, x >>= (-1294859507));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>>= (((861078292.6597499)|(-268063679))-(((((-221864206.9494424)-(-3186868203.2201176))&(tmp = 1287132927, tmp))<<(((tmp = 1964887915, tmp)<<((25908382)^(tmp = -688293519.875164, tmp)))*(2075946055)))&(x-((x>>x)&(1395338223.7954774))))));
- assertEquals(788002218, x -= (-788002218));
- assertEquals(716399906, x &= (-1145868506));
- assertEquals(145776674, x &= (-1661931477.360386));
- assertEquals(145776674, x |= x);
- assertEquals(-0.05255700469257692, x /= (tmp = -2773686873, tmp));
- assertEquals(-660918434, x |= (-660918434.2915542));
- assertEquals(1223537346, x ^= (tmp = -1871274596, tmp));
- assertEquals(305884336, x >>= (x&x));
- assertEquals(-1.1123775647978218e-8, x *= ((tmp = -793393031.4229445, tmp)/((tmp = -503919284, tmp)*(((((tmp = 429810625, tmp)>>>x)-((2091544148.870375)<<(((((x^x)%x)|x)/(-260773261))<<((tmp = -1323834653, tmp)&x))))*((-1231800099.3724015)+x))*((x+((-559726167)^x))>>>((-549148877)<<((((tmp = 1196115201, tmp)/((tmp = -2654658968.390111, tmp)%(tmp = -1044419580, tmp)))*(((((x>>>(733571228))+(2919762692.511447))/(-2718451983.570547))^x)+((2891533060.1804514)^((tmp = -2514488663, tmp)&x))))<<(tmp = -2526139641.6733007, tmp))))))));
- assertEquals(0, x >>>= x);
- assertEquals(0, x *= x);
- assertEquals(0, x |= x);
- assertEquals(3076984066.336236, x -= ((tmp = -3076984066.336236, tmp)+((tmp = -446575828.5155368, tmp)&x)));
- assertEquals(1, x /= x);
- assertEquals(1513281647.839972, x *= (1513281647.839972));
- assertEquals(1251138155, x ^= ((tmp = 2124481052, tmp)&(2431937351.4392214)));
- assertEquals(1, x /= x);
- assertEquals(0, x &= (tmp = 627050040, tmp));
- assertEquals(497153016, x ^= (497153016));
- assertEquals(-1112801283, x |= (tmp = 2752196557, tmp));
- assertEquals(0.5735447276296568, x /= ((((tmp = -500878794, tmp)%(tmp = -2559962372.2930336, tmp))%(2661010102))+(tmp = -1439338297, tmp)));
- assertEquals(1.0244795995097235e-9, x /= (559840067));
- assertEquals(0.43468811912309857, x *= (424301391));
- assertEquals(-1972757928, x ^= (tmp = -1972757928.9227014, tmp));
- assertEquals(-606757265, x ^= (tmp = -2923461577.264596, tmp));
- assertEquals(-37, x >>= (((-2736561559.7474318)%(tmp = -27668972.662741184, tmp))*(2774711606)));
- assertEquals(-1923785671, x += ((-1923785597)+x));
- assertEquals(-3877639176, x += (tmp = -1953853505, tmp));
- assertEquals(-4688259242, x -= ((810620066.4394455)>>(((-1474285107.459875)>>x)/(((((-570672326.4007359)>>(tmp = -3086802075, tmp))%x)>>>(((tmp = 286938819.28193486, tmp)>>>((1712478502)>>(tmp = 3045149117.796816, tmp)))<<(tmp = 750463263.292952, tmp)))&(tmp = 2055350255.5669963, tmp)))));
- assertEquals(-0, x %= x);
- assertEquals(0, x <<= (1037856162.5105649));
- assertEquals(0, x *= x);
- assertEquals(0, x &= (997845077.4917375));
- assertEquals(0, x *= x);
- assertEquals(0, x *= x);
- assertEquals(0, x <<= (((x<<x)&(57691805))>>(786927663)));
- assertEquals(0, x ^= x);
- assertEquals(0, x += x);
- assertEquals(0, x &= (-2131910624.1429484));
- assertEquals(0, x >>>= (-43787814));
- assertEquals(-2415062021, x += (tmp = -2415062021, tmp));
- assertEquals(-4830124042, x += x);
- assertEquals(-186683401, x |= (tmp = 1960135383, tmp));
- assertEquals(NaN, x *= ((tmp = -1674740173.9864025, tmp)%(((((((-432895485.7261934)-x)^x)>>>(((-1627743078.3383338)>>(179992151))<<((tmp = 911484278.0555259, tmp)|(((tmp = -3042492703, tmp)>>(((-663866035.302746)>>(((x-((440661929.50030375)>>>(tmp = 263692082, tmp)))*x)+x))/((1546004407)^(((tmp = 2023662889.1594632, tmp)*(tmp = -2456602312, tmp))+(tmp = 755602286.1810379, tmp)))))%((tmp = -336449961, tmp)|(tmp = 206780145, tmp))))))/(1068005219.1508512))<<(tmp = -474008862.6864624, tmp))/(((((((1518711056.5437899)>>>(tmp = 287418286.63085747, tmp))<<(tmp = 2823048707, tmp))^(((x<<(x^(-1600970311)))&(x>>(((tmp = 157300110.7636031, tmp)*(tmp = -3047000529, tmp))&(1743024951.3535223))))>>x))-(tmp = -2895435807, tmp))*((tmp = -314120704, tmp)&(tmp = 1759205369, tmp)))>>(tmp = 1833555960.046526, tmp)))));
- assertEquals(NaN, x -= (tmp = 694955369, tmp));
- assertEquals(NaN, x *= (x%x));
- assertEquals(0, x |= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x &= x);
- assertEquals(NaN, x /= (x+x));
- assertEquals(NaN, x %= ((tmp = -1595988845, tmp)*((1754043345)>>>(-601631332))));
- assertEquals(0, x >>>= (tmp = 862768754.5445609, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x *= (tmp = -1774545519, tmp));
- assertEquals(0, x >>>= (tmp = -2492937784, tmp));
- assertEquals(0, x %= ((((x<<(-1657262788.2028513))&((x^(tmp = -671811451, tmp))<<(-2984124996)))^(1455422699.7504625))-((-340550620)>>x)));
- assertEquals(918278025, x ^= ((tmp = -918278027, tmp)^((tmp = 2889422870, tmp)/(tmp = -657306935.7725658, tmp))));
- assertEquals(918278025, x %= (2603186571.0582614));
- assertEquals(107034679.32509923, x %= (tmp = -811243345.6749008, tmp));
- assertEquals(53517339, x >>= (x%((((x*((tmp = -983766424, tmp)^(-1881545357.8686862)))|(tmp = -1429937087, tmp))>>((x<<x)>>((((tmp = -2347470476, tmp)&x)+((x&x)<<(396061331.6476157)))*(tmp = -3136296453.209073, tmp))))>>>(((tmp = 908427836, tmp)|(tmp = 207737064, tmp))|(((1253036041)-(tmp = 2705074182, tmp))+(-431215157.82083917))))));
- assertEquals(53477378, x &= ((((-1128036654.165636)*x)+x)>>(x>>(3080099059))));
- assertEquals(0, x >>= (-590692293));
- assertEquals(0, x %= (-2395850570.9700127));
- assertEquals(0, x *= ((tmp = 1377485272, tmp)&(1129370608)));
- assertEquals(0, x += (x>>>(x%(((((tmp = -1746827236, tmp)+((tmp = -326913490, tmp)&((-58256967)&x)))*(tmp = -1176487022.001651, tmp))>>>(-2089147643))-x))));
- assertEquals(0, x <<= (tmp = 1073298160.2914447, tmp));
- assertEquals(-837811832, x ^= (-837811832));
- assertEquals(102760448, x <<= (tmp = 2833582450.4544373, tmp));
- assertEquals(0, x &= (((((((tmp = 2595641175, tmp)*x)+(tmp = -2049260172.1025927, tmp))%((2986747823)>>(tmp = -2120598518, tmp)))&((tmp = -2742408622, tmp)&x))>>x)*((1043474247.9601482)&(tmp = 1686365779.9885998, tmp))));
- assertEquals(0, x >>= ((tmp = 1717862848, tmp)-(tmp = 1077024446.4160957, tmp)));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x /= (-1669429787.975099));
- assertEquals(NaN, x -= (-2299895633.4807186));
- assertEquals(138173970, x ^= (138173970.56627905));
- assertEquals(-2084183776, x <<= (3073345316));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>= (-3080556066.068573));
- assertEquals(0, x &= ((tmp = -2587514820, tmp)*(x-((x^(1995672257))*(1125326747.2339358)))));
- assertEquals(NaN, x %= x);
- assertEquals(0, x >>= (tmp = 2139186585, tmp));
- assertEquals(-1904096640, x |= ((-602301360.1919911)*(-1270444810)));
- assertEquals(1073741824, x <<= (tmp = -1069467849, tmp));
- assertEquals(1073741824, x ^= (x-x));
- assertEquals(536870912, x >>>= (-1579466367.160293));
- assertEquals(512, x >>= (972402804.3890183));
- assertEquals(512, x &= (tmp = 2664796831, tmp));
- assertEquals(16777216, x <<= (-2738292561));
- assertEquals(0, x >>>= ((((1397663615.3889246)|(1117420260.6730964))-(-1173734560))<<((tmp = 1007006104.0172879, tmp)<<((tmp = -623002097, tmp)%(tmp = -35829654.379403114, tmp)))));
- assertEquals(1200191544, x ^= (tmp = -3094775752, tmp));
- assertEquals(71, x >>>= x);
- assertEquals(71, x |= x);
- assertEquals(1394763772, x += (1394763701));
- assertEquals(-1.492717171027427, x /= ((x&(tmp = 1243787435, tmp))-(2043911970.26752)));
- assertEquals(-1.1002448961224718e-8, x /= ((((835185744)*(((tmp = 2165818437, tmp)^(tmp = 2567417009.1166553, tmp))/x))/x)/(((63485842.39971793)^(2668248282.597389))/x)));
- assertEquals(0, x <<= (tmp = 1598238578.637568, tmp));
- assertEquals(0, x |= (x&((tmp = -1812945547.5373957, tmp)>>>x)));
- assertEquals(0, x >>>= (x+(-1969679729.7299538)));
- assertEquals(1582033662, x += (tmp = 1582033662, tmp));
- assertEquals(1, x >>>= x);
- assertEquals(-550748739, x += ((tmp = -550748740, tmp)/(x&((2537822642.235506)^((-2167656297)%(tmp = 1161201210, tmp))))));
- assertEquals(-268921, x >>= (tmp = 1916069547.7381654, tmp));
- assertEquals(-0.00021776939364231114, x /= (tmp = 1234888868, tmp));
- assertEquals(0, x <<= (-1036375023));
- assertEquals(0, x &= ((((x/(2398886792.27443))&(x|((-1813057854.1797302)-x)))&(x/(((tmp = 3091133731.4967556, tmp)|(3013139691.823039))<<x)))>>>(2542784636.963599)));
- assertEquals(0, x += ((x*x)/(tmp = 347079383, tmp)));
- assertEquals(788347904, x |= ((1462257124.6374629)*((3180592147.4065146)-(x&(1922244678)))));
- assertEquals(2130672735, x |= (tmp = -2846986145, tmp));
- assertEquals(-1331327970, x ^= ((656251304)-(tmp = 1489152359, tmp)));
- assertEquals(-0.14377179742889856, x %= (((2889747597.813753)-(1730428996))/(((tmp = -1378710998, tmp)&x)|x)));
- assertEquals(-1754612583.143772, x += ((-1754725729)^((-2285838408)>>>(1434074349))));
- assertEquals(-0, x %= x);
- assertEquals(0, x &= (tmp = -1031961332, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x /= (3059476325));
- assertEquals(NaN, x *= ((x*((((tmp = 13529540.462185979, tmp)&x)^((x<<(-1312696238.1628869))&(-2029766712.3852897)))>>x))/x));
- assertEquals(1657339940, x ^= ((tmp = -488956817.1491232, tmp)&(tmp = -2352413900.1983714, tmp)));
- assertEquals(-530683621952432200, x *= (tmp = -320202035.2882054, tmp));
- assertEquals(229226258, x ^= ((tmp = -1263410990.026416, tmp)+(((-808046349)&(tmp = -1294442506, tmp))&((tmp = 1147437219, tmp)<<((tmp = -820299900, tmp)-(tmp = -1947748943.3443851, tmp))))));
- assertEquals(7163320, x >>= (-2631307131));
- assertEquals(-68, x |= (((-1271721343)>>x)%x));
- assertEquals(-39956523818.38862, x *= (587595938.505715));
- assertEquals(0, x -= x);
- assertEquals(0, x >>>= ((x^(x+x))<<(tmp = 265212367, tmp)));
- assertEquals(0, x |= (((x>>((tmp = 2294761023, tmp)/(x>>(2125624288))))&((-2125650113)|(tmp = 1014409884, tmp)))%(tmp = -527324757, tmp)));
- assertEquals(0, x >>= ((tmp = 2267075595, tmp)*(-1681569641.8304193)));
- assertEquals(0, x >>>= x);
- assertEquals(0.5738410949707031, x -= ((tmp = -1846572645.573841, tmp)%((((((x^(((-156613905.64173532)/x)<<x))+((x|((2405109060)>>>x))^x))/(570585894.8542807))+(x&(-2544708558)))^((((tmp = -2539082152.490635, tmp)+((((-657138283)/(2204743293))-((tmp = -1422552246.565012, tmp)+x))<<(x-x)))>>(x/(x>>>(tmp = -3027022305.484394, tmp))))<<x))&((-2066650303.3258202)/(tmp = -1666842593.0050385, tmp)))));
- assertEquals(0, x >>>= ((((tmp = 2473451837.613817, tmp)>>((2526373359.1434193)>>(x<<x)))+((tmp = -579162065, tmp)+((tmp = -3115798169.551487, tmp)-(tmp = 933004398.9618305, tmp))))&(tmp = 131167062, tmp)));
- assertEquals(-2067675316, x ^= (-2067675316.6300585));
- assertEquals(543772, x >>>= x);
- assertEquals(-1073741824, x <<= x);
- assertEquals(3221225472, x >>>= ((x*(1478586441.081221))&(tmp = -3050416829.2279186, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x *= x);
- assertEquals(-1017771903.0298333, x -= (1017771903.0298333));
- assertEquals(0.6404112721149928, x /= ((tmp = -144667370, tmp)^(-2849599562)));
- assertEquals(-2410517638773644000, x -= (((tmp = 1759631550, tmp)*x)*((((tmp = -2949481475, tmp)>>>x)*x)|(tmp = -2977983804, tmp))));
- assertEquals(-0, x %= (x+((((tmp = -1307866327.7569134, tmp)<<((x&((tmp = -2380043169.8405933, tmp)|x))>>(472992789.7639668)))|(((((x<<(tmp = -1416427232.7298179, tmp))%(-1404989679.409946))*((x/(tmp = -992416608, tmp))/(tmp = 524646495, tmp)))-(tmp = 734405570, tmp))>>x))/(1079256317.7325506))));
- assertEquals(0, x <<= (tmp = 2459834668, tmp));
- assertEquals(-0, x /= (tmp = -1892164840.5719755, tmp));
- assertEquals(0, x >>= (x|(((1299844244)>>>(((tmp = -2422924469.9824634, tmp)|x)-((((1914590293.2194016)+(-3033885853.8243046))-((tmp = -1720088308, tmp)%x))<<(tmp = 2210817619, tmp))))<<x)));
- assertEquals(0, x <<= (((tmp = 3192483902.841396, tmp)>>>(((x^(2944537154))|(tmp = -1334426566, tmp))*(((((((-2705218389)&x)+(1987320749))+(tmp = -111851605, tmp))|(2894234323))-(265580345))&x)))%(((tmp = 1431928204.6987057, tmp)&(tmp = 914901046, tmp))&(x>>>x))));
- assertEquals(0, x >>>= (tmp = 1941940941, tmp));
- assertEquals(0, x %= (3089014384));
- assertEquals(0, x += ((tmp = 2948646615, tmp)*x));
- assertEquals(-0, x /= (tmp = -1480146895, tmp));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x %= (-2995257724.158043));
- assertEquals(NaN, x %= (tmp = 2714835455, tmp));
- assertEquals(NaN, x /= (tmp = -311440765.98078775, tmp));
- assertEquals(NaN, x -= (-1600234513.697098));
- assertEquals(0, x <<= x);
- assertEquals(0, x <<= (-1499045929));
- assertEquals(-0, x *= (-2491783113));
- assertEquals(0, x ^= (x%((x>>(((1234398704.3681123)>>>x)%(x+x)))>>(402257223.4673699))));
- assertEquals(-643225204, x ^= (((-55960194.698637486)+((((721411198)-(((tmp = 1308676208.7953796, tmp)%(2242904895))-x))>>((((tmp = 332791012, tmp)&((tmp = -2094787948, tmp)/((x/(2427791092))^(2444944499.6414557))))%(((x+(1253986263.5049214))+(((((3135584075.248715)+((tmp = -2569819028.5414333, tmp)%(440908176.1619092)))>>>(x<<((3061615025)-x)))%x)%(x+((2369612016)*((((tmp = 1173615806, tmp)*(-1910894327))&(2428053015.077821))*(-55668334.70082307))))))<<(tmp = -2129259989.0307562, tmp)))+(1579400360)))%((-3053590451.8996153)>>x)))+(x>>(x%(x^((-1772493876)^x))))));
- assertEquals(413738663060841600, x *= x);
- assertEquals(1581062538.4501781, x %= ((tmp = -1298397672.0300272, tmp)-((2237197923)+(tmp = -1385478459, tmp))));
- assertEquals(755644566.8709538, x %= (tmp = -825417971.5792243, tmp));
- assertEquals(1, x /= x);
- assertEquals(0, x >>>= ((89330582)%(-1012731642.4855506)));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x %= ((x>>>((x/(tmp = -1848848941.2352903, tmp))>>>(tmp = -71862893, tmp)))&(-2385996598.2015553)));
- assertEquals(NaN, x += (-2292484503.318904));
- assertEquals(NaN, x *= (2961064461));
- assertEquals(NaN, x += (x<<((2076798243.6442)/((tmp = -81541044.75366282, tmp)^((3041366498.551101)+((2126874365)/(tmp = -177610359, tmp)))))));
- assertEquals(NaN, x %= ((x/((x/x)+x))>>>x));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x += (1171761980.678));
- assertEquals(NaN, x += ((2355675823)<<(-390497521)));
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= (tmp = -658428225.56619, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= (1643310725.5713737));
- assertEquals(0, x <<= x);
- assertEquals(0, x <<= (-397005335.3712895));
- assertEquals(0, x >>>= (tmp = -2804713458.166788, tmp));
- assertEquals(0, x <<= (((((((tmp = 1879988501, tmp)%(1528081313.9360204))+(1376936736))*((((x>>>((1736268617.339198)>>>(-2598735297.4277673)))<<((((((((-2742982036)/(231867353.4549594))-(875335564))<<x)|((2241386341.742653)<<((-22024910.828409433)&(x<<x))))*(-756987803.5693252))+x)^(tmp = 1084498737, tmp)))<<(1920373881.8464394))&(2370827451.82652)))&(x^(tmp = -891503574, tmp)))<<x)>>>((-1519588625.2332087)^(483024636.2600144))));
- assertEquals(52193878.40997505, x -= ((tmp = -341753803.40997505, tmp)%(tmp = -96519975, tmp)));
- assertEquals(-1665844168.938803, x -= (1718038047.348778));
- assertEquals(3.6962232549405003e-19, x /= (((((-809583468.5507183)>>>((tmp = 286797763, tmp)%((1579183142.7321532)/(1853824036.001172))))<<x)>>(((x|x)^((tmp = -2641304815, tmp)<<(x<<x)))>>(((((268338128.8300134)&(-1778318362.8509881))*(751081373.346478))<<(((525066612)>>(-1139761212))*(2949167563.299916)))<<x)))+((tmp = 664905121, tmp)*((-2208280205)*(3069462420)))));
- assertEquals(4710721795.110161, x += (((217604832)+((1307891481.781326)-x))+(tmp = 3185225481.328835, tmp)));
- assertEquals(0, x %= x);
- assertEquals(0, x -= (((x>>>(x/(tmp = 46977522.46204984, tmp)))>>(-2466993199.615269))&(tmp = 14524430.287991166, tmp)));
- assertEquals(0, x >>= x);
- assertEquals(0, x /= (tmp = 578120637, tmp));
- assertEquals(-17267104, x -= (((tmp = 1515285919.495792, tmp)+(((tmp = -1364790286.7057304, tmp)+((954599071)>>((897770243.1509961)*x)))^x))>>>(566027942.1732262)));
- assertEquals(-17267104, x &= x);
- assertEquals(189138241, x ^= ((tmp = 1565742675.9503145, tmp)-((tmp = 1737806643, tmp)|((x*(tmp = -1382435297.5955122, tmp))*(-2820516692.153056)))));
- assertEquals(189138241, x %= (x*(tmp = -1670678493, tmp)));
- assertEquals(1693, x %= ((-2328713314)>>>(1623637325)));
- assertEquals(1693, x %= ((-1019394014)*(x|x)));
- assertEquals(3386, x += x);
- assertEquals(9268970871604, x *= (2737439714));
- assertEquals(-4720.120483643183, x /= (tmp = -1963714889, tmp));
- assertEquals(-1, x >>= ((x^(((-2404688047.455056)|((1439590234.6203847)<<(tmp = -2496557617, tmp)))/((x<<((tmp = 1865549512.282249, tmp)/(((360384191.55661833)>>(tmp = -1225297117.344188, tmp))>>>(2703264010.4122753))))*(1521960888.0071676))))%(tmp = 2834001448.0508294, tmp)));
- assertEquals(63, x >>>= (x&(-3079339174.6490154)));
- assertEquals(0, x >>>= (1039770956.6196513));
- assertEquals(0, x >>>= (-1074820214));
- assertEquals(0, x >>>= (x/x));
- assertEquals(0, x >>= ((tmp = -449117604.2811785, tmp)&x));
- assertEquals(-0, x /= (tmp = -118266935.1241343, tmp));
- assertEquals(2226140134, x += (tmp = 2226140134, tmp));
- assertEquals(2068827161, x ^= ((tmp = -1950744808.846384, tmp)>>((2258661151)^((tmp = -1118176421.8650177, tmp)<<(2828634014)))));
- assertEquals(123, x >>>= (-1779624840.0515127));
- assertEquals(0, x >>>= (x|((tmp = -239082904, tmp)<<(tmp = 1404827607, tmp))));
- assertEquals(0, x >>>= x);
- assertEquals(1793109749, x ^= (tmp = -2501857547.710491, tmp));
- assertEquals(855, x >>>= x);
- assertEquals(0, x >>>= (-847289833));
- assertEquals(0, x %= (-2271241045));
- assertEquals(169648072, x ^= (((tmp = 169648072.66759944, tmp)^x)|x));
- assertEquals(176025927479164930, x *= ((tmp = 1111997198.8803885, tmp)<<(tmp = 2913623691, tmp)));
- assertEquals(176025926613281700, x += ((tmp = -865883245, tmp)<<(x+(-2624661650))));
- assertEquals(3406506912, x >>>= ((x|(tmp = 2436016535, tmp))*(((tmp = -1222337225, tmp)<<((1765930268)&x))*(tmp = 1600702938, tmp))));
- assertEquals(1.694694170868292, x %= (x/(-1597121830.794548)));
- assertEquals(0, x >>= (tmp = -2443203089, tmp));
- assertEquals(0, x >>>= (1323174858.2229874));
- assertEquals(0, x &= ((tmp = 846556929.2764134, tmp)|(((1483000635.0020065)|(-3151225553))|(tmp = -229028309, tmp))));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>= ((((((-2677334787)>>>x)>>((tmp = 496077992, tmp)&((((x<<(x*(tmp = 1095163344.2352686, tmp)))+(-952017952))%((x<<((x*x)/(tmp = 2983152477, tmp)))^((tmp = -939521852.1514642, tmp)^(tmp = 143967625.83755958, tmp))))*((tmp = 551827709.8366535, tmp)>>>x))))^((-1552681253.69869)-(-1874069995)))>>>(x>>(x%(tmp = -2554673215, tmp))))|(tmp = -190693051.77664518, tmp)));
- assertEquals(0, x /= (tmp = 427402761.37668264, tmp));
- assertEquals(0, x <<= x);
- assertEquals(0, x |= (x>>>(((((-543326164.0673618)>>>(-2344090136.707964))>>>((((-563350246.6026886)/x)/(1525481037.3332934))&(tmp = -2917983401.88958, tmp)))^(-1094667845.1208413))^x)));
- assertEquals(0, x &= (1080322749.897747));
- assertEquals(0, x %= (tmp = -1572157280, tmp));
- assertEquals(0, x >>>= x);
- assertEquals(0, x %= ((377280936)|x));
- assertEquals(708335912, x -= (tmp = -708335912, tmp));
- assertEquals(2766937, x >>>= x);
- assertEquals(547342779, x += (tmp = 544575842, tmp));
- assertEquals(546273751, x -= ((x>>>(472833385.9560914))|((tmp = -1164832103.9970903, tmp)/(3147856452.1699758))));
- assertEquals(546273751, x &= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x >>>= (tmp = -3181805175, tmp));
- assertEquals(-375546685, x |= (-375546685.08261824));
- assertEquals(1089992785780217200, x *= (tmp = -2902416209, tmp));
- assertEquals(0, x %= x);
- assertEquals(-1854981526, x -= ((x-x)-(-1854981526)));
- assertEquals(-3709963052, x += x);
- assertEquals(-316772482, x %= (tmp = -1696595285, tmp));
- assertEquals(-316772482, x |= x);
- assertEquals(1, x /= x);
- assertEquals(0, x -= x);
- assertEquals(-1418375842, x ^= (-1418375842));
- assertEquals(-2, x >>= x);
- assertEquals(-4, x += x);
- assertEquals(-8388608, x &= (x<<(-350555339.30086184)));
- assertEquals(-16777216, x += x);
- assertEquals(-0, x %= x);
- assertEquals(1083355129, x += (tmp = 1083355129, tmp));
- assertEquals(0, x &= (((tmp = 389729053, tmp)-(tmp = 2944192190.0939536, tmp))/(x-(2081712461.2657034))));
- assertEquals(0, x += x);
- assertEquals(-3, x += ((3147270119.5831738)>>((2455837253.1801558)%((-2100649096)>>(((290236808.01408327)|(x&((2661741230.3235292)|((tmp = 1686874589.4690177, tmp)<<x))))*(x+(tmp = 2327674670, tmp)))))));
- assertEquals(-3, x %= ((x>>(((-2962686431)%x)>>((((2438370783)-(tmp = 2667305770.4839745, tmp))>>>x)>>>x)))<<((x&(tmp = 1428498616, tmp))|((tmp = 2621728539.102742, tmp)/(-204559901)))));
- assertEquals(2, x ^= (x|((((tmp = 1751230118.6865973, tmp)/(-867465831.207304))>>((-808143600.0912395)+(-2882191493.0506454)))^x)));
- assertEquals(2, x %= (-2015954220.2250996));
- assertEquals(0, x >>>= (tmp = 401373999, tmp));
- assertEquals(0, x >>= (2371830723));
- assertEquals(0, x >>>= ((((tmp = 2765919396, tmp)-x)-(530310269.7131671))|(tmp = -615761207.9006102, tmp)));
- assertEquals(-145389011, x ^= (tmp = -145389011, tmp));
- assertEquals(-145389011, x |= x);
- assertEquals(1632929832, x &= (-2518898392));
- assertEquals(4190540017.751949, x += (tmp = 2557610185.751949, tmp));
- assertEquals(4980024282.153588, x += ((1841304364.1177452)%(tmp = 1051820099.7161053, tmp)));
- assertEquals(0, x >>>= (((((1379314342.4233718)>>((-2782805860)^((x%(tmp = 1328845288, tmp))>>>(tmp = 901403219.858733, tmp))))+(x/((tmp = -3078904299, tmp)/x)))/x)|(x|(1399702815))));
- assertEquals(-1820494882, x ^= (tmp = -1820494882.407127, tmp));
- assertEquals(-305870376, x %= (tmp = -757312253, tmp));
- assertEquals(-577530443, x += (x|(tmp = -1958083619.6653333, tmp)));
- assertEquals(333541412591776260, x *= x);
- assertEquals(-949341696, x >>= ((((1550069663)<<((x>>>(tmp = 2406565178.902887, tmp))>>>((1844746612.632984)/((tmp = 2233757197, tmp)*((-1524891464.1028347)>>(tmp = 2498623474.5616803, tmp))))))&x)<<(x&(tmp = -370379833.3884752, tmp))));
- assertEquals(-277202090, x |= ((-762200848.8405354)-(tmp = 1749136282, tmp)));
- assertEquals(0.13704539927239265, x /= (tmp = -2022702633.373563, tmp));
- assertEquals(0, x -= x);
- assertEquals(0, x %= ((132951580.19304836)-((427623236.27544415)-(1212242858))));
- assertEquals(0, x &= ((449148576)&(-1609588210.249217)));
- assertEquals(0, x >>= x);
- assertEquals(0, x -= x);
- assertEquals(-0, x /= (tmp = -1640777090.9694843, tmp));
- assertEquals(0, x &= (((tmp = -1923412153, tmp)>>>((x>>(tmp = 3027958119.0651507, tmp))+(60243350)))>>(tmp = -2610106062, tmp)));
- assertEquals(0, x ^= (((-186998676)/(tmp = 2697937056, tmp))-x));
- assertEquals(-1147950080, x |= ((2425449461)*(tmp = -2525854833, tmp)));
- assertEquals(457688198, x ^= (2698274950.660941));
- assertEquals(8724, x %= ((1174351031)>>>((371599047.36048746)+(3025292010))));
- assertEquals(0, x <<= (tmp = -710011617, tmp));
- assertEquals(0, x >>>= (1693410026));
- assertEquals(1443005362, x ^= ((tmp = -2851961934, tmp)+((((x%x)-(tmp = 547622400, tmp))<<(((tmp = 722396486.5553623, tmp)|x)>>>((((tmp = -542268973.5080287, tmp)<<(tmp = 1347854903.771954, tmp))>>>(tmp = -889664427.7115686, tmp))&((tmp = 1549560114, tmp)*(tmp = 964918035, tmp)))))&(-2422502602.920377))));
- assertEquals(3986573462, x -= (-2543568100));
- assertEquals(7973146924, x += x);
- assertEquals(-1, x >>= (-75987297));
- assertEquals(-12, x += ((2940824338.64834)>>(tmp = 3061467355, tmp)));
- assertEquals(-3.8229398525977614e-8, x /= (313894554));
- assertEquals(-2.890709270374084e-17, x /= (tmp = 1322491989, tmp));
- assertEquals(0, x |= (x-x));
- assertEquals(0, x >>>= (tmp = -1205300664, tmp));
- assertEquals(-0, x /= (((2869505187.6914144)>>(tmp = 1541407065, tmp))/(((-571132581)>>>(x>>x))/((x^(170373762.8793683))>>>((((tmp = -363073421.05897164, tmp)|(((tmp = -1591421637, tmp)>>(1095719702.8838692))&(636687681.9145031)))^x)^(x|x))))));
- assertEquals(-1487828433, x ^= (-1487828433.3462324));
- assertEquals(-0, x %= x);
- assertEquals(1716342498, x -= ((tmp = 2578624798, tmp)^x));
- assertEquals(1636, x >>= ((264194540)>>>(-801900756)));
- assertEquals(0, x >>>= ((tmp = 2502688876, tmp)+((x<<(x|((-628272226.0338528)|((x<<(-2083074091))>>>(tmp = 1692123246.8418589, tmp)))))>>(1594759826.990993))));
- assertEquals(0, x <<= (tmp = -904399643, tmp));
- assertEquals(NaN, x /= ((x^(x-x))%((tmp = 1744962024.4882128, tmp)%x)));
- assertEquals(NaN, x /= (-1013142883.1845908));
- assertEquals(NaN, x /= ((tmp = 793633198, tmp)^(-2993598490.8659954)));
- assertEquals(0, x &= (x>>((tmp = 1200937851, tmp)<<(((tmp = -2807378465, tmp)&(tmp = -143778237, tmp))|(tmp = -1200772223, tmp)))));
- assertEquals(0, x <<= x);
- assertEquals(88144, x |= (((((tmp = 3002723937.8560686, tmp)*(tmp = -3171720774.2612267, tmp))%(((tmp = -2586705978.7271833, tmp)%((x+(-1553704278))&(2405085526.501994)))>>((-240842053)>>>(((((tmp = -1886367228.4794896, tmp)>>>x)^(tmp = 2604098316, tmp))^(tmp = 1362808529, tmp))<<((tmp = -1062263918, tmp)|((-172718753)%(tmp = -1910172365.4882073, tmp)))))))^((1444153362)>>((x&((-1205465523.2604182)^(tmp = -2062463383, tmp)))>>(tmp = 956712476, tmp))))>>((((-1004215312)^((((-1707378612.5424936)^(tmp = 2372161553, tmp))/((tmp = 1802586581, tmp)*((2082257.1896460056)&((tmp = -1270773477, tmp)^(tmp = 942517360.3447798, tmp)))))+x))%((((666494127)^(x^x))>>>(tmp = -2592829775, tmp))+((-1601528223)+((x+(tmp = -2417034771.7409983, tmp))>>>((tmp = -730673817, tmp)*x)))))>>x)));
- assertEquals(-2603179111.7557006, x -= ((2603267255.755627)+(x/(1200979191.2823262))));
- assertEquals(1691788185, x >>= (tmp = 3088840032, tmp));
- assertEquals(-168382533, x |= (tmp = -780750941.4590135, tmp));
- assertEquals(-168382533, x >>= (60741120.48285198));
- assertEquals(-134287365, x |= (x*(tmp = 834637940.7151251, tmp)));
- assertEquals(-1481917089, x -= (tmp = 1347629724, tmp));
- assertEquals(1, x >>>= x);
- assertEquals(262144, x <<= (2680216914));
- assertEquals(1075132032, x ^= (x-((tmp = 3220359552.3398685, tmp)^(((-434474746.6039338)|((((((((tmp = 1945689314.9683735, tmp)>>(1300022273))>>>(333705550))&x)%(588357521))-(x+(x^(((tmp = -134560382, tmp)+x)-((((994246147.7195556)-(-1506599689.7383268))%(x<<x))>>((1256426985.5269494)+(tmp = 1860295952.8232574, tmp)))))))^(((tmp = 917333220.2226384, tmp)>>x)>>>(tmp = 865898066, tmp)))%((x|(x%((tmp = -2660580370, tmp)&(tmp = 2966426022, tmp))))*x)))/(((tmp = 682585452, tmp)&(-3219368609))+((tmp = -1330253964, tmp)+((x&(2857161427))/x)))))));
- assertEquals(274944, x &= ((2606953028.1319966)-(-1707165702)));
- assertEquals(266752, x &= ((x<<((x+(x+(x^(-1570175484))))^x))^(x+(x<<(tmp = 90330700.84649956, tmp)))));
- assertEquals(266752, x &= ((((x*(tmp = 2033225408, tmp))-(x-((tmp = 1507658653, tmp)/(-3016036094))))>>>((1497480588)>>(2784070758)))|(tmp = -3025904401.93921, tmp)));
- assertEquals(-1680442631, x |= ((x/(445284843))|((tmp = 2614520057.2723284, tmp)<<x)));
- assertEquals(40851947, x >>>= (tmp = -1577031386.938616, tmp));
- assertEquals(2493, x >>= ((3044630989.3662357)-(-2670572992.8580284)));
- assertEquals(-0.0000017317105653562252, x /= (-1439617017.9207587));
- assertEquals(0, x &= (2359806567));
- assertEquals(623768541, x ^= (623768541));
- assertEquals(1028567149.0716183, x += (((tmp = 1307794561, tmp)%(x>>x))-(-404798608.0716183)));
- assertEquals(-1.2971762489811298, x /= (tmp = -792927830.6471529, tmp));
- assertEquals(-1.2971762489811298, x %= ((-2426421701.2490773)/(-689566815.3393874)));
- assertEquals(-2147483648, x <<= x);
- assertEquals(-2147483648, x &= (tmp = -869991477, tmp));
- assertEquals(-268435456, x >>= (1383186659));
- assertEquals(0, x -= x);
- assertEquals(-2009742037, x |= (-2009742037.5389993));
- assertEquals(-1386630820, x ^= (627864695));
- assertEquals(-1033479103975173600, x *= (tmp = 745316697.9046186, tmp));
- assertEquals(-1628048487, x |= (2662654361));
- assertEquals(325551, x >>>= (340874477));
- assertEquals(-1235730537, x ^= (tmp = 3059533880.0725217, tmp));
- assertEquals(-1235730537, x %= (2247137328));
- assertEquals(-220200960, x <<= ((x>>x)-x));
- assertEquals(0, x <<= ((tmp = 337220439.90653336, tmp)|(tmp = 2901619168.375105, tmp)));
- assertEquals(0, x >>>= ((-2114406183)/x));
- assertEquals(0, x %= ((1425828626.3896675)/x));
- assertEquals(0, x >>>= ((3213757494)>>>(2595550834.3436537)));
- assertEquals(0, x <<= x);
- assertEquals(-0, x /= ((1544519069.5634403)/((tmp = -1332146306, tmp)&(-762835430.0022461))));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= (x|((((x*((-786272700)+x))<<x)+((tmp = -1868484904, tmp)-(tmp = -1692200376, tmp)))+(-1010450257.6674457))));
- assertEquals(0, x -= x);
- assertEquals(0, x ^= (x>>>(706010741)));
- assertEquals(-964928697, x |= (-964928697));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= ((((tmp = 1778003555.3780043, tmp)>>(x%((tmp = -766158535, tmp)^((-2681449292.8257303)%((x-(x|(tmp = 1966478387.2443752, tmp)))^(((tmp = -1848398085, tmp)&x)>>>(tmp = -2860470842, tmp)))))))%(tmp = 2315077030, tmp))^x));
- assertEquals(0, x ^= x);
- assertEquals(-288007757, x ^= ((tmp = 183607156.1803962, tmp)-(tmp = 471614914, tmp)));
- assertEquals(-270573581, x |= (tmp = -849475741.9424644, tmp));
- assertEquals(-2129929, x |= (((((1942852445)&(tmp = 1280372312, tmp))*(x*(tmp = -1601900291, tmp)))^((509080002.81080174)-(tmp = 2699498226.9164257, tmp)))>>(((-335361221)>>(tmp = 843134832, tmp))%(-35532542))));
- assertEquals(-232622355, x ^= ((-3060885134.5375547)-(((tmp = 1965966723, tmp)-((tmp = 1248630129.6970558, tmp)<<(tmp = 1859637857.5027392, tmp)))*x)));
- assertEquals(-52149658093200070, x *= (224181627.31264615));
- assertEquals(-697122968, x ^= (x-(x+(tmp = 2747211186.407712, tmp))));
- assertEquals(-2146269688, x &= ((tmp = -1466710519, tmp)^(x/(1419998975))));
- assertEquals(-536567422, x >>= (((((tmp = -1760701688.999274, tmp)>>(-1821976334))/(((tmp = -1660849531, tmp)>>>x)-((x+((tmp = -2489545009.4327965, tmp)>>>((tmp = -267360771.39148235, tmp)^x)))*(((-1453528661)%x)>>>(((243967010.3118453)/((((((2977476024)>>>((-1630798246)<<x))&(591563895.2506002))*(((2668543723.9720144)>>>x)|(1600638279)))^x)>>(x<<(tmp = -152589389, tmp))))>>>(x|(2821305924.9225664)))))))+(618968002.8307843))%(tmp = -1005408074.368274, tmp)));
- assertEquals(40962, x &= (114403906));
- assertEquals(19741977727890, x *= ((-2367133915.963945)>>>(-3119344126)));
- assertEquals(1313341440, x <<= x);
- assertEquals(626, x >>>= ((((-333992843)%(tmp = -2742280618.6046286, tmp))>>>x)|x));
- assertEquals(0, x <<= (2598188575));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x ^= (x%((2507288229.3233204)&(tmp = -1714553169.9276752, tmp))));
- assertEquals(0, x /= ((633436914.3859445)>>>(tmp = 1579804050.6442273, tmp)));
- assertEquals(0, x *= ((tmp = 1172218326, tmp)<<((tmp = -2491306095.8456626, tmp)*(((tmp = 1305371897.9753594, tmp)>>((x^(((3077992060)*x)<<(492815553.904796)))>>((652151523)|x)))%x))));
- assertEquals(0, x <<= x);
- assertEquals(0, x %= (1118131711));
- assertEquals(0, x &= ((tmp = 2734673884, tmp)|(x-((tmp = 2694578672.8975897, tmp)*(((x>>(2350811280.974167))*(1052548515))&(x^(x*(tmp = -1336287059.0982835, tmp))))))));
- assertEquals(-2632782867.1256156, x += ((tmp = -2743992725.1256156, tmp)+(tmp = 111209858, tmp)));
- assertEquals(-0, x %= x);
- assertEquals(0, x >>>= (((tmp = -2050519887, tmp)^(106865302.74529803))>>(1642851915.2909596)));
- assertEquals(-171964826, x |= (tmp = -171964826.6087358, tmp));
- assertEquals(-2.113405951193522, x /= (tmp = 81368572.80206144, tmp));
- assertEquals(3, x >>>= x);
- assertEquals(0, x %= x);
- assertEquals(-1717345907.837667, x += (-1717345907.837667));
- assertEquals(-100964883, x |= (tmp = -109574931.80629134, tmp));
- assertEquals(-33849857, x |= (-974111718.2433801));
- assertEquals(1, x >>>= (tmp = -2556222849.005595, tmp));
- assertEquals(1, x /= x);
- assertEquals(0, x >>>= (-1796630999.4739401));
- assertEquals(0, x >>>= x);
- assertEquals(2031695758, x += (((x/(((tmp = -2364918403, tmp)%(x^((tmp = 277767803.6375599, tmp)>>((((tmp = 540036080, tmp)/(x|(2665298931)))/(x|((x>>(-2035456216.6165116))<<(2143184420.5651584))))^x))))&(tmp = 927798419.8784283, tmp)))-(-2031695758))>>>x));
- assertEquals(2031695758, x |= x);
- assertEquals(2031695758, x <<= (((x>>(x%x))|(tmp = -1164531232.7384055, tmp))*x));
- assertEquals(124004, x >>>= x);
- assertEquals(529846352, x += ((529722348)%((2417645298.865121)|(x>>(x>>>(x+x))))));
- assertEquals(60067920, x &= (((tmp = -3166008541.8486233, tmp)-x)|(x%x)));
- assertEquals(1415594240755200, x *= ((-2786707452.873729)>>(((tmp = -2369315809, tmp)*((1559868465)|(1011218835.1735028)))>>>x)));
- assertEquals(1415595182259140, x += (941503939.9023957));
- assertEquals(0, x <<= ((tmp = 2887184784.265529, tmp)/(-2575891671.0881453)));
- assertEquals(0, x &= ((tmp = -1546339583, tmp)>>>(tmp = -587433830, tmp)));
- assertEquals(0, x *= (((tmp = 1356991166.5990682, tmp)%(tmp = -284401292, tmp))*(1869973719.9757812)));
- assertEquals(NaN, x %= x);
- assertEquals(0, x ^= (((tmp = 92575404.43720293, tmp)>>>(263475358.17717505))%x));
- assertEquals(0, x <<= (((561514358)*(tmp = -439584969, tmp))%((((-3005411368.7172136)+x)|(-2230472917))&x)));
- assertEquals(0, x >>= ((x>>>x)-((x-(1630649280.510933))+x)));
- assertEquals(0, x >>= (tmp = -1772403084.7012017, tmp));
- assertEquals(0, x *= x);
- assertEquals(0, x += x);
- assertEquals(0, x &= x);
- assertEquals(0, x >>= (tmp = 1622680387, tmp));
- assertEquals(1033887633558225200, x -= ((-510616337)*(tmp = 2024783695, tmp)));
- assertEquals(-2.8073538539158063e+27, x *= (tmp = -2715337492, tmp));
- assertEquals(-2.8073538539158063e+27, x -= ((tmp = -1664804757, tmp)&((tmp = -226616419, tmp)>>>(1006711498))));
- assertEquals(1894539615, x |= (tmp = -2400427681.1831083, tmp));
- assertEquals(7400545, x >>= (774629608.4463601));
- assertEquals(456756268, x += (449355723));
- assertEquals(285771784, x &= (-1316427366));
- assertEquals(17, x >>= ((tmp = -220509931.20787525, tmp)*(((tmp = 2518859292, tmp)+(-1477543005.1586645))>>(tmp = 3172820250.687789, tmp))));
- assertEquals(85924262443, x *= (x*((tmp = -2856669745.965829, tmp)&(((tmp = 401420695, tmp)^(tmp = 2355371132, tmp))|(tmp = 590645330.021911, tmp)))));
- assertEquals(1703875715, x ^= ((-2576394029.7843904)-x));
- assertEquals(1703875715, x %= (tmp = 2234144310, tmp));
- assertEquals(271405807, x ^= (1973569132));
- assertEquals(1060178, x >>>= (tmp = -84823096, tmp));
- assertEquals(8, x >>>= (tmp = 2246120561.905554, tmp));
- assertEquals(-2846791089, x += (-2846791097));
- assertEquals(104933962, x &= (x-(-2969030955.99584)));
- assertEquals(489215611.96215343, x -= (-384281649.96215343));
- assertEquals(489215611, x |= x);
- assertEquals(1186191360, x <<= ((tmp = 774407142.993727, tmp)%x));
- assertEquals(1186191360, x %= (1555004022));
- assertEquals(-1697134080, x ^= (tmp = -597421568, tmp));
- assertEquals(-1102053376, x <<= ((-927370769.4059179)^((tmp = 1093490918, tmp)>>(((-2522227493.3821955)%x)+(-2657319903)))));
- assertEquals(1086450058, x ^= (-23991926.187098265));
- assertEquals(1086450058, x |= x);
- assertEquals(-1.6554590588410778, x /= (x|(x<<(x+x))));
- assertEquals(67108863, x >>>= ((-926530233)+x));
- assertEquals(494553310, x ^= (tmp = 512079649, tmp));
- assertEquals(207751168, x &= (2892146720.6261826));
- assertEquals(207751168, x &= x);
- assertEquals(207751168, x |= x);
- assertEquals(6340, x >>>= (((((x<<(x-((-2819638321)*((x<<x)+x))))>>x)+(tmp = 2016170261, tmp))+(tmp = 2755496043.772017, tmp))+(-841368625.1402085)));
- assertEquals(6340, x ^= ((x/(tmp = -192734784, tmp))>>>(((-140306239)&x)-x)));
- assertEquals(1, x /= x);
- assertEquals(0, x >>= x);
- assertEquals(26786600, x ^= (tmp = 26786600, tmp));
- assertEquals(-0.014657576899542954, x /= ((-1454855938.0338)+(-372635753.3681567)));
- assertEquals(0, x &= ((tmp = 2480635933, tmp)&(-2986584704.9165974)));
- assertEquals(-2108639122, x += ((tmp = 2108639123.8683565, tmp)^((-881296055)/(((x<<(2026200582))|(tmp = -862495245.138771, tmp))-(-1111596494.892467)))));
- assertEquals(1893466112, x <<= (tmp = 607974481, tmp));
- assertEquals(1893466112, x |= x);
- assertEquals(1133122783.997418, x += ((tmp = -760343332, tmp)-((x-(tmp = -878561823.4218843, tmp))/(tmp = -693454632.596637, tmp))));
- assertEquals(8, x >>>= (tmp = 700339003.3919828, tmp));
- assertEquals(4.605305035175536e-9, x /= (1737127060.8343256));
- assertEquals(4.605305035175536e-9, x -= ((x%(897221779))>>>x));
- assertEquals(-1864423625.5704088, x += (tmp = -1864423625.5704088, tmp));
- assertEquals(1132240092, x <<= (1304417186.1193643));
- assertEquals(-2088985380, x ^= (x<<x));
- assertEquals(-4, x >>= ((tmp = 1959823884.0935726, tmp)%(-1679792398.569136)));
- assertEquals(-268435456, x <<= ((tmp = 2586838136, tmp)|((tmp = -481716750.718518, tmp)>>>((1485826674.882607)/(tmp = -2826294011, tmp)))));
- assertEquals(-32768, x >>= (2060648973));
- assertEquals(1, x /= x);
- assertEquals(-2838976297, x -= (tmp = 2838976298, tmp));
- assertEquals(-1382985298, x <<= ((tmp = -2104305023, tmp)&x));
- assertEquals(10, x >>>= (x+x));
- assertEquals(10, x -= (x>>>(361588901.70779836)));
- assertEquals(854603510, x -= (-854603500));
- assertEquals(-557842432, x <<= (tmp = 1212985813.6094751, tmp));
- assertEquals(-459390188241943040, x *= (tmp = 823512450.6304014, tmp));
- assertEquals(-232800033621957060, x /= ((((((686635689)/(tmp = 2013252543, tmp))*(tmp = -1591617746.8678951, tmp))|(((tmp = -1777454093.5611362, tmp)>>>((tmp = 2680809394, tmp)^(((x>>((((((tmp = -265022244, tmp)%((tmp = -3075004537, tmp)>>(((((1427784269.5686688)^((tmp = -1095171528.911587, tmp)^(-942424985.7979553)))>>(-1279441481.1987405))*((2493620394)>>(-2769016043)))/(x&((tmp = 2059033657, tmp)%(((tmp = 1948606940.1488457, tmp)-(tmp = -2645984114.13219, tmp))^x))))))^x)^x)%(x%((((tmp = 3209433446.4551353, tmp)%(tmp = 1364430104.0424738, tmp))/(tmp = -2103044578.349498, tmp))+(tmp = -2613222750, tmp))))*(2099218034)))&(((tmp = -378500985.49700975, tmp)>>(((x+x)|(x%(((-1841907486)<<(-1220613546.194021))<<(tmp = -1260884176, tmp))))^(tmp = 1858784116, tmp)))>>>((x%x)%((x>>>(tmp = -2540799113.7667685, tmp))|x))))/((((tmp = 642072894.6455215, tmp)-(-324951103.6679399))*(tmp = 1424524615, tmp))+((x<<(tmp = -904578863.5945344, tmp))*(tmp = 49233475.435349464, tmp))))))<<(tmp = 1680210257, tmp)))+((tmp = -1516431503, tmp)>>>(-1105406695.3068116)))/(-275019361.6764543)));
- assertEquals(192359387.42913792, x /= (-1210234846));
- assertEquals(192359387.42913792, x %= (-2920206625.0154076));
- assertEquals(192359387.42913803, x -= (((((((tmp = -1263203016.3258834, tmp)-(2432034005.6011124))&x)<<(1479434294))>>((tmp = -1695856315.523002, tmp)>>>(tmp = 557391345, tmp)))/(tmp = -1280240246.2501266, tmp))%((tmp = -2196489823.034029, tmp)>>(((x&((912221637.1101809)+((tmp = -3003677979.652423, tmp)>>(tmp = -716129460.1668484, tmp))))-((x+(x-(-2780610859)))>>>(-2445608016)))<<((x*(x+(x+(((-2124412727.9007604)%(tmp = -593539041.5539455, tmp))&(tmp = 2404054468.768749, tmp)))))%(x>>(tmp = -2913066344.404591, tmp)))))));
- assertEquals(11740, x >>= (688848398.7228824));
- assertEquals(11740, x >>= ((1545765912)*(307650529.9764147)));
- assertEquals(23480, x += x);
- assertEquals(0, x >>>= ((tmp = 1313078391, tmp)|x));
- assertEquals(1726251264, x -= ((1939413887)<<(1004888744.2840619)));
- assertEquals(765324793.5278986, x %= (960926470.4721014));
- assertEquals(747387, x >>= ((2483010044)-(tmp = -413698190, tmp)));
- assertEquals(1, x /= x);
- assertEquals(3016811624, x *= (3016811624));
- assertEquals(17408, x &= (((tmp = -991624868, tmp)<<(((63107932)/(tmp = 2659939199, tmp))|(tmp = -1968768911.3575773, tmp)))>>(((-2876822038.9910746)|(tmp = 2550230179.243425, tmp))<<((x*(x<<((x<<((tmp = -1627718523.616604, tmp)|((2154120561.254636)-(x%(x<<(1484563622.1791654))))))<<((((x^(tmp = 3016524169, tmp))<<(((x+(tmp = 1887816698.2455955, tmp))+x)-x))-(-3023329069))-x))))+x))));
- assertEquals(0, x <<= (((1247441062.177967)/(-1717276234))+x));
- assertEquals(0, x |= ((x%((-1648299429.4520087)>>(-137511052)))>>(tmp = 221301016.4926411, tmp)));
- assertEquals(0, x /= ((-2598501544.913707)>>>(-2177037696)));
- assertEquals(NaN, x %= (x>>x));
- assertEquals(0, x &= (tmp = 1852419158, tmp));
- assertEquals(-829029120, x |= (((2122339180)*((((((tmp = 768748914, tmp)<<((1008490427)&((1937367899.957056)-(((635094486)>>(((tmp = -795046025, tmp)*(2665104134.4455256))^(tmp = 706594584.2462804, tmp)))/(504397522)))))/(-556057788))>>((x/(tmp = -2732280594, tmp))-x))+(-1989667473))+(tmp = 2766802447.789895, tmp)))<<(((tmp = -2969169096, tmp)-x)+(tmp = 2093593159.0942125, tmp))));
- assertEquals(0.6451933462602606, x /= ((-1284931292)<<(x<<(tmp = 1294716764, tmp))));
- assertEquals(1515416866.520901, x *= (2348779440));
- assertEquals(-1620606242886682600, x *= ((-993898625.5357854)&(((tmp = -571100481, tmp)/x)*((2428590177.311031)%(tmp = -2671379453, tmp)))));
- assertEquals(-1137472828, x %= (tmp = -1195183004, tmp));
- assertEquals(-3096634005473250000, x *= (tmp = 2722380640, tmp));
- assertEquals(-3096634003996758500, x -= (-1476491033.833419));
- assertEquals(-3096634000805538000, x += (3191220521.978341));
- assertEquals(-3096634000805468000, x += ((((tmp = -3024976741, tmp)&(952616360))|((x*(-1547952311))+(x*x)))>>>(tmp = 981373323, tmp)));
- assertEquals(-3096633998655594000, x += (2149873927));
- assertEquals(-118812224101.54297, x %= (((2641881276.9898443)*(((502159480)^x)<<x))%((tmp = -2840045365.547772, tmp)*(((((-2297661528)>>>(x>>(-229103883.94961858)))&(((-1285047374.6746495)<<((-360045084)>>>((x-(tmp = -956123411.1260898, tmp))%x)))>>((tmp = -2375660287.5213504, tmp)+((((tmp = -2753478891, tmp)>>>(((tmp = 101438098, tmp)>>(((tmp = -2736502951, tmp)<<((tmp = -3084561882.368902, tmp)&(tmp = 1491700884, tmp)))|x))&(tmp = 1627412882.6404104, tmp)))>>>(tmp = 1039002116.6784904, tmp))<<((tmp = -2840130800, tmp)-(tmp = -740035567, tmp))))))&(tmp = -416316142, tmp))>>x))));
- assertEquals(86, x >>>= (tmp = -293489896.5572462, tmp));
- assertEquals(172, x += (x%((((-2635082487.364155)|((-2361650420.634912)&(-2147095650.7451198)))<<((tmp = 2258905145.9231243, tmp)%((((tmp = -1365987098.5130103, tmp)*(((((((932437391)/x)/(289270413.0780891))%(x-x))+((((2194986374.917528)>>(((((tmp = -1553805025, tmp)|x)^(((x>>(-564400586.0780811))^(tmp = 1738428582.0238137, tmp))>>(tmp = 1717774140, tmp)))&(tmp = -2789427438, tmp))%(((tmp = -1386118057, tmp)*(-2333221237.7915535))*(x>>>(((((41346648.46438944)&x)%(-478973697.6792319))|(tmp = 2108106738, tmp))/x)))))-(tmp = -133437701.64136505, tmp))>>>x))+(tmp = -1567210003, tmp))*(x+((x&x)-(2942851671)))))>>>(tmp = -446377136, tmp))*((((((tmp = 1597203255, tmp)>>>(619157171))|(-2766246629.005985))>>((tmp = 3130227370, tmp)%x))*(tmp = 2072227901.6101904, tmp))|((tmp = 1369019520, tmp)^(759659487))))))>>>x)));
- assertEquals(1996475731, x ^= ((1456327892.2281098)|(1728022827)));
- assertEquals(0, x %= x);
- assertEquals(0, x &= (1323847974));
- assertEquals(3076829073.8848357, x += (3076829073.8848357));
- assertEquals(9569842648396755000, x *= (3110293883.2782717));
- assertEquals(9569842646260304000, x -= (2136450372.9038036));
- assertEquals(9.158188827418242e+37, x *= x);
- assertEquals(0, x <<= ((x&(tmp = -2241179286, tmp))+((tmp = 2553144081, tmp)&((tmp = -1914709694, tmp)^(tmp = -1469651409.0651562, tmp)))));
- assertEquals(0, x <<= x);
- assertEquals(0, x /= (2177840666.276347));
- assertEquals(0, x %= (-690827104));
- assertEquals(0, x >>>= x);
- assertEquals(0, x ^= x);
- assertEquals(-0, x /= (tmp = -803415280, tmp));
- assertEquals(-2355576914.316743, x += (-2355576914.316743));
- assertEquals(-833671722514674000, x *= ((3053388806.692315)-(tmp = 2699474775.081724, tmp)));
- assertEquals(1, x /= x);
- assertEquals(1898147684, x += ((tmp = 1898147683, tmp)|(x<<x)));
- assertEquals(2.192324660388075, x %= ((tmp = 2630187518, tmp)/((2868794982.790862)|(490860748))));
- assertEquals(0, x >>>= ((2751021779)/(-952522559)));
- assertEquals(321040461, x ^= ((321040461.153594)-x));
- assertEquals(-2.3814602031636922, x /= ((tmp = -170472190, tmp)|x));
- assertEquals(-1, x >>= (2200125174.177402));
- assertEquals(-2964432647.9379396, x += (-2964432646.9379396));
- assertEquals(-370116502.93793964, x %= (tmp = -518863229, tmp));
- assertEquals(777927355.2283959, x -= (-1148043858.1663356));
- assertEquals(0, x *= ((tmp = 1134913539, tmp)&(((x>>>((tmp = -989822787, tmp)>>>x))%x)&(tmp = 1078636160.7313156, tmp))));
- assertEquals(-1089245637, x ^= (3205721659.3548856));
- assertEquals(-1192493056, x <<= (-1173291054));
- assertEquals(78013832, x += ((tmp = 2462999944, tmp)+x));
- assertEquals(0, x %= x);
- assertEquals(0, x >>>= (1794908927.7409873));
- assertEquals(1708338504, x += ((-2586628792.3484306)<<x));
- assertEquals(12, x >>= (-545794789.3827574));
- assertEquals(0, x &= ((2753207225)<<(((-1776581207.557251)+((tmp = -2414140402, tmp)*x))+(x<<(x|(tmp = 772358560.3022032, tmp))))));
- assertEquals(0, x <<= ((tmp = -2755724712.152605, tmp)/((x>>(-732875466))&x)));
- assertEquals(NaN, x *= (((tmp = 2617815318.1134562, tmp)/x)%(x|((((((-851659337.194871)<<(tmp = 2072294700, tmp))%((x+(2193880878.5566335))^((tmp = 3005338026, tmp)-(2947963290))))/x)/(x+(2091745239.4210382)))-(x>>x)))));
- assertEquals(NaN, x /= (tmp = -427684595.0278094, tmp));
- assertEquals(NaN, x /= (tmp = -263945678, tmp));
- assertEquals(0, x <<= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x -= (((x>>((x&x)-(tmp = -673697315, tmp)))>>(((1575095242.2330558)/(x-(-1816886266)))%(-1580195729)))>>>x));
- assertEquals(0, x >>>= x);
- assertEquals(0, x >>= (-2815518206));
- assertEquals(0, x -= (x/(1795634670.692437)));
- assertEquals(-2753579891, x += (tmp = -2753579891, tmp));
- assertEquals(2.7773776150171776, x /= (tmp = -991431585, tmp));
- assertEquals(5.554755230034355, x += x);
- assertEquals(3.362161997528237e-9, x /= (1652137890.4758453));
- assertEquals(3.362161997528237e-9, x %= (tmp = -10848734.527020693, tmp));
- assertEquals(1, x /= x);
- assertEquals(-2978012493, x -= (x+(2978012493)));
- assertEquals(-5.158905851797543, x /= (((x+((tmp = -2548840164, tmp)>>x))<<(x^((tmp = -533281232.7294345, tmp)&x)))&(tmp = -1502692171, tmp)));
- assertEquals(-5.158905851797543, x %= (-3009435255.5612025));
- assertEquals(-20971520, x <<= ((tmp = -2728812464, tmp)%(2619809573.672677)));
- assertEquals(-1900019712, x &= (2398099552));
- assertEquals(-1991377, x %= ((tmp = 1562364373.7334614, tmp)>>>(((x-(-946283217))<<(-2044590694))^(((tmp = 1681238509, tmp)>>(-2801649769))-x))));
- assertEquals(1, x /= x);
- assertEquals(1, x %= (x/(x-x)));
- assertEquals(1.3525631913093335e-9, x /= (739336991));
- assertEquals(0, x &= ((x&(x|(-1530424204)))<<((((tmp = -295143065.9115021, tmp)>>x)+x)<<x)));
- assertEquals(0, x <<= (-1311017801));
- assertEquals(-0, x /= (-667133339.1918633));
- assertEquals(1038307283, x += (1038307283));
- assertEquals(506985, x >>>= ((tmp = 1550624472.9157984, tmp)^x));
- assertEquals(506985, x >>>= ((254646626)<<(tmp = 1572845412.744642, tmp)));
- assertEquals(32447040, x <<= (tmp = -2427326042, tmp));
- assertEquals(0, x -= (x<<((x|x)>>>x)));
- assertEquals(0, x &= x);
- assertEquals(0, x &= ((-484420357)|((tmp = 807540590.6132902, tmp)/(x/x))));
- assertEquals(-890607324, x ^= ((tmp = -890607324, tmp)>>((((-2876826295)>>x)<<((tmp = 2351495148.117994, tmp)>>(tmp = 1368611893.274765, tmp)))*(tmp = 1531795251, tmp))));
- assertEquals(-729075363, x += (x+(tmp = 1052139285, tmp)));
- assertEquals(531550884933581760, x *= x);
- assertEquals(1980836332, x ^= ((-746269795.2320724)-((2400458512)>>((1290672548)>>>((((1536843439.5629003)&(3185059975.158061))*(tmp = -1339249276.2667086, tmp))&x)))));
- assertEquals(941373096, x %= ((x+(-451098412))^(tmp = 1725497732, tmp)));
- assertEquals(-1766019323, x += (tmp = -2707392419, tmp));
- assertEquals(2528947973, x >>>= (x^(-896237435.3809054)));
- assertEquals(-263192576, x <<= (-866361580));
- assertEquals(-2008, x >>= (-2608071791));
- assertEquals(-88, x %= (((-1076807218.4792447)&((tmp = 601044863, tmp)>>((tmp = 1228976729, tmp)+((((-2711426325)*x)|x)|(x%(-2700007330.3266068))))))&(tmp = 3147972836.778858, tmp)));
- assertEquals(1762886843, x ^= (tmp = 2532080403, tmp));
- assertEquals(1762886843, x %= ((((((tmp = -2059247788, tmp)>>x)/x)+(x<<x))^x)>>>(-1969283040.3683646)));
- assertEquals(4812334726.587896, x += (tmp = 3049447883.587897, tmp));
- assertEquals(1, x /= x);
- assertEquals(1, x *= x);
- assertEquals(-2150507334, x -= ((tmp = 1578221999, tmp)+(tmp = 572285336, tmp)));
- assertEquals(-4546475858941548500, x *= ((tmp = -931533139.5546813, tmp)^(tmp = 3061503275, tmp)));
- assertEquals(-269064192, x |= ((207217276.91936445)<<(tmp = -957353678.4997551, tmp)));
- assertEquals(1, x /= x);
- assertEquals(1, x <<= (((1463856021.8616743)%(x*(tmp = -2286419102, tmp)))/(-2852887593)));
- assertEquals(2223868564.8383617, x *= (tmp = 2223868564.8383617, tmp));
- assertEquals(918797189.9033995, x -= ((1305071374.9349623)%(x+(2211992629))));
- assertEquals(-2212004787.4668465, x -= (tmp = 3130801977.370246, tmp));
- assertEquals(31783, x >>= (2951958960));
- assertEquals(31783, x ^= ((((tmp = -2441511566, tmp)&((tmp = 91427553.90168321, tmp)+((tmp = 3001737720.327718, tmp)%x)))>>>(-2263859841))>>>((2109161329)>>(tmp = -2816295136.7443414, tmp))));
- assertEquals(4068224, x <<= (x%((tmp = -682576250.4464607, tmp)*(x/(((x-x)>>>(x&((((x<<(x<<x))>>>((((2243036981.528562)/(((-1839328916.9411087)>>(-1907748022.162144))<<(x+x)))+((tmp = 2362574171, tmp)<<(tmp = 1987834539, tmp)))|(-444329240)))|(399451601.1717081))>>x)))&(968363335.6089249))))));
- assertEquals(0.0030991932898194294, x /= ((tmp = 1067316540.5529796, tmp)^(-2388640366)));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>>= (tmp = -393433349.1636851, tmp));
- assertEquals(0, x *= (((x^(((1806955787.471396)<<x)^((517668047.55566347)>>>(x%(x<<(tmp = -276586733.4844558, tmp))))))%(1661242196.1472542))|x));
- assertEquals(0, x |= (x>>x));
- assertEquals(-155236210, x |= (tmp = -155236210.19366312, tmp));
- assertEquals(-606392, x >>= ((tmp = -1533446042.97781, tmp)^x));
- assertEquals(-1, x >>= (936126810));
- assertEquals(2325115611, x -= (-2325115612));
- assertEquals(0, x -= x);
- assertEquals(0, x >>= (tmp = -354826623, tmp));
- assertEquals(-0, x *= (-1232528947.7321298));
- assertEquals(0, x |= x);
- assertEquals(0, x <<= (((tmp = 187758893.4254812, tmp)&(x-(tmp = 648201576, tmp)))&(385106597)));
- assertEquals(0, x >>= (tmp = 2554891961, tmp));
- assertEquals(-1311492611.2970417, x += (-1311492611.2970417));
- assertEquals(-688179220.3221785, x += (623313390.9748632));
- assertEquals(1416835528, x &= (tmp = 1953739224, tmp));
- assertEquals(-11.04719252755072, x /= (-128252995));
- assertEquals(-6.287413042114223e-9, x /= (tmp = 1757033052.1558928, tmp));
- assertEquals(-4231171, x |= (((((2022730885.7773404)*((-2495777565.221855)|(tmp = 274627292, tmp)))<<(-3072596920.4902725))>>>((-2215057529)+(-1134713759.4247034)))^((tmp = -1888181788, tmp)/(572025985.2748461))));
- assertEquals(-4194305, x |= ((tmp = 167328318.038759, tmp)>>>(153800904.34551537)));
- assertEquals(-1316525687, x -= (1312331382));
- assertEquals(1448723245.7863903, x += (2765248932.7863903));
- assertEquals(1.7219707102205526, x /= (tmp = 841317008, tmp));
- assertEquals(1872027792.5217001, x *= (x|(tmp = 1087142645.6665378, tmp)));
- assertEquals(3504488055973669400, x *= x);
- assertEquals(-1075254784, x |= x);
- assertEquals(-5, x >>= (((844461331.8957539)-((x&x)<<((tmp = 1443904777, tmp)+(tmp = 736164505.3670597, tmp))))-(((tmp = 1348422110, tmp)>>((tmp = -2878252514, tmp)/(-1175443113)))|((-2138724317)%(2057081133)))));
- assertEquals(-3.038875804165675e-9, x /= (1645345292.8698258));
- assertEquals(1.25204541454491e-18, x /= (-2427129055.274914));
- assertEquals(-1.7151576137235622e-9, x *= (-1369884505.6247284));
- assertEquals(1590804618, x ^= (1590804618.4910607));
- assertEquals(5061318665300252000, x *= (x+x));
- assertEquals(5061318665300252000, x %= ((tmp = 1102144242, tmp)*x));
- assertEquals(-7, x >>= (2772167516.624264));
- assertEquals(16383, x >>>= (-2979259214.5855684));
- assertEquals(47108415435, x *= ((2944456517.839616)>>>(1041288554.5330646)));
- assertEquals(61, x >>>= (x^(((-1305163705)<<((948566605)-x))-x)));
- assertEquals(0, x %= x);
- assertEquals(0, x ^= (((tmp = 1918861879.3521824, tmp)/((x%(tmp = 945292773.7188392, tmp))%(x|x)))>>x));
- assertEquals(-0, x *= ((((x|((2810775287)|(tmp = 1265530406, tmp)))^((tmp = 3198912504.175658, tmp)-(((tmp = 1422607729.281712, tmp)<<(tmp = 2969836271.8682737, tmp))&x)))<<((tmp = 844656612, tmp)*(((((tmp = -828311659, tmp)%(((-2083870654)>>>(x^(((((933133782)-(tmp = 1033670745, tmp))-(629026895.4391923))%((-605095673.8097742)*((((-227510375.38460112)*x)+x)&(((((tmp = 472873752.68609154, tmp)^(tmp = 2815407038.712165, tmp))+((x>>>((tmp = -1331030665.3510115, tmp)>>>(2281234581)))-(x>>>x)))&(tmp = -2160840573.325921, tmp))&x))))<<(tmp = 1411888595, tmp))))|(((tmp = -915703839.0444739, tmp)/((x+(418836101.8158506))%(-1112605325.4404268)))&((-3098311830.6721926)-x))))-((49446671.477988124)*(-2522433127)))+((tmp = 443068797, tmp)>>(tmp = 418030554.97275746, tmp)))*((tmp = 38931296.738208175, tmp)+(1842742215.3282685)))))-((tmp = 1325672181.205841, tmp)^(tmp = 669284428, tmp))));
- assertEquals(-0, x *= (tmp = 93843030, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>>= (x%((((((tmp = -107458601, tmp)>>(x*((x|((tmp = 2117286494, tmp)>>((x^(tmp = 114214295.42048478, tmp))>>>(tmp = 1032826615, tmp))))&((x*x)&(-225386977.67686415)))))^((-780566702.5911419)+(-1113319771)))|(((x^x)<<(1288064444))>>(-2292704291.619477)))>>(365125945))-((tmp = -1986270727.235776, tmp)/x))));
- assertEquals(-0, x *= (((-18925517.67125845)|((((-1975220517)+(tmp = -1250070128.296064, tmp))+(1085931410.5895243))<<(((x|(((x*(tmp = 160207581.50536323, tmp))|(tmp = 1798744469.7958293, tmp))-x))>>>(((x+((x%x)&((((x^x)<<((tmp = 2538012074.623554, tmp)^x))*x)&x)))/(x+(tmp = -2563837407, tmp)))/(tmp = 2189564730, tmp)))/(((-1703793330.5770798)<<((176432492)|x))<<(1347017755.345185)))))<<(((tmp = -577100582.7258489, tmp)&x)/(-31246973))));
- assertEquals(0, x >>>= x);
- assertEquals(NaN, x %= ((x*(tmp = 1167625971, tmp))&(((tmp = -770445060, tmp)>>((339248786)^((2058689781.2387645)-((-2381162024)*(660448066)))))&x)));
- assertEquals(NaN, x += ((3088519732.515986)-(-267270786.06493092)));
- assertEquals(0, x &= (tmp = 2748768426.3393354, tmp));
- assertEquals(-1109969306, x ^= ((-1109969306)>>>x));
- assertEquals(-1109969306, x %= (tmp = 1150376563.581773, tmp));
- assertEquals(-2058145178, x &= (-2057586057));
- assertEquals(-850185626, x |= ((x^(tmp = 1223093422, tmp))&((-589909669)<<(2299786170))));
- assertEquals(1489215443, x += (2339401069));
- assertEquals(-23592960, x <<= x);
- assertEquals(2063937322, x ^= (-2053296342.2317986));
- assertEquals(12922122, x %= (x^((-2259987830)>>(x*(((tmp = -799867804.7716949, tmp)&(tmp = -1068744142, tmp))*(((((1091932754.8596292)-((tmp = -1778727010, tmp)>>(((tmp = 1207737073.2689717, tmp)-(x-(tmp = -1191958946, tmp)))+(-631801383.7488799))))-(-618332177))>>>(-156558558))>>>(3032101547.6262517)))))));
- assertEquals(12922122, x &= x);
- assertEquals(Infinity, x /= (x%x));
- assertEquals(0, x &= (x*(-227800722.62070823)));
- assertEquals(-865648691, x ^= (-865648691));
- assertEquals(1, x /= (x%(tmp = 1524739353.8907173, tmp)));
- assertEquals(16, x <<= (x<<(2335214658.789205)));
- assertEquals(0, x &= ((tmp = 570332368.1239192, tmp)^(-2278439501)));
- assertEquals(1881145344, x -= (((-569715735.8853142)+(2093355159))<<(tmp = 2788920949, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x -= ((tmp = -1427789954, tmp)%((((((411038329.49866784)-x)-(x<<((-1330832247)+x)))/x)^((x*(845763550.2134092))>>(tmp = 1427987604.5938706, tmp)))>>>(1857667535))));
- assertEquals(NaN, x /= (-313793473));
- assertEquals(0, x >>>= (x/x));
- assertEquals(1869358566, x -= (-1869358566));
- assertEquals(-1901664519209545200, x += ((tmp = 944729941.3936644, tmp)*(-2012918653)));
- assertEquals(-1901664519209545200, x += ((tmp = 1348246793, tmp)/(x&x)));
- assertEquals(-1576791552, x &= (tmp = 2719250966.739456, tmp));
- assertEquals(-305087899, x ^= (-2955630491.030272));
- assertEquals(0, x ^= (x%(1575252839.559443)));
- assertEquals(4184604407, x += ((((tmp = -244720076.17657042, tmp)|(2819320515))^((((tmp = 1222623743.9184055, tmp)*(-95662379.577173))/(x/(x+(((x-(tmp = -3024718107.6310973, tmp))^(-1494390781))&(tmp = 2284054218.8323536, tmp)))))>>>(tmp = 2090069761, tmp)))>>>(x%x)));
- assertEquals(3148907440, x -= (((tmp = -332379100.7695112, tmp)-(-1145399547))^(((((((tmp = 3133792677.785844, tmp)+x)<<(2306999139.5799255))>>((tmp = -2051266106, tmp)*(((((x+(((-728654312.8954825)>>(x>>>(((x%x)&(-1587152364))|(((((-2114138294)&x)&(1547554688))^x)-(-1856094268)))))*(((-1135018784)&((x+(tmp = -1444020289, tmp))|x))+x)))>>x)&x)/(2449005489))<<((131073798.64314616)%(x>>>((-2592101383.2205048)^(tmp = -757096673.0381112, tmp)))))))^(2766467316.8307915))-(-2465892914.515834))-((((tmp = 234064056, tmp)^((x>>>(1622627548.7944543))+(-1750474146)))|(-1959662039.4687617))^((-1222880974)&(-2794536175.906498))))));
- assertEquals(-1157627488, x &= (-1156639323));
- assertEquals(-1342170624, x <<= ((x/((((1829945345.0613894)/(x*((tmp = 1278865203.0854595, tmp)/(((tmp = -2298274086.519347, tmp)+(tmp = -545203761, tmp))-(tmp = 2712195820, tmp)))))>>>((tmp = 240870798.9384452, tmp)-(tmp = -3188865300.4768195, tmp)))>>>(x%((648799266)>>>(tmp = 24460403.864815235, tmp)))))|((tmp = 232533924, tmp)|x)));
- assertEquals(-2684341248, x += x);
- assertEquals(1073755136, x &= (((-662718514.9245079)>>(tmp = -1915462105, tmp))+(tmp = 1478850441.8689613, tmp)));
- assertEquals(-1073755136, x /= (x|((tmp = -1767915185, tmp)|((325827419.1430224)|(((-1343423676)|(tmp = -1929549501, tmp))|(-866933068.9585254))))));
- assertEquals(-1073755136, x %= ((tmp = 547342356, tmp)-((tmp = 2213249646.7047653, tmp)-((((((-2463314705)^(tmp = -993331620, tmp))^(((x%x)>>(tmp = 1798026491.3658786, tmp))-(((1024072781)/(tmp = -2407354455, tmp))%(1973295010))))<<(-1966787233))^x)|(-1787730004)))));
- assertEquals(-1073754452, x |= (tmp = 3099823788.077907, tmp));
- assertEquals(-1540683096, x &= (-1540674632.7013893));
- assertEquals(-1540683052, x ^= ((tmp = -126183090, tmp)>>>((-622437575.5788481)|((((tmp = -2947914022, tmp)%(((tmp = 2512586745, tmp)>>x)>>>((27238232.23677671)/(tmp = 3203958551, tmp))))/(tmp = 2906005721.402535, tmp))^((((tmp = 1763897860.737334, tmp)^(1445562340.2485332))/x)+(-2393501217.716533))))));
- assertEquals(-1258599433, x |= (tmp = 351291767.59661686, tmp));
- assertEquals(-1241560065, x |= (626346046.5083935));
- assertEquals(-1241560065, x ^= ((2263372092)/((tmp = -2868907862, tmp)>>>x)));
- assertEquals(-893685228, x -= (tmp = -347874837, tmp));
- assertEquals(3401282068, x >>>= (x*x));
- assertEquals(0, x %= x);
- assertEquals(0, x >>>= x);
- assertEquals(-2079237393, x ^= (tmp = 2215729903, tmp));
- assertEquals(NaN, x %= ((((tmp = 3203450436, tmp)/(2867575150.6528325))&(1864945829))&((x&((((tmp = -1927086741.3438427, tmp)|x)|(-1783290909.3240588))*((-1074778499.0697656)*(x-((tmp = -848983542.8456669, tmp)^(tmp = -1324673961, tmp))))))>>(tmp = -2144580304.245896, tmp))));
- assertEquals(-43334009, x |= (x^(-43334009.72683525)));
- assertEquals(-43334009, x &= x);
- assertEquals(-43334009, x %= (tmp = 1252450645.060542, tmp));
- assertEquals(-43334009, x |= (((((((tmp = 968062202, tmp)/(x|(tmp = 2766801984, tmp)))*((2173353793.938968)>>(((tmp = -2459317247, tmp)<<(tmp = -2333601397, tmp))>>>((tmp = -578254251.8969193, tmp)*(tmp = 839964110.7893236, tmp)))))&(((1675305119)&(tmp = -929153707, tmp))*((x*x)*x)))/x)|(x/(tmp = 384740559.43867135, tmp)))%(1657362591)));
- assertEquals(0, x -= x);
- assertEquals(0, x %= (-1334758781.1087842));
- assertEquals(0, x -= x);
- assertEquals(-54, x += ((tmp = -1787151355.470972, tmp)>>((tmp = 237028977, tmp)>>(((2829473542)<<(x>>>(((((((x-(-1950724753))*(((x>>>(2807353513.6283565))<<((-583810779.1155353)>>(x*x)))>>(-1068513265)))^(x^(-696263908.5131407)))%(((tmp = -1325619399, tmp)<<((tmp = -1030194450, tmp)-x))^x))+((-2852768585.3718724)>>(tmp = -3160022361, tmp)))%(x&x))>>(tmp = 2667222702.5454206, tmp))))+((804998368.8915854)<<x)))));
- assertEquals(-54, x %= (-1601267268.4306633));
- assertEquals(1, x >>>= (tmp = -543199585.579128, tmp));
- assertEquals(4.732914708226396e-10, x /= (tmp = 2112862922, tmp));
- assertEquals(-4266932650, x -= ((((x^((((tmp = 2784618443, tmp)^(tmp = -2271260297.9010153, tmp))|((((tmp = -599752639.7516592, tmp)*(2751967680.3680997))^(tmp = -1478450055.578217, tmp))*x))-x))&((tmp = -520061982, tmp)-((tmp = 1400176711.9637299, tmp)^(((2100417541)|(x+(tmp = -674592897.0420957, tmp)))>>x))))^(tmp = -365650686.7947228, tmp))>>>((-2943521813)&(((tmp = -1888789582, tmp)>>(tmp = 700459655.488978, tmp))+(tmp = -1725725703.655931, tmp)))));
- assertEquals(224277168, x <<= (tmp = 2885115011.8229475, tmp));
- assertEquals(224277168, x %= (tmp = -2655345206.442777, tmp));
- assertEquals(850395136, x <<= (x-(((((-769868538.1729524)/((tmp = -298603579, tmp)%(x^x)))+((2691475692)|(((x>>>(628995710.4745524))^(x<<(((tmp = -1046054749, tmp)|(919868171))-x)))^((-1377678789.8170452)&((3065147797)%(tmp = 2638804433, tmp))))))^(tmp = -2036295169, tmp))&(((tmp = -157844758.08476114, tmp)*(tmp = -2819601496, tmp))&((((tmp = 78921441, tmp)<<(653551762.5197772))/(1801316098))*(-1479268961.8276927))))));
- assertEquals(1645565728, x ^= (tmp = 1353013024, tmp));
- assertEquals(1645565728, x >>>= x);
- assertEquals(3020513544, x += (1374947816));
- assertEquals(0, x %= x);
- assertEquals(0, x %= ((((((tmp = -304228072.4115715, tmp)>>>((-90523260.45975709)-(tmp = -3013349171.084838, tmp)))%((-1640997281)*((tmp = -1600634553, tmp)%((tmp = 557387864, tmp)<<((888796080.766409)|(x^((((x%(((((tmp = 1164377954.1041703, tmp)*x)|(2742407432.192806))&((tmp = 1707928950, tmp)<<(1279554132.4481683)))+(tmp = -2108725405.7752397, tmp)))%(tmp = -465060827, tmp))^((tmp = 2422773793, tmp)+x))^((((((((tmp = -1755376249, tmp)^((-267446806)^x))/(((tmp = -1808578662.4939392, tmp)+((tmp = -1997100217, tmp)+x))+(((tmp = -2469853122.411479, tmp)/x)>>(tmp = 660624616.7956645, tmp))))%((x<<((((((tmp = -1701946558, tmp)-(tmp = 133302235, tmp))>>>x)/(738231394))<<(-1060468151.4959564))&(((((-1877380837.4678264)|(tmp = 2366186363, tmp))%x)>>>(-2382914822.1745577))>>((-1874291848.9775913)<<(tmp = 2522973186, tmp)))))<<(-2672141993)))|(tmp = 732379966, tmp))%x)^x)^x))))))))%(tmp = 2385998902.7287374, tmp))*x)+(tmp = -2195749866.017106, tmp)));
- assertEquals(401488, x ^= (((-320896627)>>>(tmp = 2812780333.9572906, tmp))&(tmp = -2088849328, tmp)));
- assertEquals(-1661116571.0046256, x += (tmp = -1661518059.0046256, tmp));
- assertEquals(-1616122720, x <<= x);
- assertEquals(-1616122720, x >>= x);
- assertEquals(-390439413, x %= (tmp = -1225683307, tmp));
- assertEquals(-84189205, x |= ((x|(2054757858))^(((x<<(((x|x)|(((x>>>((-2938303938.1397676)<<((2993545056)^((tmp = -643895708.5427527, tmp)/((1371449825.5345795)-(1896270238.695752))))))-(tmp = 1061837650, tmp))+(x+(tmp = 3072396681, tmp))))>>(x-((((tmp = -1877865355.1550744, tmp)&x)%(-2766344937))>>>(2055121782)))))-((x<<x)|(tmp = -2742351880.1974454, tmp)))<<((-2600270279.219802)>>(-1625612979)))));
- assertEquals(-168378410, x += x);
- assertEquals(-168378410, x &= x);
- assertEquals(-1534983792, x &= (-1501412943));
- assertEquals(-1821543761, x ^= (938439487));
- assertEquals(-1821543761, x &= (x^(((tmp = -4237854, tmp)>>x)/x)));
- assertEquals(2358, x >>>= (2954252724.620632));
- assertEquals(4716, x <<= ((-75522382.8757689)/((tmp = 1074334479, tmp)|((tmp = -720387522, tmp)>>(x>>>(-3085295162.6877327))))));
- assertEquals(-1313079316, x |= (2981887904.020387));
- assertEquals(-1957790646, x -= (644711330));
- assertEquals(17831, x >>>= ((tmp = -2550108342, tmp)-(((tmp = 454671414.0146706, tmp)+(-661129693.9333956))>>(x>>>(((tmp = 1752959432.3473055, tmp)*(-2619510342.1812334))%(tmp = -456773274.2411971, tmp))))));
- assertEquals(689287937.6879716, x -= ((tmp = -397126863.6879716, tmp)-(((x>>x)^(x/(-1387467129.6278908)))|((x>>((tmp = -2361114214.8413954, tmp)<<(tmp = -805670024.4717407, tmp)))<<(-2724018098)))));
- assertEquals(1378575875.3759432, x += x);
- assertEquals(84112428460187.8, x *= (((((2681425112.3513584)%(tmp = -1757945333, tmp))|x)>>(-1793353713.0003397))%x));
- assertEquals(-3221, x >>= (-1976874128));
- assertEquals(-3221, x %= (((tmp = 2318583056.834932, tmp)|((tmp = -1016115125, tmp)+((-472566636.32567954)+x)))|(tmp = 3135899138.065598, tmp)));
- assertEquals(-6596608, x <<= x);
- assertEquals(-1249902592, x <<= (((tmp = -2025951709.5051148, tmp)/((-465639441)<<(-2273423897.9682302)))*((tmp = -2408892408.0294642, tmp)-(tmp = 1017739741, tmp))));
- assertEquals(73802092170444800, x *= (tmp = -59046275, tmp));
- assertEquals(-1619001344, x <<= x);
- assertEquals(0, x <<= (tmp = 1610670303, tmp));
- assertEquals(-0, x *= ((((x+(tmp = 2039867675, tmp))|(tmp = 399355061, tmp))<<(1552355369.313559))^x));
- assertEquals(0, x *= x);
- assertEquals(0, x >>>= (((2875576018.0610805)>>x)%(tmp = -2600467554, tmp)));
- assertEquals(2290405226.139538, x -= (-2290405226.139538));
- assertEquals(0, x %= x);
- assertEquals(0, x ^= (((tmp = 2542309844.485515, tmp)-x)%((-2950029429.0027323)/(tmp = 2943628481, tmp))));
- assertEquals(0, x += x);
- assertEquals(0, x -= x);
- assertEquals(0, x >>>= (tmp = 2337330038, tmp));
- assertEquals(0, x += (x/(((292272669.0808271)&(tmp = 2923699026.224247, tmp))^(tmp = 367745855, tmp))));
- assertEquals(0, x &= x);
- assertEquals(0, x %= ((tmp = 1565155613.3644123, tmp)<<(-308403859.5844681)));
- assertEquals(-1845345399.3731332, x += (tmp = -1845345399.3731332, tmp));
- assertEquals(5158590659731951000, x *= (-2795460763.8680177));
- assertEquals(-364664, x >>= (1837745292.5701954));
- assertEquals(1, x /= x);
- assertEquals(-860616114.8182092, x += ((tmp = 2076961323.1817908, tmp)+(-2937577439)));
- assertEquals(-860616115, x ^= ((x*(tmp = 2841422442.583121, tmp))>>>((tmp = 1929082917.9039137, tmp)>>(-2602087246.7521305))));
- assertEquals(-38387843, x |= (3114677624));
- assertEquals(2927507837, x += (tmp = 2965895680, tmp));
- assertEquals(1, x /= x);
- assertEquals(-1792887531, x *= (-1792887531));
- assertEquals(-0, x %= ((x^x)+x));
- assertEquals(-0, x %= (tmp = 2800752702.562547, tmp));
- assertEquals(1384510548, x ^= (tmp = 1384510548, tmp));
- assertEquals(42251, x >>= (1645421551.363844));
- assertEquals(0, x >>>= (17537561));
- assertEquals(-2076742862, x ^= (tmp = 2218224434, tmp));
- assertEquals(-2.790313825067623, x /= (744268563.3934636));
- assertEquals(5313538, x &= (((((tmp = -2406579239.0691676, tmp)+((-1470174628)+(((tmp = -783981599, tmp)<<(tmp = -1789801141.272646, tmp))^(((((((tmp = -844643189.5616491, tmp)&(tmp = -252337862, tmp))&(x|x))%((-3159642145.7728815)+(tmp = 2149920003.9525595, tmp)))&(x>>(1737589807.9431858)))-((((((((1610161800)<<(497024994))>>x)<<x)/x)>>>x)&x)-(757420763.2141517)))-(tmp = -3061016994.9596977, tmp)))))/(tmp = 1810041920.4089384, tmp))&(tmp = 5887654.786785364, tmp))&((tmp = 1626414403.2432103, tmp)+(x%x))));
- assertEquals(-2147483648, x <<= (tmp = 1304102366.8011155, tmp));
- assertEquals(-208418816, x %= (((((-2850404799)*(x+(3158771063.226051)))*(-2017465205))/(x>>x))>>(x%(tmp = 2760203322, tmp))));
- assertEquals(-2189223477, x -= (1980804661));
- assertEquals(-859239912, x ^= (tmp = 2974421971.3544703, tmp));
- assertEquals(-1599850415, x ^= (tmp = -2475871671.140151, tmp));
- assertEquals(-1600636847, x += ((((tmp = -1311002944, tmp)<<((tmp = -1137871342, tmp)<<(tmp = 115719116, tmp)))/(413107255.6242596))<<(x>>((((-1908022173)&(((-1519897333)^((x>>(x*(tmp = -2886087774.426503, tmp)))*(tmp = 530910975, tmp)))+(-2579617265.889692)))+((2518127437.127563)>>>((tmp = 481642471.56441486, tmp)>>>(792447239))))^(x<<(248857393.6819017))))));
- assertEquals(-191, x >>= (-1591265193));
- assertEquals(-192.27421813247196, x += ((tmp = 2627329028.207775, tmp)/(tmp = -2061914644.9523563, tmp)));
- assertEquals(1230613220, x ^= (tmp = 3064354212.307105, tmp));
- assertEquals(1230613220, x &= x);
- assertEquals(1230613220, x %= (1833479205.1064768));
- assertEquals(1230613220, x >>>= ((((1559450742.1425748)|((2151905260.956583)*(1213275165)))%(514723483.12764716))>>>x));
- assertEquals(1230613493, x |= ((((3004939197.578903)*(tmp = -576274956, tmp))+((tmp = 1037832416.2243971, tmp)^x))>>>(tmp = 2273969109.7735467, tmp)));
- assertEquals(2461226986, x += x);
- assertEquals(-27981, x >>= ((692831755.8048055)^((tmp = -1593598757, tmp)%(x-((((-1470536513.882593)|((tmp = -2716394020.466401, tmp)|(tmp = 2399097686, tmp)))&x)%x)))));
- assertEquals(-1.4660454948034359e+23, x *= (((x>>>((((((tmp = -3056016696, tmp)<<(-2882888332))*(2041143608.321916))&(((tmp = -634710040, tmp)|(tmp = -2559412457, tmp))>>(1916553549.7552106)))%((-2150969350.3643866)*x))<<((x*(tmp = 2657960438.247278, tmp))|x)))%((tmp = 526041379, tmp)*(tmp = 2514771352.4509397, tmp)))*(1219908294.8107886)));
- assertEquals(-1.4660454948034359e+23, x -= ((1709004428)>>(((x|(-422745730.626189))%x)>>x)));
- assertEquals(-2247766068, x %= (-3105435508));
- assertEquals(-386845856.0649812, x -= (-1860920211.9350188));
- assertEquals(-386846803.0649812, x -= ((((-3214465921)|((tmp = -1326329034, tmp)+(((tmp = -1203188938.9833462, tmp)%((((((-1318276502)+(x+x))^((x<<x)%(x>>>x)))+(tmp = -439689881, tmp))+((-1455448168.695214)^(x-((-388589993)>>((((940252202)^(-2218777278))|x)/(tmp = -1007511556, tmp))))))&(-140407706.28176737)))-(x/((888903270.7746506)-((tmp = -2885938478.632409, tmp)<<(((((tmp = -1750518830.270917, tmp)>>(((((((tmp = 868557365.7908674, tmp)/(tmp = -2805687195.5172157, tmp))*x)|((((((-1342484550)-((tmp = 1089284576, tmp)^(tmp = 120651272, tmp)))<<(tmp = 2230578669.4642825, tmp))-(x*x))%(x^(((tmp = -3177941534, tmp)+(x>>(-1595660968)))/(-1738933247))))>>>(tmp = 2860175623, tmp)))-(((2392690115.8475947)>>>(tmp = -1754609670.2068992, tmp))>>>(tmp = 2615573062, tmp)))-(tmp = 2590387730, tmp))^((x+((((x-(tmp = -2823664112.4548965, tmp))*(200070977))>>>(((x|((((tmp = 1361398, tmp)>>((tmp = 1649209268, tmp)%x))+x)+(x>>>(tmp = -2379989262.1245675, tmp))))|(x^((tmp = -647953298.7526417, tmp)-x)))&(tmp = -1881232501.1945808, tmp)))>>>x))%(x^(tmp = -1737853471.005935, tmp)))))>>>(427363558))>>>((tmp = -3076726422.0846386, tmp)^(-1518782569.1853383)))/x)))))))|x)>>>(1854299126)));
- assertEquals(-386846803.0649812, x -= (x%x));
- assertEquals(238532, x >>>= (-448890706.10774803));
- assertEquals(232, x >>>= (-791593878));
- assertEquals(232, x <<= (((x^((x-x)&(tmp = 1219114201, tmp)))/(tmp = -427332955, tmp))%(tmp = 1076283154, tmp)));
- assertEquals(210, x ^= (x>>>((2975097430)>>>x)));
- assertEquals(1, x /= x);
- assertEquals(2317899531, x *= (2317899531));
- assertEquals(1131786, x >>>= x);
- assertEquals(2301667519.6379366, x += ((tmp = 193109669.63793683, tmp)+(tmp = 2107426064, tmp)));
- assertEquals(3842614963.6379366, x += (((-1676516834)>>>(tmp = -1817478916.5658965, tmp))^(((tmp = 1122659711, tmp)>>>(tmp = -2190796437, tmp))|(tmp = -2754023244, tmp))));
- assertEquals(-452352333, x &= x);
- assertEquals(-863, x >>= x);
- assertEquals(-3.777863669459606e-7, x /= (2284359827.424491));
- assertEquals(-3.777863669459606e-7, x %= ((tmp = -2509759238, tmp)>>>x));
- assertEquals(0, x <<= (-814314066.6614306));
- assertEquals(0, x %= (tmp = 190720260, tmp));
- assertEquals(2301702913, x += (2301702913));
- assertEquals(-249158048, x >>= (tmp = -2392013853.302008, tmp));
- assertEquals(-249158048, x >>= x);
- assertEquals(-498316096, x += x);
- assertEquals(-498316096, x %= (tmp = 2981330372.914731, tmp));
- assertEquals(106616.2199211318, x *= (((((tmp = 1020104482.2766557, tmp)^((tmp = -416114189.96786, tmp)>>>(1844055704)))|(tmp = 1665418123, tmp))>>(1826111980.6564898))/(-2446724367)));
- assertEquals(106616, x |= x);
- assertEquals(1094927345, x -= (((-1229759420)|(741260479.7854375))-x));
- assertEquals(8353, x >>= x);
- assertEquals(0, x >>>= (tmp = -327942828, tmp));
- assertEquals(-953397616.8888416, x += (tmp = -953397616.8888416, tmp));
- assertEquals(-1906641240.7776833, x += (x+((-3033450184.9106326)>>>(tmp = 2090901325.5617187, tmp))));
- assertEquals(-1906641240.7776833, x %= (tmp = 2584965124.3953505, tmp));
- assertEquals(-1098907671, x |= (tmp = -1272590495, tmp));
- assertEquals(-1.8305258600334393, x /= (600323489));
- assertEquals(-1, x &= x);
- assertEquals(-1, x |= ((x+x)-x));
- assertEquals(1, x *= x);
- assertEquals(867473898, x ^= (tmp = 867473899.0274491, tmp));
- assertEquals(6, x >>>= (tmp = 1174763611.341228, tmp));
- assertEquals(0, x >>= ((689882795)^(2250084531)));
- assertEquals(0, x /= (tmp = 2545625607, tmp));
- assertEquals(0, x >>= x);
- assertEquals(0, x += x);
- assertEquals(0, x -= (x*(-1098372339.5157008)));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x -= (tmp = -1797344676.375759, tmp));
- assertEquals(1121476698, x |= (tmp = 1121476698, tmp));
- assertEquals(1, x /= x);
- assertEquals(1, x &= (-191233693));
- assertEquals(330137888.92595553, x += (330137887.92595553));
- assertEquals(-1792236714, x ^= (tmp = 2256609910, tmp));
- assertEquals(269000724, x &= (316405813.62093115));
- assertEquals(256, x >>= x);
- assertEquals(256, x %= ((2556320341.54669)|(1066176021.2344948)));
- assertEquals(256, x |= x);
- assertEquals(131072, x <<= ((-1650561175.8467631)|x));
- assertEquals(-286761951, x -= ((tmp = 287024095, tmp)-((-2293511421)&(x|x))));
- assertEquals(-1561852927, x &= (3002663949.0989227));
- assertEquals(-460778761, x %= (tmp = -550537083, tmp));
- assertEquals(-3023749308.0492287, x += (tmp = -2562970547.0492287, tmp));
- assertEquals(-481313332.04922867, x %= ((x|((tmp = -855929299, tmp)%((2181641323)%(x|(220607471.33018696)))))&x));
- assertEquals(17510668, x &= (tmp = 363557663, tmp));
- assertEquals(12552, x &= (3020225307));
- assertEquals(1814655896, x |= ((x<<(((-1475967464)*(-3122830185))*x))+(x^(-2480340864.2661023))));
- assertEquals(-3209124403525266400, x -= ((1146847590)*(tmp = 2798213497, tmp)));
- assertEquals(-6418248807050533000, x += x);
- assertEquals(1.1856589432073933e+28, x *= (-1847324681.313275));
- assertEquals(-1238853292, x ^= (-1238853292));
- assertEquals(-77428331, x >>= (x&((((2043976651.8514216)>>>x)^(x>>>(((tmp = -1785122464.9720652, tmp)%x)<<(1570073474.271266))))*x)));
- assertEquals(2011, x >>>= x);
- assertEquals(2011, x &= x);
- assertEquals(0, x >>= (-2682377538));
- assertEquals(-1.1367252770299785, x -= (((tmp = 2704334195.566802, tmp)/(2379056972))%((((-1764065164)*((((468315142.8822602)>>((x%(((tmp = 2537190513.506641, tmp)+((x&(x|((tmp = -947458639, tmp)^(2653736677.417406))))*((x<<((1243371170.1759553)>>>(((tmp = 1572208816, tmp)<<((tmp = 963855806.1090456, tmp)>>>x))%((-3078281718.7743487)*x))))^(-1154518374))))^(-2839738226.6314087)))^((-2865141241.190915)*(-2400659423.8207664))))>>((tmp = 32940590, tmp)/(tmp = 2917024064.570817, tmp)))+(((27601850)/(tmp = 3168834986, tmp))>>x)))+(tmp = 2528181032.600125, tmp))/(3162473952))));
- assertEquals(-1697395408.7948515, x -= (1697395407.6581264));
- assertEquals(1536992607912062500, x *= (tmp = -905500627.5781817, tmp));
- assertEquals(102759872, x >>= (tmp = -707887133.4484048, tmp));
- assertEquals(102759872, x %= (tmp = -1764067619.7913327, tmp));
- assertEquals(12543, x >>>= (-144142995.1469829));
- assertEquals(-2059555229.2592103, x += ((-2059555229.2592103)-x));
- assertEquals(-537022593, x |= (tmp = -2770761410.407701, tmp));
- assertEquals(23777505, x ^= (-560496738.6854918));
- assertEquals(-64329014115772310, x *= ((tmp = -2729234369.198843, tmp)+x));
- assertEquals(189083830, x ^= (tmp = 933619934, tmp));
- assertEquals(189083830, x %= ((tmp = -2918083254, tmp)-(x|(x^(-2481479224.0329475)))));
- assertEquals(378167660, x += x);
- assertEquals(-0.45833387791900504, x /= ((tmp = 2727991875.241294, tmp)<<(tmp = 2570034571.9084663, tmp)));
- assertEquals(0, x <<= x);
- assertEquals(-0, x /= (tmp = -67528553.30662966, tmp));
- assertEquals(0, x <<= (938440044.3983492));
- assertEquals(-945479171, x ^= (tmp = -945479171, tmp));
- assertEquals(-225632619284361200, x *= (238643670.00884593));
- assertEquals(-0, x %= x);
- assertEquals(-585826304, x ^= ((-1256265560)<<(tmp = 1144713549, tmp)));
- assertEquals(-671583855, x ^= (183333265.1468178));
- assertEquals(-484311040, x <<= x);
- assertEquals(-3969762.62295082, x /= ((((tmp = -1164308668.931008, tmp)-x)%x)>>>(((397816647)>>(-1605343671.4070785))<<x)));
- assertEquals(758097879, x ^= ((tmp = -2871307491, tmp)^(-2043176492.646442)));
- assertEquals(0, x *= ((x>>(tmp = 1983292927, tmp))&(tmp = -860505131.4484091, tmp)));
- assertEquals(0, x <<= x);
- assertEquals(0, x &= x);
- assertEquals(0, x %= ((3132981707)-(-2832016477)));
- assertEquals(0, x >>= (x<<((1830195133.0342631)>>>(tmp = -1003969250, tmp))));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x += (tmp = 273271019.87603223, tmp));
- assertEquals(NaN, x += (625749326.1155348));
- assertEquals(0, x >>= (tmp = -531039433.3702333, tmp));
- assertEquals(0, x -= (((tmp = 2029464099, tmp)-(x-(tmp = -329058111.411458, tmp)))*(x<<x)));
- assertEquals(-0, x *= ((-1112957170.5613296)|((tmp = 847344494, tmp)>>>(tmp = 2735119927, tmp))));
- assertEquals(-0, x /= (tmp = 544636506, tmp));
- assertEquals(0, x >>>= (x^(545093699)));
- assertEquals(0, x %= (((tmp = -2208409647.5052004, tmp)+(3083455385.374988))+(((-482178732.7077277)*x)>>>((2661060565)*(-2125201239)))));
- assertEquals(0, x >>>= (-212334007.34016395));
- assertEquals(0.7004300865203454, x -= ((2032883941)/(-2902336693.0154715)));
- assertEquals(0, x <<= (x<<((265868133.50175047)>>>(1162631094))));
- assertEquals(604920272.4394834, x -= (-604920272.4394834));
- assertEquals(604920272, x &= x);
- assertEquals(0, x <<= (((-1961880051.1127694)%(tmp = 1715021796, tmp))|((tmp = 2474759639.4587016, tmp)|(243416152.55635))));
- assertEquals(-46419074, x |= (((tmp = -518945938.5238774, tmp)%((x+(tmp = 242636408, tmp))+(-1974062910)))|(1546269242.0259726)));
- assertEquals(-46419074, x += ((-629802130)*((tmp = -658144149, tmp)%((-905005358.5370393)>>>x))));
- assertEquals(-46419074, x |= (x%(-1103652494)));
- assertEquals(7892881050983985, x *= (-170035297.36469936));
- assertEquals(1105701997.4273424, x %= ((((-490612260.0023911)>>>(tmp = 1803426906, tmp))^(x%(2725270344.2568116)))-(1010563167.8934317)));
- assertEquals(1088619532, x &= (-2232199650));
- assertEquals(1073807364, x &= (-888024506.5008001));
- assertEquals(1153062254980628500, x *= x);
- assertEquals(1153062255703627000, x -= (tmp = -722998613.897227, tmp));
- assertEquals(-1141418584, x |= (3017232552.4814596));
- assertEquals(-373464140, x ^= (-2914372068));
- assertEquals(994050048, x <<= x);
- assertEquals(0, x ^= x);
- assertEquals(0, x &= (tmp = -3166402389, tmp));
- assertEquals(0, x &= ((-1760842506.337213)|(tmp = 2538748127.795164, tmp)));
- assertEquals(-0, x /= (-2635127769.808626));
- assertEquals(0, x &= ((((tmp = 1414701581, tmp)^(((2425608769)/((x<<x)^(x-x)))^((tmp = -2641946468.737288, tmp)|(tmp = -313564549.1754241, tmp))))*(tmp = -2126027460, tmp))|(-2255015479)));
- assertEquals(225482894, x ^= (225482894.8767246));
- assertEquals(0, x ^= x);
- assertEquals(306216231, x += (tmp = 306216231, tmp));
- assertEquals(306216231, x -= ((-465875275.19848967)&((-806775661.4260025)/((((-184966089.49763203)>>>((x>>x)+((tmp = -1951107532, tmp)|x)))%x)*((2704859526.4047284)%((x*x)>>x))))));
- assertEquals(30754, x &= (1706162402.033193));
- assertEquals(30454.010307602264, x -= (((590456519)>>>(tmp = 2713582726.8181214, tmp))/x));
- assertEquals(8419062, x |= ((2848886788)<<(tmp = 2993383029.402275, tmp)));
- assertEquals(16, x >>= (tmp = -1651287021, tmp));
- assertEquals(1, x /= x);
- assertEquals(-1407643485, x ^= (-1407643486));
- assertEquals(2, x >>>= (-1126004674));
- assertEquals(470812081, x ^= ((-2411718964)>>>x));
- assertEquals(550443688.6407901, x += (tmp = 79631607.6407901, tmp));
- assertEquals(3669092443.64079, x -= (-3118648755));
- assertEquals(-625874853, x <<= (((tmp = -1640437346, tmp)/(((x*x)>>>x)<<x))/x));
- assertEquals(-1431439050363516700, x *= (2287101077));
- assertEquals(-1921660672, x |= ((((((((-1912249689.9978154)&(-1676922742.5343294))*(2625527768))<<((820676465)^(((x+(tmp = -852743692, tmp))&((x-((((1361714551)/(311531668))>>>(tmp = -1330495518.8175917, tmp))<<(((tmp = 1369938417.8760853, tmp)*(-1217947853.8942266))<<(-2048029668))))-(-513455284)))>>>(tmp = 1980267333.6201067, tmp))))<<(((1503464217.2901971)>>(tmp = 2258265389, tmp))>>>(1868451148)))&(x-(x^(tmp = -1565209787, tmp))))*x)<<(tmp = -2426550685, tmp)));
- assertEquals(-1921660672, x %= (((tmp = 523950472.3315773, tmp)+(((2971865706)^x)-x))&(-1773969177)));
- assertEquals(420176973.1169958, x += (2341837645.116996));
- assertEquals(420176973, x >>>= (((tmp = -2485489141, tmp)<<((tmp = -2520928568.360244, tmp)+x))&(543950045.0932506)));
- assertEquals(50, x ^= (x|((tmp = 2001660699.5898843, tmp)>>>(tmp = 1209151128, tmp))));
- assertEquals(138212770720.96973, x *= (2764255414.4193945));
- assertEquals(-28683, x |= (((-535647551)|x)>>((((2065261509)>>(-354214733))*x)+(-3218217378.2592907))));
- assertEquals(1627048838, x ^= (tmp = -1627044749, tmp));
- assertEquals(-839408795, x ^= (2903337187.480303));
- assertEquals(-1000652427, x += (tmp = -161243632, tmp));
- assertEquals(740237908.4196916, x += ((tmp = 1587000348, tmp)+(tmp = 153889987.41969144, tmp)));
- assertEquals(Infinity, x /= (((((-615607376.1012697)&(57343184.023578644))+((-1967741575)|(-3082318496)))<<(((tmp = -958212971.99792, tmp)>>(tmp = 2962656321.3519197, tmp))-(x|(x*(969365195)))))<<(tmp = -1739470562.344624, tmp)));
- assertEquals(-Infinity, x /= ((tmp = -1736849852, tmp)%x));
- assertEquals(0, x <<= x);
- assertEquals(0, x %= (tmp = -226505646, tmp));
- assertEquals(1982856549, x -= (((x+(-1982856549))%(-2274946222))>>(x%(((tmp = -1289577208.9097936, tmp)>>x)^(778147661)))));
- assertEquals(1648018703, x ^= ((3085618856)+((tmp = 1546283467, tmp)&(((x|((-2376306530)*(((((((tmp = -2807616416, tmp)%(((((tmp = 347097983.1491085, tmp)<<x)|(((((1135380667)/(x>>>(tmp = 1679395106, tmp)))^((1277761947)<<((tmp = -1614841203.5244312, tmp)>>x)))%((tmp = 1552249234.2065845, tmp)>>>x))>>>(tmp = -1677859287, tmp)))>>>(2605907565))/(tmp = 2291657422.221277, tmp)))%(((tmp = 425501732.6666014, tmp)>>>(1327403879.455553))+x))>>((tmp = -3075752653.2474413, tmp)&(x-(tmp = -71834630, tmp))))|((((2532199449.6500597)*(-842197612.4577162))%x)>>x))*(((1220047194.5100307)<<((tmp = 1642962251, tmp)<<((-662340)>>>((tmp = -1672316631.3251066, tmp)<<((tmp = 1762690952.542441, tmp)-(x/(1904755683.3277364)))))))>>x))|(((((tmp = 1625817700.7052522, tmp)%(tmp = -2990984460, tmp))|(2395645662))-((2619930607.550086)>>x))^(tmp = 130618712, tmp)))))&((-3142462204.4628367)/(1078126534.8819227)))%(((tmp = -256343715.2267704, tmp)+x)^(tmp = 2009243755, tmp))))));
- assertEquals(1937698223, x |= (((tmp = 866354374.7435778, tmp)+(tmp = 2751925259.3264275, tmp))%(-2252220455)));
- assertEquals(0, x -= x);
- assertEquals(-823946290.6515498, x -= (tmp = 823946290.6515498, tmp));
- assertEquals(706970324, x ^= (-457174758));
- assertEquals(32916, x &= (25740724));
- assertEquals(0, x >>>= ((-1658933418.6445677)|(tmp = -846929510.4794133, tmp)));
- assertEquals(0, x ^= ((-834208600)/((-1256752740)&(tmp = 1973248337.8973258, tmp))));
- assertEquals(-1639195806, x += (-1639195806));
- assertEquals(-1559416478, x ^= ((tmp = 1349893449.0193534, tmp)*(tmp = 2044785568.1713037, tmp)));
- assertEquals(0, x &= ((x>>(tmp = 1720833612, tmp))/((x+(-1305879952.5854573))^x)));
- assertEquals(-0, x *= (tmp = -1713182743, tmp));
- assertEquals(0, x >>= x);
- assertEquals(NaN, x /= (((x%((x>>>(((-1515761763.5499895)^(-3076528507.626539))<<(tmp = 1293944457.8983147, tmp)))<<(tmp = 276867491.8483894, tmp)))>>(tmp = -2831726496.6887417, tmp))%((((tmp = 1780632637.3666987, tmp)^x)%((208921173.18897665)>>(tmp = 633138136, tmp)))+x)));
- assertEquals(0, x >>= (tmp = -2755513767.0561147, tmp));
- assertEquals(0, x |= x);
- assertEquals(840992300.0324914, x -= ((-840992300.0324914)+x));
- assertEquals(840992300, x &= x);
- assertEquals(-1094140277, x ^= (2364029095));
- assertEquals(-Infinity, x /= ((((((1257084956)<<(2009241695))>>(x+x))*x)>>>x)>>>(205318919.85870552)));
- assertEquals(-Infinity, x -= (((x>>>(tmp = 3037168809.20163, tmp))&x)*(x&(((806151109)*x)-(tmp = -1741679480.58333, tmp)))));
- assertEquals(400659949, x ^= (tmp = 400659949, tmp));
- assertEquals(5, x >>= (tmp = 1175519290, tmp));
- assertEquals(5, x |= x);
- assertEquals(0, x >>= x);
- assertEquals(0, x >>= ((1317772443)&(x<<x)));
- assertEquals(-1123981819, x ^= (tmp = 3170985477, tmp));
- assertEquals(1123864651, x ^= ((x%(((x&x)&(-2606227299.7590737))<<((tmp = -2018123078.1859496, tmp)*x)))|(x+(((((1935939774.8139446)/((-1303958190)/(2802816697.32639)))<<((2880056582)*x))+x)+x))));
- assertEquals(1543368927, x |= (-2795691884));
- assertEquals(NaN, x /= (x%((tmp = -1129915114, tmp)<<x)));
- assertEquals(NaN, x += (tmp = -3045743135, tmp));
- assertEquals(NaN, x -= (tmp = -2849555731.8207827, tmp));
- assertEquals(NaN, x /= (((((2127485827)>>>((((tmp = 363239924, tmp)>>x)|((((tmp = -1419142286.0523334, tmp)-(x<<x))^(tmp = -1990365089.8283136, tmp))*((tmp = 2780242444.0739098, tmp)>>>(((-2336511023.342298)&x)/(tmp = 2296926221.402897, tmp)))))>>((tmp = 1378982475.6839466, tmp)>>(tmp = -816522530, tmp))))&(x^(tmp = -1668642255.0586753, tmp)))%(((tmp = 921249300.1500335, tmp)^x)*(tmp = -2228816905, tmp)))>>x));
- assertEquals(-1460685191, x |= (tmp = 2834282105, tmp));
- assertEquals(-1463439264, x &= (tmp = 2881860064.146755, tmp));
- assertEquals(20.98100714963762, x /= (((3017150580.7875347)^((250499372.5339837)<<(tmp = -42767556.30788112, tmp)))|(x%(-2829281526))));
- assertEquals(1, x /= x);
- assertEquals(2, x += x);
- assertEquals(8, x <<= x);
- assertEquals(0, x >>>= ((730174750)>>>x));
- assertEquals(0, x ^= x);
- assertEquals(-1459637373, x ^= (2835329923.456409));
- assertEquals(-1233115861, x ^= (511678120));
- assertEquals(95682857, x >>>= ((tmp = 1534570885, tmp)|(tmp = -414425499.3786578, tmp)));
- assertEquals(70254633, x &= (-1502067585));
- assertEquals(51384749748909710, x *= (tmp = 731407276, tmp));
- assertEquals(9390482.873469353, x %= (tmp = -592576964.7982686, tmp));
- assertEquals(4695241, x >>>= (tmp = -1879898431.5395758, tmp));
- assertEquals(-3129811912538149000, x += (((-727481809)^((3106908604)%x))*((((tmp = -1218123690, tmp)^(x>>((-942923806)^x)))/(x+x))>>>(-1508881888.969373))));
- assertEquals(1596870236, x ^= (-1135673764.9721224));
- assertEquals(0, x ^= x);
- assertEquals(2133782410, x |= (((-2202469371)>>((tmp = 1327588406.183342, tmp)/(tmp = 253581265.7246865, tmp)))-((tmp = 2226575446.838795, tmp)^x)));
- assertEquals(-81895217.83608055, x -= (tmp = 2215677627.8360806, tmp));
- assertEquals(812089344, x <<= ((tmp = 882824005, tmp)/(((x>>((((((((tmp = 1211145185, tmp)/((-137817273)-(((tmp = 2165480503.1144185, tmp)-(-1840859887.1288517))*((155886014.8393339)>>((-1984526598)<<(tmp = 1331249058.3246582, tmp))))))>>(x*x))%(2830324652))%(933701061))|(1346496215))^(tmp = -988800810, tmp))+x))>>>x)<<(-2372088384))));
- assertEquals(812089344, x <<= x);
- assertEquals(8472, x %= ((((x|(((x%(tmp = 2772099481.664402, tmp))+(2894690616))-x))&(x&(((-715790638.6454093)>>(tmp = -1447931029, tmp))-(tmp = 1761027889, tmp))))^x)%(((tmp = 830969811, tmp)|x)|((-1102267929)-(3193018687)))));
- assertEquals(-0.0000028559857417864914, x /= (-2966401364));
- assertEquals(0, x >>= x);
- assertEquals(-701800392, x += (tmp = -701800392, tmp));
- assertEquals(2034756873, x -= (tmp = -2736557265, tmp));
- assertEquals(-0.9475075048394501, x /= (((((82879340.27231383)+((tmp = -2876678920.653639, tmp)*(-2801097850)))<<x)>>>((x<<(((((x|x)&(tmp = -1572694766, tmp))>>(x+(x/((x-(((tmp = 1435301275, tmp)|(tmp = 983577854.212041, tmp))>>(tmp = 632633852.1644179, tmp)))+x))))>>>x)|(-850932021)))>>x))<<(-821983991)));
- assertEquals(0, x >>= (x>>(2424003553.0883207)));
- assertEquals(2599386349, x -= (-2599386349));
- assertEquals(-68157441, x |= (((tmp = -1170343454.9327996, tmp)+((((tmp = 448468098, tmp)|(x>>(x>>(((x>>(((x/(x&(x<<x)))<<(2436876051.2588806))^(3010167261)))%((tmp = 2577616315.7538686, tmp)>>>(-2953152591.015912)))%((tmp = -1304628613, tmp)/(x&((x|((-2000952119)%((691146914)/((tmp = 1480966978.7766845, tmp)<<((tmp = 2644449477.392441, tmp)|(-2143869305.871568))))))+(tmp = -315254308, tmp))))))))&(-2060205555))|((-604140518.8186448)^(x*x))))%(x*((tmp = 1383244000.2807684, tmp)/(3195793656)))));
- assertEquals(-68157441, x |= x);
- assertEquals(-1, x >>= x);
- assertEquals(-2147483648, x <<= x);
- assertEquals(-1.5257198286933313, x /= (tmp = 1407521622, tmp));
- assertEquals(1149084989.47428, x += (((tmp = 1149084991.9004865, tmp)&x)^((((((2797053000)/(x^x))*(-2829253694))>>>((tmp = -610924351, tmp)>>x))>>>(tmp = -675681012, tmp))<<(2812852729))));
- assertEquals(0, x %= x);
- assertEquals(0, x <<= ((tmp = -584069073, tmp)*(-2953140326)));
- assertEquals(0, x <<= (tmp = -481515023.6404002, tmp));
- assertEquals(-1441535370, x ^= (2853431926));
- assertEquals(2853431926, x >>>= (((((((tmp = 2215663525.9620194, tmp)%((-1102832735.9274108)/x))>>x)&(3220898702.76322))&(((2077584946)*((x>>x)<<((tmp = 1845701049, tmp)-x)))/(tmp = 1947184202.5737212, tmp)))|(((tmp = 2976351488, tmp)^(-42517339))%((2648230244.410125)^(1520051731.31089))))/(1761635964)));
- assertEquals(43539, x >>>= (tmp = 1361671184.7432632, tmp));
- assertEquals(21769, x >>= ((tmp = -804932298.9572575, tmp)>>((((tmp = 1749006993.253409, tmp)+(276536978))^x)|(2698166994))));
- assertEquals(1103025563, x |= (tmp = 1103007891, tmp));
- assertEquals(1327594607, x += (tmp = 224569044, tmp));
- assertEquals(1327594607, x |= x);
- assertEquals(-478674944, x <<= (((672378508)&x)^(((-2070209708.6470091)|x)|(x>>>x))));
- assertEquals(-478674943, x ^= ((-1832457698.6345716)>>>((tmp = -3077714019, tmp)/(1809383028))));
- assertEquals(229129701056053250, x *= x);
- assertEquals(1, x /= x);
- assertEquals(2, x <<= (-1522529727));
- assertEquals(2, x &= x);
- assertEquals(-2016989182, x |= ((((tmp = -1267845511, tmp)*(1225350332))+((tmp = -1397690831.5717893, tmp)>>>(tmp = -2575382994, tmp)))+x));
- assertEquals(-241, x >>= (tmp = 931869591, tmp));
- assertEquals(-1048087547, x &= (tmp = -1048087403.1163051, tmp));
- assertEquals(-4004486369.844599, x += (tmp = -2956398822.844599, tmp));
- assertEquals(-4004486368.844599, x -= (((2701878498)>>x)|(x|(-1079354967))));
- assertEquals(1, x >>= (tmp = -1583689092, tmp));
- assertEquals(1, x *= (x>>(x%x)));
- assertEquals(0, x %= x);
- assertEquals(-0, x *= (-120818969));
- assertEquals(0, x >>= ((tmp = 1794099660, tmp)/(((x&(((-321906091)^(tmp = -3009885933.8449526, tmp))&((tmp = -140917780, tmp)|(2037803173.4075825))))&x)&(tmp = -745357154, tmp))));
- assertEquals(0, x <<= (563984257.3493614));
- assertEquals(NaN, x %= ((((x>>(tmp = -2190891392.320677, tmp))-x)<<(462714956))<<((tmp = -84413570, tmp)|((x|(-2787022855))-((tmp = 2028532622, tmp)|(tmp = 1103757073.9178817, tmp))))));
- assertEquals(NaN, x *= ((2137674085.3142445)|((tmp = -1054749859.2353804, tmp)%x)));
- assertEquals(NaN, x /= (x>>>(((((tmp = 597103360.9069608, tmp)>>>(-2850217714.1866236))-((tmp = 1125150527, tmp)*x))%(tmp = -982662312, tmp))|((x/(((968656808.6069037)*(((128484784.15362918)>>x)^x))&((((x/((((tmp = 748775979, tmp)*((x-(((tmp = 709571811.9883962, tmp)%(-2083567026))%(x/(tmp = -680467505, tmp))))/((tmp = -167543858, tmp)/(tmp = -3113588783, tmp))))/x)<<(-2605415230)))>>>(tmp = 3133054172, tmp))%(tmp = -1904650393, tmp))*((x|(-1193709562))*(tmp = -1731312795.718104, tmp)))))/((tmp = -672386301, tmp)/(tmp = 808898833.4163612, tmp))))));
- assertEquals(-9, x |= (((((tmp = 150377964.57195818, tmp)/(tmp = 2161910879.0514045, tmp))-(-2381625849))>>(-2715928517))/(((452113643)^(-2502232011))/((-3076471740)^(((tmp = 1664851172, tmp)*(((-1460011714)>>>x)<<((-2870606437)%x)))*((tmp = -2836565755.609597, tmp)-((x/(tmp = -871461415, tmp))-(2278867564))))))));
- assertEquals(-1, x >>= x);
- assertEquals(-1, x |= ((-1319927272)>>>(-2866709980)));
- assertEquals(-1, x >>= ((2345179803.155703)&(-978025218.2243443)));
- assertEquals(1, x /= x);
- assertEquals(-260730973, x |= (tmp = -260730973, tmp));
- assertEquals(1174405120, x <<= (2681054073));
- assertEquals(1174405120, x &= x);
- assertEquals(1073741824, x &= (tmp = 2017166572.7622075, tmp));
- assertEquals(1073741824, x |= x);
- assertEquals(168806102, x %= ((((tmp = -2939969193.950067, tmp)|((-2325174027.614815)/(-2329212715)))*(x/(((((-2927776738)/(x|x))+(x%(tmp = -3007347037.698492, tmp)))<<(-1898633380))>>(tmp = 204338085.45241892, tmp))))^x));
- assertEquals(168806102, x %= ((-832849739.5197744)&(tmp = -141908598, tmp)));
- assertEquals(-401033205.05225074, x -= (tmp = 569839307.0522507, tmp));
- assertEquals(-401033205, x &= x);
- assertEquals(-401130402, x ^= ((x*(tmp = 311418759.22436893, tmp))>>x));
- assertEquals(793533469, x ^= (-950312893.5201888));
- assertEquals(756, x >>>= (-1096189516));
- assertEquals(711, x += ((tmp = -753105189, tmp)>>(599823192.5381484)));
- assertEquals(0, x >>>= ((tmp = -2859668634.4641137, tmp)+(-1160392986.1521513)));
- assertEquals(2427599726.176195, x -= (-2427599726.176195));
- assertEquals(1942312465.2523103, x -= (485287260.92388475));
- assertEquals(0, x >>>= ((tmp = -1740656456, tmp)/(tmp = 1339746799.9335847, tmp)));
- assertEquals(0, x <<= ((-7017077.38786912)*((-699490904.4551768)^x)));
- assertEquals(0, x <<= (tmp = 715662384, tmp));
- assertEquals(0, x *= (x>>>(2149735450.0758677)));
- assertEquals(NaN, x /= x);
- assertEquals(0, x >>= ((397078885)*((851639692.8982519)-x)));
- assertEquals(0, x &= (-2526654445));
- assertEquals(0, x %= (-1204924598));
- assertEquals(251639720, x ^= (x|(tmp = 251639720, tmp)));
- assertEquals(695433573, x ^= (663539405));
- assertEquals(-1038050104, x -= (1733483677));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x %= x);
- assertEquals(0, x &= (392107269));
- assertEquals(0, x %= (-3084908458.241551));
- assertEquals(0, x ^= x);
- assertEquals(-2121660509, x ^= (tmp = -2121660509.7861986, tmp));
- assertEquals(2285041855588855800, x *= (x|(3209046634)));
- assertEquals(54915072, x >>>= (x%(((((x%((((tmp = -1429433339.5078833, tmp)|(tmp = 2906845137, tmp))^(3207260333))&(-848438650)))-(-2721099735))&(141851917.19978714))+x)/x)));
- assertEquals(54915072, x &= x);
- assertEquals(54915072, x %= (x+(1855489160)));
- assertEquals(70078753, x ^= ((((((-1648661736)+(x%((-1421237596)+(tmp = 2053180992.3857927, tmp))))+(tmp = 38606889, tmp))<<((-241334284)%((x>>(215316122))*(tmp = 396488307, tmp))))+((tmp = -2900704565, tmp)^x))^(((1103481003.1111188)^x)-(tmp = 1304113534, tmp))));
- assertEquals(1149501440, x <<= ((x>>(tmp = 3203172843, tmp))*(tmp = -192535531, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= ((tmp = 2751499787, tmp)&((tmp = 2217654798, tmp)*(tmp = -2798728014, tmp))));
- assertEquals(NaN, x /= ((((-2019592425)>>>((((-1571930240.741224)>>>((-183952981)/((((1990518443.672842)>>(((((2051371284)%(685322833.6793983))>>>(2662885938))<<(-1212029669.6675105))|((-2790877875)<<(1546643473))))<<x)-(tmp = 804296674.4579233, tmp))))-(tmp = -417759051.68770766, tmp))/((-621859758)>>>x)))&x)<<(tmp = -48558935.55320549, tmp)));
- assertEquals(0, x <<= (x&x));
- assertEquals(0, x *= (x%(tmp = 301196068, tmp)));
- assertEquals(398290944, x |= (((tmp = 1904146839, tmp)+(1521017178))*(-3174245888.562067)));
- assertEquals(1256401076, x ^= (1566464180));
- assertEquals(149620758, x %= ((tmp = 532626355, tmp)^(tmp = -382971203, tmp)));
- assertEquals(149620791, x |= (x>>x));
- assertEquals(-0.07034576194938641, x /= ((tmp = -1977313182.7573922, tmp)-x));
- assertEquals(0, x <<= x);
- assertEquals(0, x &= x);
- assertEquals(0, x /= ((2182424851.139966)%(((-2768516150)+x)>>>x)));
- assertEquals(0, x %= (-504299638.53962016));
- assertEquals(-0, x *= (-2915134629.6909094));
- assertEquals(0, x <<= ((tmp = 952692723.402582, tmp)%(2146335996.785011)));
- assertEquals(230457472, x |= ((tmp = -574776101.8681948, tmp)*(683185125)));
- assertEquals(933795934, x ^= (tmp = 974395614, tmp));
- assertEquals(933801974, x ^= (x>>>((-148683729)*(((tmp = 2912596991.415531, tmp)^(-2883672328))/x))));
- assertEquals(222, x >>= (-3060224682));
- assertEquals(27, x >>>= (1429156099.1338701));
- assertEquals(754519106, x ^= (tmp = 754519129.7281355, tmp));
- assertEquals(188629776, x >>>= ((x>>>((1247267193)<<(tmp = -936228622, tmp)))%((tmp = 978604324.8236886, tmp)*((tmp = -3018953108, tmp)^(((tmp = 259650195, tmp)>>>(tmp = 2762928902.7901163, tmp))*(x>>((tmp = 787444263.5542864, tmp)/(x>>>(((-2039193776)<<(tmp = -1408159169, tmp))-(1238893783))))))))));
- assertEquals(188629775.33987066, x += ((tmp = 1040520414, tmp)/((-1576237184)|((tmp = -970083705, tmp)&(((tmp = -312062761.12228274, tmp)|(1171754278.2968853))<<(-2069846597.7723892))))));
- assertEquals(1473670, x >>>= ((tmp = 202409672, tmp)^x));
- assertEquals(2171703268900, x *= (x>>(((tmp = 840468550, tmp)&(-3208057101.2136793))/x)));
- assertEquals(0, x ^= x);
- assertEquals(0, x ^= (x&((tmp = 2569871408.2405066, tmp)|((tmp = -3149374622, tmp)<<(x-(x|((tmp = -821239139.1626894, tmp)>>>x)))))));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x %= (tmp = 1926106354, tmp));
- assertEquals(0, x >>= ((x/(-2848416))/(tmp = 2484293767, tmp)));
- assertEquals(0, x <<= ((tmp = -2484137114, tmp)>>>(tmp = -887083772.8318355, tmp)));
- assertEquals(0, x >>= (tmp = -2651389432, tmp));
- assertEquals(0, x ^= x);
- assertEquals(1041871201, x += ((tmp = 1041871201.9272791, tmp)|(x<<(-1136959830))));
- assertEquals(651390879501530900, x *= ((tmp = 1250424964.0346212, tmp)>>x));
- assertEquals(1965815296.245636, x %= ((2650603245.655831)+((-1610821947.8640454)>>>(((878987151.6917406)*((((784630543)%(((1448720244)>>(((tmp = 3036767847, tmp)+((tmp = 1012548422, tmp)<<(1957000200)))-x))/(x>>x)))<<((tmp = 914710268, tmp)*(((x^(1559603121))<<(tmp = 3181816736, tmp))|((-1964115655)+x))))-(-1055603890)))&(946797797.0616649)))));
- assertEquals(1965815296.245636, x %= (tmp = -2601038357.593118, tmp));
- assertEquals(-769384440.872302, x += (-2735199737.117938));
- assertEquals(-769384440.872302, x %= (2193123162));
- assertEquals(1, x /= x);
- assertEquals(1, x -= (((x>>>(-1968465925))*((tmp = 563037904, tmp)>>((tmp = 3009534415.769578, tmp)>>((-2567240601.7038674)<<(tmp = -1258402723.4150183, tmp)))))%(3112239470.276867)));
- assertEquals(1, x |= x);
- assertEquals(1505461527, x ^= (tmp = 1505461526.5858076, tmp));
- assertEquals(406553877, x &= (tmp = 2558242293, tmp));
- assertEquals(406553877, x |= x);
- assertEquals(-574902339, x |= ((-709809495)%(tmp = -2880884811.410611, tmp)));
- assertEquals(-20281777.349363208, x %= (22184822.46602547));
- assertEquals(1, x /= x);
- assertEquals(-4360732, x ^= ((x|(tmp = 3178620274, tmp))>>(((2686286888)&(((-1107223053.8716578)/(((-2955575332.3675404)+(-2770518721))|(-2705016953.640522)))-x))^((1473641110.4633303)*((((-1466496401)<<x)+x)%(1805868749.082736))))));
- assertEquals(-1158545408, x <<= ((((x/((-2710098221.691819)-(-2421462965.788145)))/(((((x>>>(tmp = 1994541591.1032422, tmp))+(tmp = -1276676679.9747126, tmp))&((tmp = 1764029634.2493339, tmp)+((x|(tmp = -3050446156, tmp))-((tmp = -9441859, tmp)/(((-2072420232)&x)*(-1003199889))))))+(tmp = -2443230628, tmp))*x))*((x&((((x|(747566933))*(((2039741506)>>>((tmp = -2456000554, tmp)>>>(-1566360933.7788877)))^((tmp = 960600745, tmp)/x)))&(x^(((-2649310348.777452)^((2224282875)-(tmp = -2129141087.3182096, tmp)))<<((x<<x)+((-1307892509.3874407)-(x|(tmp = -2831643528.9720087, tmp)))))))/(((tmp = -35502946, tmp)<<((tmp = 1091279222, tmp)>>(((-2686069468.8930416)-x)+(tmp = 367442353.2904701, tmp))))%(1218262628))))/x))^(-919079153.7857773)));
- assertEquals(747, x >>>= (1229157974));
- assertEquals(747, x |= x);
- assertEquals(NaN, x %= (((3086718766.4715977)*((7912648.497568846)*((-2713828337.1659327)*(-176492425.4011252))))<<(tmp = -1074475173, tmp)));
- assertEquals(0, x >>>= ((((444923201)<<x)>>>(-883391420.2142565))*((((617245412)<<x)>>>x)*(-913086143.2793813))));
- assertEquals(1941802406, x ^= (tmp = -2353164890, tmp));
- assertEquals(14, x >>>= (-1600311077.4571416));
- assertEquals(-18229482703.7246, x += (((x+(-993157139.7880647))%x)*(1862419512.1781366)));
- assertEquals(-14.531388114858734, x /= ((tmp = -1649072797.951641, tmp)<<x));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= ((x/x)^x));
- assertEquals(2, x ^= ((-1597416259)/(-738770020)));
- assertEquals(0, x >>= (tmp = -387850072.74833393, tmp));
- assertEquals(0, x >>>= ((2491085477.186817)>>(x*(((tmp = -1592498533, tmp)+(tmp = 2086841852, tmp))&(-3174019330.8288536)))));
- assertEquals(0, x >>= x);
- assertEquals(0, x >>>= (tmp = -3045348659.45243, tmp));
- assertEquals(-1208573479, x |= ((3086393817)-x));
- assertEquals(1460649854142163500, x *= x);
- assertEquals(1588199424, x <<= (-1902076952));
- assertEquals(1586102272, x &= (tmp = 2139876091.9142454, tmp));
- assertEquals(-460908552.5528109, x -= (tmp = 2047010824.552811, tmp));
- assertEquals(-460908552.5528109, x %= (tmp = 507904117.09368753, tmp));
- assertEquals(-460908552.5528109, x %= (2749577642.527038));
- assertEquals(234012, x >>>= (-340465746.91275));
- assertEquals(0, x >>>= x);
- assertEquals(0, x %= (tmp = -2601875531, tmp));
- assertEquals(0, x %= (x|(tmp = 650979981.1158671, tmp)));
- assertEquals(0, x %= (tmp = -2286020987, tmp));
- assertEquals(0, x |= x);
- assertEquals(0, x &= (x|((tmp = 2568101411, tmp)-(-1438002403))));
- assertEquals(0, x >>>= (1399248574));
- assertEquals(0, x %= (-1906670287.2043698));
- assertEquals(0, x >>= (1019286379.6962404));
- assertEquals(0, x |= (x/(tmp = -82583591.62643051, tmp)));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x *= (x^(1874776436)));
- assertEquals(NaN, x -= ((-1238826797)-(-2971588236.7228813)));
- assertEquals(0, x <<= (2064632559));
- assertEquals(-0.5967273958864694, x += (((tmp = 1502995019, tmp)>>x)/(-2518729707)));
- assertEquals(0, x >>>= x);
- assertEquals(-0, x /= (-1923030890));
- assertEquals(NaN, x %= x);
- assertEquals(0, x >>= (tmp = 1081732779.9449487, tmp));
- assertEquals(-820183066, x |= ((tmp = -3169007292.4721155, tmp)|(-1912588318)));
- assertEquals(0, x -= x);
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x /= (tmp = 287181840, tmp));
- assertEquals(0, x &= (x/((tmp = -1139766051, tmp)<<(x&(tmp = 2779004578, tmp)))));
- assertEquals(0, x >>= (((tmp = -1816938028, tmp)+(-224851993.3139863))*(-2933829524)));
- assertEquals(0, x |= ((((tmp = 305077929.1808746, tmp)&((x-(((((tmp = 2122810346.7475111, tmp)<<(717271979))*(tmp = 256854043.72633624, tmp))%((x+(tmp = -318657223.9992106, tmp))*((1993144830)<<(2594890698.603228))))^((((tmp = 257370667, tmp)>>>((((x^(3160746820))>>>(2049640466.8116226))>>>(2543930504.7117066))^(x-x)))^(x%(964838975)))^x)))%(x*x)))>>>x)*(tmp = -46861540, tmp)));
- assertEquals(747575633, x ^= ((-2406502427)-(-3154078060.3794584)));
- assertEquals(0, x *= (x%x));
- assertEquals(0, x <<= (1313773705.3087234));
- assertEquals(0, x >>>= ((x+x)>>>(3068164056)));
- assertEquals(-0, x *= (tmp = -1771797797, tmp));
- assertEquals(1784146970, x ^= (tmp = 1784146970, tmp));
- assertEquals(1784146970, x >>>= (tmp = -2219972320.7195597, tmp));
- assertEquals(1744830464, x <<= ((((-2769476584)-(((1798431604)>>(tmp = 1337687914.799577, tmp))>>>((-2802941943.15014)>>x)))>>>(tmp = 646033678, tmp))-x));
- assertEquals(3044433348102455300, x *= x);
- assertEquals(0, x >>= ((tmp = 1592076570.1900845, tmp)-((645774223.6317859)>>x)));
- assertEquals(0, x >>= (x>>>(-3045822290.1536255)));
- assertEquals(-0, x *= (tmp = -2450298800.986624, tmp));
- assertEquals(0, x >>= (tmp = 1379605393, tmp));
- assertEquals(0, x &= (((x-((((tmp = 837939461.6683749, tmp)+((((-813261853.3247359)|(x&(((-2565113940)*(tmp = -2725085381.240134, tmp))|x)))%(-1457259320))-(x+((tmp = -273947066, tmp)%((1164825698.879649)>>(1653138880.3434052))))))>>>(2823967606.411492))>>>((((((((1189235604.9646997)/(tmp = -2875620103.4002438, tmp))-(tmp = -801261493, tmp))<<(((1832556579.5095325)<<x)|((tmp = -2740330665, tmp)>>(tmp = -2352814025, tmp))))-(tmp = -1445043552.99499, tmp))&(x<<(((((445325471)*(1293047043.1808558))>>>(((1901837408.5910044)-(tmp = -2349093446.5313253, tmp))>>>(tmp = 1000847053.1861948, tmp)))*(x>>>(1771853406.6567078)))>>x)))>>>x)>>>(x^((tmp = 2813422715, tmp)-(x+(-342599947)))))))&(x>>>x))*x));
- assertEquals(NaN, x %= ((tmp = -3027713526, tmp)-((((x%(((((x/((2711155710)^(((((x>>>x)%((1098599291.155015)^(((((tmp = 1855724377.8987885, tmp)/(x|x))*((-1963179786)*((x-((-1634717702)%x))<<x)))>>(2008859507))>>((tmp = 2635024299.7983694, tmp)^(tmp = -602049246, tmp)))))*(x>>x))&(tmp = -1925103609, tmp))*((tmp = 2106913531.2828505, tmp)%((tmp = -200970069, tmp)*(-2809001910.951446))))))%x)*((1990098169)>>((x<<(2303347904.2601404))%x)))|(2767962065.9846206))+(201589933.301661)))>>(((tmp = 1921071149.5140274, tmp)>>(1054558799.1731887))|x))*(x/((((-2833879637.345674)>>>(tmp = 2849099601, tmp))%x)+(x%(x%(((tmp = 1983018049, tmp)^(tmp = -2659637454, tmp))>>((-1335497229.6945198)-(x+(((((tmp = 1136612609.848967, tmp)%(2471741030.01762))<<(x|(((tmp = 1644081190.1972675, tmp)&(-1422527338))^(2379264356.265957))))/(tmp = 2979299484.1884174, tmp))/x)))))))))*((tmp = 1858298882, tmp)^((tmp = -547417134.9651439, tmp)*x)))));
- assertEquals(-7664, x |= ((2286000258.825538)>>(1716389170)));
- assertEquals(-1, x >>= x);
- assertEquals(-1231640486.3023372, x += ((tmp = 1231640485.3023372, tmp)*x));
- assertEquals(-2463280972.6046743, x += x);
- assertEquals(1746, x >>>= x);
- assertEquals(1746, x >>>= (((tmp = -562546488.0669937, tmp)*((-2475357745.8508205)&((x%(821425388.8633704))%((((-2315481592.687686)&(((tmp = 3130530521.7453523, tmp)+x)-x))^(-973033390.1773088))/x))))<<x));
- assertEquals(1746, x %= (-1544973951.076033));
- assertEquals(27936, x <<= (-525441532.33816123));
- assertEquals(27936, x %= (x*((tmp = 344991423.5336287, tmp)+(-2267207281))));
- assertEquals(27, x >>>= (tmp = 1249792906, tmp));
- assertEquals(0, x >>>= (tmp = -1068989615, tmp));
- assertEquals(0, x >>>= (tmp = 347969658.92579734, tmp));
- assertEquals(-2656611892, x -= (2656611892));
- assertEquals(1944539596, x |= (((tmp = 3000889963, tmp)-x)<<((tmp = 2917390580.5323124, tmp)^(-996041439))));
- assertEquals(1944539596, x |= x);
- assertEquals(-739740167.0752468, x -= ((1712009965.0752468)+(x>>((tmp = -740611560.99014, tmp)>>>((tmp = -1033267419.6253037, tmp)&(862184116.3583733))))));
- assertEquals(-1479480334.1504936, x += x);
- assertEquals(-4294967296.150494, x -= (x>>>((1219235492.3661718)&(3138970355.0665245))));
- assertEquals(0, x >>= (x*x));
- assertEquals(-0, x *= ((-2202530054.6558375)-(-676578695)));
- assertEquals(-0, x %= (1336025846));
- assertEquals(0, x &= x);
- assertEquals(0, x /= (1759366510));
- assertEquals(630007622, x |= (630007622));
- assertEquals(-0.22460286863455903, x /= (tmp = -2804984753, tmp));
- assertEquals(1102410276.775397, x -= (-1102410277));
- assertEquals(1102410276.775397, x %= ((((-2569525203)&x)*(x|(-1932675298)))/((-2376634450)>>>(x>>>(tmp = 936937604.9491489, tmp)))));
- assertEquals(33642, x >>= (3028252527));
- assertEquals(2181106522.688034, x -= (-2181072880.688034));
- assertEquals(-2113861630, x &= (2523921542));
- assertEquals(-2147483646, x &= (-1996601566.9370148));
- assertEquals(-2147483648, x &= (tmp = -665669175.1968856, tmp));
- assertEquals(-2858673260.1367273, x -= (tmp = 711189612.1367272, tmp));
- assertEquals(350657, x >>= (tmp = -170243892.25474262, tmp));
- assertEquals(-0.0001405571562140975, x /= (-2494764474.7868776));
- assertEquals(0, x ^= x);
- assertEquals(NaN, x /= ((x&(-2041236879))*((tmp = -2182530229, tmp)^((1274197078)*x))));
- assertEquals(0, x |= (x&(x-(1794950303))));
- assertEquals(1222105379, x |= (tmp = 1222105379, tmp));
- assertEquals(729884484, x ^= (tmp = 1666645607.6907792, tmp));
- assertEquals(729884484, x %= (tmp = -2896922082, tmp));
- assertEquals(8768, x &= ((tmp = 358940932, tmp)>>>(3159687631.3308897)));
- assertEquals(1892384495, x |= (-2402591569));
- assertEquals(1892470533, x += ((((x^(-2266612043))>>>(tmp = -531009952, tmp))<<(x>>>((-1365315963.5698428)>>>((x+((-3168207800.184341)-(tmp = 1776222157.609917, tmp)))+(-1588857469.3596382)))))>>>x));
- assertEquals(143587205, x += (tmp = -1748883328, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= (tmp = 2334880462.3195543, tmp));
- assertEquals(0, x &= ((tmp = 1819359625.4396145, tmp)|(tmp = -1323513565, tmp)));
- assertEquals(-1102259874, x ^= (3192707422));
- assertEquals(2567457772588852700, x *= (-2329267202));
- assertEquals(-16783687, x |= ((-2212476227.060922)^(378973700.78452563)));
- assertEquals(4278183609, x >>>= ((((((((tmp = 1766363150.197206, tmp)*(-2774552871))%x)>>>((3071429820)&((((((tmp = 351068445.27642524, tmp)<<(tmp = 2646575765, tmp))^(806452682))<<((x>>>(-2217968415.505327))<<(1564726716)))|x)-(tmp = -3110814468.9023848, tmp))))+x)^x)>>>(tmp = -617705282.0788529, tmp))>>>x));
- assertEquals(4314933530, x -= ((1032195469.789219)|(tmp = -448053861.9531791, tmp)));
- assertEquals(9709850, x %= (((tmp = -3056286252.5853324, tmp)*x)&x));
- assertEquals(9709850, x %= (tmp = -2596800940, tmp));
- assertEquals(2655489828.9461126, x -= (tmp = -2645779978.9461126, tmp));
- assertEquals(369266212, x &= (((335712316.24874604)|(tmp = 33648215, tmp))-((x/(2639848695))<<((-499681175)<<(-2490554556)))));
- assertEquals(-2147483648, x <<= (-834465507));
- assertEquals(1073741824, x >>>= (((tmp = 3018385473.1824775, tmp)>>(x*(-2574502558.216812)))|(((tmp = -1742844828, tmp)*(1698724455))&x)));
- assertEquals(-270818218, x += (-1344560042));
- assertEquals(360710144, x <<= x);
- assertEquals(0, x <<= (tmp = 612718075, tmp));
- assertEquals(0, x <<= x);
- assertEquals(-0, x /= (tmp = -1922423684, tmp));
- assertEquals(-0, x *= ((((tmp = 741806213.3264687, tmp)%(-711184803.2022421))+((tmp = -3209040938, tmp)&(525355849.044886)))&(x<<(tmp = -698610297, tmp))));
- assertEquals(0, x <<= (-482471790));
- assertEquals(0, x &= ((-921538707)/(tmp = -482498765.988616, tmp)));
- assertEquals(0, x ^= (x^x));
- assertEquals(-351721702, x ^= (-351721702.8850286));
- assertEquals(726242219625599900, x -= ((2064820612)*x));
- assertEquals(1452484439251199700, x += x);
- assertEquals(2.52318299412847e-15, x %= ((((x<<((2508143285)+x))>>(-2493225905.011774))%(1867009511.0792103))/((((x<<(2542171236))>>((x|x)&(tmp = -384528563, tmp)))+((-1168755343)*(1731980691.6745195)))+(tmp = -1608066022.71164, tmp))));
- assertEquals(79905008, x += ((((-2702081714.590131)&(x+(tmp = -1254725471.2121565, tmp)))*(3088309981))%(((tmp = 1476844981.1453142, tmp)|((((tmp = -1243556934.7291331, tmp)%x)^(-1302096154))+((660489180)/(tmp = -681535480.8642154, tmp))))^(tmp = -8410710, tmp))));
- assertEquals(1215822204, x ^= ((-3008054900)>>>(tmp = -1990206464.460693, tmp)));
- assertEquals(-394790532, x |= ((((-1334779133.2038574)+(tmp = -1407958866.832946, tmp))<<(1699208315))-(((x^(x%x))<<(3216443))>>(x+((((2576716374.3081336)|((tmp = 2316167191.348064, tmp)&((51086351.20208645)&((x|(tmp = -357261999, tmp))^(x/x)))))*(-45901631.10155654))*(((-439588079)>>>((-2358959768.7634916)|(1613636894.9373643)))+(((-908627176)<<x)%(x%((-1669567978)>>>((x>>(1289400876))+(tmp = 2726174270, tmp)))))))))));
- assertEquals(-0.17717467607696327, x /= (2228255982.974148));
- assertEquals(-1905616474, x ^= (tmp = 2389350822.851587, tmp));
- assertEquals(-0, x %= x);
- assertEquals(2818124981.508915, x -= (-2818124981.508915));
- assertEquals(-1476842315, x |= x);
- assertEquals(73408564, x &= (-3147390604.3453345));
- assertEquals(70, x >>>= x);
- assertEquals(1, x >>= x);
- assertEquals(3086527319.899181, x *= (3086527319.899181));
- assertEquals(-145, x >>= x);
- assertEquals(-145, x %= (tmp = -2500421077.3982406, tmp));
- assertEquals(-1, x >>= (tmp = -2970678326.712191, tmp));
- assertEquals(-1, x %= ((tmp = -535932632.4668834, tmp)+(((-1226598339.347982)<<((tmp = 616949449, tmp)/(tmp = 2779464046, tmp)))/(214578501.67984307))));
- assertEquals(1, x *= x);
- assertEquals(1, x >>= ((tmp = 11080208, tmp)<<(460763913)));
- assertEquals(-1.8406600706723492e-19, x /= ((tmp = -2334126306.1720915, tmp)*(tmp = 2327566272.5901165, tmp)));
- assertEquals(856681434186007200, x -= ((tmp = -2286974992.8133907, tmp)*(374591518)));
- assertEquals(3126084224, x >>>= x);
- assertEquals(-1160460669, x |= (tmp = 181716099, tmp));
- assertEquals(873988096, x <<= (tmp = 406702419, tmp));
- assertEquals(0, x <<= ((tmp = 802107965.4672925, tmp)-((tmp = 1644174603, tmp)>>((tmp = 604679952, tmp)+(tmp = -515450096.51425123, tmp)))));
- assertEquals(NaN, x %= ((x>>(tmp = 2245570378, tmp))*(tmp = 1547616585, tmp)));
- assertEquals(NaN, x /= ((tmp = -776657947.0382309, tmp)&(tmp = 163929332.28270507, tmp)));
- assertEquals(NaN, x *= (tmp = 243725679.78916526, tmp));
- assertEquals(NaN, x /= (x>>x));
- assertEquals(0, x <<= ((tmp = -1293291295.5735884, tmp)%(((((63309078)>>>x)&(x&(-2835108260.025297)))+x)>>>(-1317213424))));
- assertEquals(0, x *= ((((tmp = -1140319441.0068483, tmp)*(tmp = 2102496185, tmp))&(-2326380427))<<(tmp = -2765904696, tmp)));
- assertEquals(0, x /= (tmp = 2709618593, tmp));
- assertEquals(0, x >>= (-1753085095.7670164));
- assertEquals(1766381484, x |= (-2528585812));
- assertEquals(1766381484, x %= (2735943476.6363373));
- assertEquals(1766381484, x %= (x*(tmp = 2701354268, tmp)));
- assertEquals(-2147483648, x <<= (-323840707.4949653));
- assertEquals(4611686018427388000, x *= (x<<x));
- assertEquals(0, x <<= (3066735113));
- assertEquals(0, x ^= ((((x*x)^(tmp = -2182795086.39927, tmp))<<(x^(tmp = 1661144992.4371827, tmp)))<<((((-2885512572.176741)*(tmp = 609919485, tmp))|(tmp = 929399391.0790694, tmp))>>>((((((((((399048996)>>((-107976581.61751771)>>>x))|(((-1502100015)<<(tmp = -1108852531.9494338, tmp))&(x/(tmp = -3198795871.7239237, tmp))))+((-2627653357)>>x))>>>x)*(1066736757.2718519))%(tmp = 1326732482.201604, tmp))/(tmp = 2513496019.814191, tmp))>>>((1694891519)>>>(-2860217254.378931)))<<(tmp = 31345503, tmp)))));
- assertEquals(0, x ^= (x/((-2556481161)>>>(x/(x%(x&(1302923615.7148068)))))));
- assertEquals(NaN, x /= x);
- assertEquals(NaN, x += (tmp = 846522031, tmp));
- assertEquals(0, x >>= (x+(-1420249556.419045)));
- assertEquals(0, x ^= (((x%(-1807673170))&x)-x));
- assertEquals(-3484.311990686845, x -= ((((((-510347602.0068991)>>>x)<<((tmp = 1647999950, tmp)&(((305407727)>>((1781066601.791009)&x))<<((tmp = -998795238, tmp)%(((x/x)+x)<<(((2586995491.434947)<<x)-((((tmp = 545715607.9395425, tmp)*x)>>>x)>>>(((((2332534960.4595165)^(-3159493972.3695474))<<(tmp = 867030294, tmp))|(2950723135.753855))^(((3150916666)<<x)>>((tmp = 414988690, tmp)|((tmp = -1879594606, tmp)/(tmp = 1485647336.933429, tmp))))))))))))>>(tmp = -2676293177, tmp))%(617312699.1995015))/((((tmp = -1742121185, tmp)^((((x&x)<<(tmp = 698266916, tmp))/(-1860886248))+((-213304430)%((((((-2508973021.1333447)+(tmp = 2678876318.4903, tmp))&(tmp = -43584540, tmp))-x)^(-2251323850.4611115))-x))))>>>(tmp = 2555971284, tmp))%((((tmp = 16925106, tmp)^x)&x)|((x/((x|(tmp = -2787677257.125139, tmp))<<(-853699567)))+(tmp = -1721553520, tmp))))));
- assertEquals(-447873933.26863855, x += (-447870448.9566479));
- assertEquals(200591060101520900, x *= x);
- assertEquals(200591062202483420, x -= (-2100962536));
- assertEquals(-5.261023346568228e+24, x *= ((tmp = -419641692.6377077, tmp)>>(tmp = -224703100, tmp)));
- assertEquals(1269498660, x |= (195756836));
- assertEquals(1269498660, x |= x);
- assertEquals(1269498660, x |= x);
- assertEquals(-37.75978948486164, x /= (((tmp = -595793780, tmp)+((tmp = 2384365752, tmp)>>>(1597707155)))|((968887032)^(tmp = 2417905313.4337964, tmp))));
- assertEquals(-37.75978948486164, x %= (tmp = -1846958365.291661, tmp));
- assertEquals(1102319266.6421175, x += (1102319304.401907));
- assertEquals(-1664202255175155200, x -= ((x^(tmp = 407408729, tmp))*x));
- assertEquals(-752874653, x ^= (tmp = 314673507, tmp));
- assertEquals(-72474761, x |= (tmp = -2538726025.8884344, tmp));
- assertEquals(-72474761, x |= x);
- assertEquals(-122849418, x += ((tmp = -2332080457, tmp)|(((((30496388.145492196)*(((-1654329438.451212)|(-2205923896))&(x>>(tmp = -1179784444.957002, tmp))))&(tmp = 319312118, tmp))*(651650825))|(((-2305190283)|x)>>>(-428229803)))));
- assertEquals(994, x >>>= x);
- assertEquals(614292, x *= (((((2565736877)/((tmp = 649009094, tmp)>>>(((x>>>(2208471260))>>(x>>>x))%x)))&(tmp = 357846438, tmp))<<(tmp = -2175355851, tmp))%x));
- assertEquals(1792008118, x |= (tmp = 1791924774.5121183, tmp));
- assertEquals(1246238208, x &= (tmp = 1264064009.9569638, tmp));
- assertEquals(-88877082, x ^= (2969289190.285704));
- assertEquals(0.044923746573582474, x /= ((tmp = -3057438043, tmp)^(-1009304907)));
- assertEquals(0, x <<= ((-828383918)-((((x>>(734512101))*(tmp = -3108890379, tmp))-(x|((tmp = 3081370585.3127823, tmp)^((-271087194)-(x/(tmp = -2777995324.4073873, tmp))))))%x)));
- assertEquals(1604111507.3365753, x -= (-1604111507.3365753));
- assertEquals(-1721314970, x ^= (tmp = -956686859, tmp));
- assertEquals(-102247425, x |= (tmp = -2535095555, tmp));
- assertEquals(-102247425, x %= (-955423877));
- assertEquals(1053144489850425, x *= (((tmp = 1583243590.9550207, tmp)&(1356978114.8592746))|(tmp = -10299961.622774363, tmp)));
- assertEquals(-0.0043728190668037336, x /= ((-1196259252.435701)*(((-689529982)|(tmp = -1698518652.4373918, tmp))<<x)));
- assertEquals(-2, x ^= (((x+(tmp = 2961627388, tmp))>>(tmp = 231666110.84104693, tmp))|x));
- assertEquals(-1, x >>= (tmp = -83214419.92958307, tmp));
- assertEquals(-1, x %= (-1303878209.6288595));
- assertEquals(2944850457.5213213, x -= (tmp = -2944850458.5213213, tmp));
- assertEquals(-1.6607884436053055, x /= (-1773164107));
- assertEquals(-0.6607884436053055, x %= ((x>>(1240245489.8629928))%(tmp = -3044136221, tmp)));
- assertEquals(-0, x *= ((x*x)>>>((1069542313.7656753)+x)));
- assertEquals(0, x >>>= (tmp = -202931587.00212693, tmp));
- assertEquals(-0, x *= (-375274420));
- assertEquals(0, x |= ((x/(((tmp = -876417141, tmp)*(x>>>x))&(-2406962078)))<<x));
- assertEquals(0, x &= ((tmp = -650283599.0780096, tmp)*(tmp = 513255913.34108484, tmp)));
- assertEquals(3027255453.458466, x += (3027255453.458466));
- assertEquals(-12568623413253943000, x *= (((x-(198689694.92141533))|x)-x));
- assertEquals(-12568623410285185000, x -= (tmp = -2968758030.3694654, tmp));
- assertEquals(-2008903680, x &= (3111621747.7679076));
- assertEquals(-110045263.26583672, x += (tmp = 1898858416.7341633, tmp));
- assertEquals(15964, x >>>= (1141042034));
- assertEquals(31928, x += x);
- assertEquals(0, x ^= x);
- assertEquals(-1159866377, x |= (-1159866377));
- assertEquals(0, x ^= x);
- assertEquals(3072699529.4306993, x -= (tmp = -3072699529.4306993, tmp));
- assertEquals(1, x /= x);
- assertEquals(-1471195029, x |= (2823772267.429641));
- assertEquals(-4152937108, x += (-2681742079));
- assertEquals(142030188, x |= x);
- assertEquals(270, x >>= (tmp = 1013826483, tmp));
- assertEquals(0, x >>>= (529670686));
- assertEquals(-2912300367, x -= (2912300367));
- assertEquals(2213791134963007500, x *= (x<<((((-3214746140)>>(tmp = -588929463, tmp))+((tmp = -3084290306, tmp)>>x))>>x)));
- assertEquals(2213791133466809900, x -= (tmp = 1496197641, tmp));
- assertEquals(69834416, x >>>= (x|(((2755815509.6323137)^(x%(((x*((((tmp = 375453453, tmp)<<(x*x))>>(tmp = -973199642, tmp))*x))>>((tmp = -356288629, tmp)>>(tmp = 2879464644, tmp)))<<((((1353647167.9291127)>>>(x/x))<<((2919449101)/(2954998123.5529594)))^x))))&((-2317273650)>>>(tmp = 34560010.71060455, tmp)))));
- assertEquals(69834416, x >>>= (x^(-2117657680.8646245)));
- assertEquals(2217318064, x -= ((tmp = 2035883891, tmp)<<(tmp = -1884739265, tmp)));
- assertEquals(-1272875686, x ^= (tmp = 805889002.7165648, tmp));
- assertEquals(-1272875686, x >>= (x&(((1750455903)*x)>>((722098015)%((tmp = 1605335626, tmp)>>(tmp = -565369634, tmp))))));
- assertEquals(-1274351316, x -= (x>>>((tmp = 2382002632, tmp)-((tmp = -2355012843, tmp)+(1465018311.6735773)))));
- assertEquals(-2982908522.4418216, x -= ((tmp = 1635549038.4418216, tmp)+(((1952167017.720186)&((tmp = -2284822073.1002254, tmp)>>(-1403893917)))%(tmp = 655347757, tmp))));
- assertEquals(312, x >>>= x);
- assertEquals(1248, x <<= (2376583906));
- assertEquals(0, x ^= x);
- assertEquals(0, x *= ((((tmp = 1914053541.881434, tmp)>>>(tmp = 1583032186, tmp))>>>(-2511688231))%(tmp = -2647173031, tmp)));
- assertEquals(0, x >>>= (tmp = -2320612994.2421227, tmp));
- assertEquals(0, x %= (((x+(tmp = -720216298.5403998, tmp))<<(414712685))>>(tmp = 480416588, tmp)));
- assertEquals(0, x >>= ((((3039442014.271272)<<x)%(-2402430612.9724464))&((-2141451461.3664773)%((x>>(1361764256))/((tmp = -1723952801.9320493, tmp)%(477351810.2485285))))));
- assertEquals(-0, x /= (tmp = -1627035877, tmp));
- assertEquals(0, x >>>= (tmp = 1745193212, tmp));
- assertEquals(0, x >>>= (2309131575));
- assertEquals(NaN, x %= (((x*(tmp = -1730907131.6124666, tmp))%((((1481750041)|(x>>((((x>>>(tmp = 3128156522.5936565, tmp))/(tmp = -1277222645.9880452, tmp))^(tmp = -2327254789, tmp))+x)))>>>(-1161176960))>>>(tmp = 3135906272.5466847, tmp)))*(((((-2230902834.464362)^(1822893689.8183987))+(((tmp = 1597326356, tmp)/(x&((tmp = -3044163063.587389, tmp)>>(tmp = 2844997555, tmp))))%(x^x)))>>((x|x)/x))^(2634614167.2529745))));
- assertEquals(0, x &= (3081901595));
- assertEquals(0, x &= (-2453019214.8914948));
- assertEquals(0, x &= x);
- assertEquals(0, x >>>= (-596810618.3666217));
- assertEquals(0, x >>= (((908276623)|x)/x));
- assertEquals(0, x ^= x);
- assertEquals(958890056, x |= (tmp = 958890056.474458, tmp));
- assertEquals(1325436928, x <<= (tmp = -2474326583, tmp));
- assertEquals(711588532333838300, x *= ((-148161646.68183947)<<(tmp = -1149179108.8049204, tmp)));
- assertEquals(0, x ^= (((2862565506)%x)/(tmp = -2865813112, tmp)));
- assertEquals(-2064806628, x += (((tmp = -2677361175.7317276, tmp)/((817159440)>>>(tmp = 1895467706, tmp)))^(x|(tmp = -2309094859, tmp))));
- assertEquals(-69806982479424, x *= ((x&(tmp = 2857559765.1909904, tmp))&(-3166908966.754988)));
- assertEquals(-430255744, x %= ((((((-2968574724.119535)<<x)<<((tmp = 1603913671, tmp)%((-1495838556.661653)^(tmp = 1778219751, tmp))))*(-400364265))<<((((1607866371.235576)-(1961740136))|(1259754297))&(tmp = -1018024797.1352971, tmp)))^x));
- assertEquals(6.828637393208647e-7, x /= (x*(tmp = 1464421, tmp)));
- assertEquals(0, x &= x);
- assertEquals(-0, x *= (((tmp = -2510016276, tmp)-(2088209546))<<((tmp = -1609442851.3789036, tmp)+(tmp = 1919930212, tmp))));
- assertEquals(-0, x %= (tmp = 1965117998, tmp));
- assertEquals(-290294792.53186846, x += ((tmp = -2361555894.5318685, tmp)%(2071261102)));
- assertEquals(-70873, x >>= (tmp = 2206814124, tmp));
- assertEquals(-141746, x += x);
- assertEquals(-141733.9831459089, x -= (((tmp = -806523527, tmp)>>>(tmp = 1897214891, tmp))/x));
- assertEquals(-141733.9831459089, x %= ((tmp = 1996295696, tmp)<<(tmp = 3124244672, tmp)));
- assertEquals(141733.9831459089, x /= (x>>(2688555704.561076)));
- assertEquals(3196954517.3075542, x -= (tmp = -3196812783.3244085, tmp));
- assertEquals(-19929155, x |= (((x|x)+x)^((tmp = 391754876, tmp)-(((((((tmp = -3051902902.5100636, tmp)*(x/(1546924993)))|(tmp = 1494375949, tmp))/((((-795378522)/(tmp = 509984856, tmp))>>>(tmp = -106173186, tmp))+x))|x)|(1916921307))>>>x))));
- assertEquals(1279271449, x &= ((tmp = 1289446971, tmp)&(tmp = 1836102619, tmp)));
- assertEquals(17876992, x <<= (-207633461));
- assertEquals(0, x >>= (tmp = -903885218.9406946, tmp));
- assertEquals(0, x >>>= x);
- assertEquals(-2999, x -= (((754533336.2183633)%(tmp = 557970276.0537136, tmp))>>(tmp = -1171045520, tmp)));
- assertEquals(-0.000003020470363504361, x /= (tmp = 992891715.2229724, tmp));
- assertEquals(1, x /= x);
- assertEquals(0.45768595820301217, x %= ((tmp = 673779031, tmp)/(tmp = -1242414872.3263657, tmp)));
- assertEquals(-980843052.1872087, x += (tmp = -980843052.6448946, tmp));
- assertEquals(-Infinity, x /= ((((tmp = 317747175.8024508, tmp)&(x&(((tmp = 1632953053, tmp)>>x)/x)))%x)/(3145184986)));
- assertEquals(0, x &= (x<<x));
- assertEquals(0, x ^= (x-((2969023660.5619783)/x)));
- assertEquals(0, x *= x);
- assertEquals(NaN, x %= (x/(((x-x)/((tmp = -1622970458.3812745, tmp)-(1626134522)))&((((((tmp = 1384729039.4149384, tmp)^(x%(tmp = -2736365959, tmp)))+((-1465172172)%x))>>(tmp = -1839184810.2603343, tmp))^(((tmp = 1756918419, tmp)>>>(x+(x%(tmp = -2011122996.9794662, tmp))))<<(-3026600748.902623)))*((tmp = -2040286580, tmp)>>(-2899217430.655154))))));
- assertEquals(0, x >>>= (tmp = 2100066003.3046467, tmp));
- assertEquals(1362012169, x ^= (tmp = 1362012169, tmp));
- assertEquals(1476312683, x |= ((457898409)>>>(-3079768830.723079)));
- assertEquals(1441711, x >>>= (905040778.7770994));
- assertEquals(2078530607521, x *= x);
- assertEquals(-208193103, x |= ((tmp = -241750000, tmp)^x));
- assertEquals(745036378, x ^= (((tmp = -1737151062.4726632, tmp)<<x)|(tmp = -1900321813, tmp)));
- assertEquals(1744830464, x <<= x);
- assertEquals(212992, x >>>= ((1210741037)-(x-(x>>>((x^(-1273817997.0036907))+((2401915056.5471)%(x<<(tmp = 1696738364.277438, tmp))))))));
- assertEquals(0.0001604311565639742, x /= (1327622418));
- assertEquals(0, x <<= (tmp = 166631979.34529006, tmp));
- assertEquals(0, x *= ((((tmp = 657814984, tmp)/(((-831055031)>>>(1531978379.1768064))|((tmp = 2470027754.302619, tmp)^(-223467597))))/(tmp = 1678697269.468965, tmp))&(tmp = -1756260071.4360774, tmp)));
- assertEquals(-2049375053, x ^= (tmp = -2049375053, tmp));
- assertEquals(-1879109889, x |= (tmp = -1963586818.0436726, tmp));
- assertEquals(718239919, x ^= (tmp = -1523550640.1925273, tmp));
- assertEquals(-1361085185, x |= (-1939964707));
- assertEquals(2, x >>>= (1864136030.7395325));
- assertEquals(0.794648722849246, x %= ((-668830999)*(((-2227700170.7193384)%(x^(x>>>x)))/(tmp = 399149892, tmp))));
- assertEquals(0, x >>= x);
- assertEquals(0, x *= x);
- assertEquals(0, x &= ((tmp = -2389008496.5948563, tmp)|((((tmp = -2635919193.905919, tmp)*((-64464127)<<(2136112830.1317358)))>>((184057979)*(-1204959085.8362718)))>>>(-442946870.3341484))));
- assertEquals(-243793920, x -= ((tmp = 3002998032, tmp)<<((537875759)<<x)));
- assertEquals(0, x -= x);
- assertEquals(0, x *= ((((66852616.82442963)/((((x^x)&(2975318321.223734))+(((tmp = -1388210811.1249495, tmp)^((((-680567297.7620237)%(x-(tmp = -672906716.4672911, tmp)))-x)*(tmp = -1452125821.0132627, tmp)))*(((2770387154.5427895)%x)%x)))-x))<<((-1481832432.924325)>>(tmp = 3109693867, tmp)))>>>(x/(((((((tmp = 928294418, tmp)^(((-1018314535)/(tmp = -3167523001, tmp))%((((((tmp = -1639338126, tmp)-(tmp = -2613558829, tmp))&x)/x)%(tmp = 513624872, tmp))/((-520660667)&x))))*(2620452414))^((tmp = 2337189239.5949326, tmp)*(3200887846.7954993)))>>>((tmp = 1173330667, tmp)^x))<<x)>>(((tmp = -2475534594.982338, tmp)*x)|x)))));
- assertEquals(0, x /= (2520915286));
- assertEquals(0, x &= x);
- assertEquals(0, x >>= (-1908119327));
- assertEquals(0, x >>>= (tmp = 549007635, tmp));
- assertEquals(0, x >>= (-994747873.8117285));
- assertEquals(0, x <<= ((((x>>>((-3084793026.846681)%((1107295502)&(tmp = -296613957.8133817, tmp))))&((19637717.166736007)/(x+x)))+x)/(-2479724242)));
- assertEquals(-695401420, x += (-695401420));
- assertEquals(-695401394, x += (x>>>(tmp = 2340097307.6556053, tmp)));
- assertEquals(-555745552, x -= (x|(-483851950.68644)));
- assertEquals(-17825792, x <<= x);
- assertEquals(-17825792, x >>= x);
- assertEquals(-17, x %= ((tmp = 1799361095, tmp)|((x>>(((-1201252592)<<((((543273288)+(-2859945716.606924))*x)<<((-3030193601)<<(3081129914.9217644))))|((1471431587.981769)>>(-246180750))))|(((tmp = -2689251055.1605787, tmp)>>x)&(((2131333169)^x)-((tmp = -951555489, tmp)/x))))));
- assertEquals(-8912896, x <<= (1146444211));
- assertEquals(2854567584, x += (tmp = 2863480480, tmp));
- assertEquals(426232502.24151134, x %= (1214167540.8792443));
- assertEquals(1806802048, x ^= (-2368317898));
- assertEquals(432537600, x <<= (tmp = 2831272652.589364, tmp));
- assertEquals(432537600, x %= (((1713810619.3880467)-x)&((-2853023009.553296)&(tmp = -3158798098.3355417, tmp))));
- assertEquals(-509804066, x += (tmp = -942341666, tmp));
- assertEquals(-509804066, x %= (-732349220));
- assertEquals(259900185710132350, x *= x);
- assertEquals(711598501.7021885, x %= ((tmp = 2020395586.2280731, tmp)-(tmp = 3031459563.1386633, tmp)));
- assertEquals(711598503.0618857, x += ((tmp = 967558548.4141241, tmp)/x));
- assertEquals(711598503, x &= x);
- assertEquals(711598503, x ^= (((((1609355669.1963444)+((((tmp = -2660082403.258437, tmp)+(tmp = -235367868, tmp))&(x/x))*((-2595932186.69466)|((tmp = -3039202860, tmp)<<x))))>>>(-951354869))-((tmp = -691482949.6335375, tmp)/(tmp = -1735502400, tmp)))/(tmp = 798440377, tmp)));
- assertEquals(558262613882868500, x *= (784519095.4299527));
- assertEquals(558262611968479000, x -= ((((tmp = 1039039153.4026555, tmp)/(-3138845051.6240187))*(tmp = 633557994, tmp))&(1981507217)));
- assertEquals(1170427648, x |= ((x>>((((-1086327124)%((tmp = -1818798806.368613, tmp)^(tmp = 2183576654.9959817, tmp)))>>x)&((((((tmp = 1315985464.0330539, tmp)&(2774283689.333836))%x)*((2722693772.8994813)&(tmp = -2720671984.945404, tmp)))^(tmp = -76808019, tmp))<<((tmp = 685037799.2336662, tmp)^((tmp = 1057250849, tmp)&(tmp = 1469205111.2989025, tmp))))))+(x*(((tmp = 448288818.47173154, tmp)-(-2527606231))-((8387088.402292728)>>x)))));
- assertEquals(558, x >>>= (tmp = 2732701109, tmp));
- assertEquals(558, x &= x);
- assertEquals(-0.00015855057024653912, x /= ((x+(((tmp = -1963815633, tmp)-(x>>x))-((x|x)>>x)))/x));
- assertEquals(1.3458861596445712e-13, x /= (-1178038492.4116466));
- assertEquals(0, x <<= (-104550232));
- assertEquals(0, x >>>= (x>>(tmp = -255275244.12613606, tmp)));
- assertEquals(0, x >>= x);
- assertEquals(375, x |= ((1576819294.6991196)>>>(-2570246122)));
- assertEquals(96000, x <<= ((2252913843.0150948)>>>(-49239716)));
- assertEquals(6144000, x <<= ((((tmp = -2478967279, tmp)&((x%((tmp = -1705332610.8018858, tmp)+(x+(tmp = 590766349, tmp))))<<(tmp = 1759375933, tmp)))+(-2024465658.849834))&(1564539207.3650014)));
- assertEquals(-1149239296, x <<= (1862803657.7241006));
- assertEquals(-9, x >>= (((tmp = 463306384.05696774, tmp)^x)|((x>>((((-2098070856.799663)<<((-2054870274.9012866)<<(((-2582579691)/(829257170.0266814))<<(((((tmp = -1753535573.7074275, tmp)<<((x>>(-197886116))%((2487188445)%(tmp = 2465391564.873364, tmp))))&(((tmp = -500069832, tmp)&(tmp = 3016637032, tmp))&((tmp = 2525942628, tmp)|((((-920996215)|x)^((((tmp = -687548533.419106, tmp)&(1423222636.058937))<<((tmp = -1096532228, tmp)>>((((tmp = -3124481449.2740726, tmp)^(tmp = 2724328271.808975, tmp))>>x)*x)))+(-1661789589.5808442)))+(((x*(tmp = -1224371664.9549093, tmp))^((tmp = 3202970043, tmp)^x))/(tmp = 131494054.58501709, tmp))))))|(((tmp = -1654136720, tmp)<<x)>>((1652979932.362416)-(tmp = -863732721, tmp))))^(-113307998)))))^(-90820449.91417909))*((tmp = 641519890, tmp)-((((x<<(tmp = 2349936514.071881, tmp))*(2324420443.587892))^x)%(x<<((tmp = -1838473742, tmp)/(((-3154172718.4274178)-x)+x)))))))|(x>>>((tmp = 2096024376.4308293, tmp)<<x)))));
- assertEquals(81, x *= x);
- assertEquals(81, x &= x);
- assertEquals(81, x %= (tmp = 2223962994, tmp));
- assertEquals(81, x ^= ((x/(((-1606183420.099584)|(-1242175583))&(((x|((tmp = 828718431.3311573, tmp)/(x>>x)))+(((-2207542725.4531174)^(x*x))*(tmp = 551575809.955105, tmp)))/x)))&((x>>x)&x)));
- assertEquals(81, x %= (tmp = 279598358.6976975, tmp));
- assertEquals(101.72338484518858, x -= (((tmp = 2452584495.44003, tmp)%((-1181192721)+(((x>>(((x&x)^x)+((x>>>((x+(-2472793823.57181))/(((2854104951)>>(-1208718359.6554642))>>>(1089411895.694705))))/(x|(-2821482890.1780205)))))^(-1786654551))/(-29404242.70557475))))/(((-4352531)<<((-1227287545)<<x))%(-2558589438))));
- assertEquals(101.72338484518858, x %= (-943645643));
- assertEquals(0, x -= x);
- assertEquals(0, x >>>= (-2440404084));
- assertEquals(0, x >>= (tmp = 1029680958.405923, tmp));
- assertEquals(0, x >>>= (1213820208.7204895));
- assertEquals(-0, x /= (tmp = -103093683, tmp));
- assertEquals(0, x >>>= (-2098144813));
- assertEquals(-0, x /= (((-3087283334)+(((tmp = -3129028112.6859293, tmp)%(tmp = 2413829931.1605015, tmp))-(2578195237.8071446)))|x));
- assertEquals(-15, x |= ((((-178926550.92823577)>>>(-965071271))^((tmp = -484633724.7237625, tmp)-(tmp = 473098919.1486404, tmp)))>>((-2264998310.203265)%(tmp = -499034672, tmp))));
- assertEquals(0, x ^= x);
- assertEquals(0, x >>= (((-3207915976.698118)<<(tmp = 2347058630, tmp))|(tmp = -2396250098.559627, tmp)));
- assertEquals(NaN, x %= x);
- assertEquals(NaN, x *= (621843222));
- assertEquals(0, x >>= (((-2409032228.7238913)*x)-(tmp = -887793239, tmp)));
- assertEquals(NaN, x /= x);
- assertEquals(1193017666, x ^= (tmp = 1193017666, tmp));
- assertEquals(3.5844761899682753, x /= (tmp = 332829011.206393, tmp));
- assertEquals(-888572929, x |= (((tmp = 1032409228, tmp)+(tmp = -1920982163.7853453, tmp))+x));
- assertEquals(-1817051951333455600, x *= (((-1506265102)^(tmp = -775881816, tmp))-(tmp = -32116372.59181881, tmp)));
- assertEquals(-1638479616, x |= x);
- assertEquals(-114489, x %= (((tmp = -247137297.37866855, tmp)>>>((((((-322805409)-x)^x)>>((((((((x>>>(tmp = -900610424.7148039, tmp))/(-1155208489.6240904))|((-2874045803)|(tmp = 3050499811, tmp)))+(x/((tmp = -613902712, tmp)^((-982142626.2892077)*((((tmp = -3201753245.6026397, tmp)|((1739238762.0423079)^x))/(243217629.47237313))^((tmp = -11944405.987132788, tmp)/(tmp = 2054031985.633406, tmp)))))))*(tmp = 2696108952.450961, tmp))*x)>>>(tmp = 3058430643.0660386, tmp))>>(x<<x)))>>(-984468302.7450335))%((tmp = 1302320585.246251, tmp)>>>x)))%(tmp = -2436842285.8208156, tmp)));
- assertEquals(2047, x >>>= (2380161237));
- assertEquals(0, x >>= x);
- assertEquals(0, x &= (tmp = 980821012.975836, tmp));
- assertEquals(-1090535537, x -= ((-3064511503.1214876)&((tmp = -2598316939.163751, tmp)<<((tmp = -969452391.8925576, tmp)*x))));
- assertEquals(-2181071074, x += x);
- assertEquals(1, x >>>= ((2902525386.449062)>>x));
- assertEquals(1, x += (x&(tmp = -2643758684.6636515, tmp)));
- assertEquals(1, x %= ((tmp = -2646526891.7004848, tmp)/x));
- assertEquals(448735695.7888887, x -= (tmp = -448735694.7888887, tmp));
- assertEquals(1, x /= x);
- assertEquals(1, x >>= ((-480385726)<<(2641021142)));
- assertEquals(1, x %= (375099107.9200462));
- assertEquals(1, x >>= (((x&((tmp = -2402469116.9903326, tmp)%(tmp = -2862459555.860298, tmp)))*(tmp = -2834162871.0586414, tmp))%(((x>>>(tmp = 721589907.5073895, tmp))*(x^x))%(((tmp = 2844611489.231776, tmp)^((983556913)&(906035409.6693488)))^(x>>>(1239322375))))));
- assertEquals(268435456, x <<= (tmp = 178807644.80966163, tmp));
- assertEquals(44, x %= ((tmp = 2527026779.081539, tmp)>>>(2736129559)));
- assertEquals(88, x += x);
- assertEquals(0, x >>>= x);
- assertEquals(0, x -= x);
- assertEquals(-1523121602, x |= (2771845694));
- assertEquals(-2, x >>= x);
- assertEquals(-4, x += x);
- assertEquals(-256, x <<= (((2522793132.8616533)>>(tmp = 77232772.94058788, tmp))+(3118669244.49152)));
- assertEquals(4294967040, x >>>= x);
- assertEquals(-256, x &= x);
- assertEquals(1278370155.835435, x -= (-1278370411.835435));
- assertEquals(-3.488228054921667, x /= (tmp = -366481243.6881058, tmp));
- assertEquals(1.162742684973889, x /= ((x|(((((2404819175.562809)*(tmp = -2524589506, tmp))&(tmp = -675727145, tmp))>>>(x*x))&((-413250006)<<(tmp = 2408322715, tmp))))|((2940367603)>>>x)));
- assertEquals(0, x >>>= ((2513665793)-(tmp = 1249857454.3367786, tmp)));
- assertEquals(0, x ^= x);
- assertEquals(0, x ^= x);
- assertEquals(1989998348.6336238, x -= (-1989998348.6336238));
- assertEquals(903237918.986834, x %= (1086760429.6467898));
- assertEquals(-4.4185765232981975, x /= (-204418304));
- assertEquals(1471621914, x ^= (tmp = -1471621914.1771696, tmp));
- assertEquals(1471621914, x |= ((((((x<<(tmp = -2676407394.536844, tmp))%(((343324258)+(x/(x>>(((-221193011)>>>x)|x))))>>(((-2737713893)^((tmp = -49214797.00735545, tmp)+((-2818106123.172874)/(tmp = -2361786565.3028684, tmp))))<<(1859353297.6355076))))*(tmp = -751970685, tmp))|((tmp = 2502717391.425871, tmp)/(tmp = -2647169430, tmp)))*((tmp = -1647567294, tmp)&(((tmp = 1819557651, tmp)/x)>>((((-3073469753)/x)-(((tmp = -1973810496.6407511, tmp)&((x-(x+(tmp = -2986851659, tmp)))>>>(tmp = -2226975699, tmp)))|(418770782.142766)))<<x))))*(((((tmp = 125466732, tmp)/((((1453655756.398259)|(((874792086.7064595)-(194880772.91499102))>>>x))%(x<<(tmp = -1445557137, tmp)))<<x))>>>(tmp = -1953751906, tmp))/((tmp = -2140573172.2979035, tmp)*((-108581964)^x)))|(-481484013.0393069))));
- assertEquals(1454179065, x += ((tmp = 947147038.2829313, tmp)|(tmp = -154822975.3629098, tmp)));
- assertEquals(1, x /= x);
- assertEquals(1, x %= ((((((tmp = -2262250297.991866, tmp)-(tmp = 481953960, tmp))/(1629215187.6020458))|(2515244216))>>>((tmp = -3040594752.2184515, tmp)-(tmp = -1116041279, tmp)))^(((-182133502)-(1065160192.6609197))+(((((-1850040207)^(tmp = -1570328610, tmp))^(tmp = 20542725.09256518, tmp))*x)|(2386866629)))));
- assertEquals(1, x &= (2889186303));
- assertEquals(0, x >>= (((-1323093107.050538)>>(x%x))-(((((((-1736522840)+(tmp = -2623890690.8318863, tmp))*(959395040.5565329))*(233734920))<<((x+(x%((tmp = -2370717284.4370327, tmp)%(tmp = 2109311949, tmp))))-(tmp = -1005532894, tmp)))|(861703605))>>>((2399820772)/x))));
- assertEquals(0, x >>= x);
- assertEquals(57233408, x |= ((tmp = 2655923764.4179816, tmp)*(-1353634624.3025436)));
- assertEquals(997939728, x |= (980552208.9005274));
- assertEquals(1859642592476610800, x *= (1863481872));
- assertEquals(-977190656, x <<= x);
- assertEquals(4.378357529141239e+26, x *= ((((x/(((tmp = 2429520991, tmp)/(x/(tmp = 784592802, tmp)))-(tmp = -2704781982, tmp)))*(tmp = -2161015768.2322354, tmp))&((((-3164868762)>>(tmp = 2390893153.32907, tmp))^x)>>(-2422626718.322538)))*(tmp = 278291869, tmp)));
- assertEquals(4.378357529141239e+26, x -= (1710777896.992369));
- assertEquals(0, x &= (((((tmp = -2532956158.400033, tmp)|((2195255831.279001)|(1051047432)))|(-1628591858))|(tmp = -2042607521.947963, tmp))>>((-1471225208)/(((-133621318)>>(1980416325.7358408))*((1741069593.1036062)-(x|(2133911581.991011)))))));
- assertEquals(-0, x /= (-656083507));
- assertEquals(NaN, x += ((tmp = -1071410982.2789869, tmp)%x));
- assertEquals(NaN, x *= (tmp = -1513535145.3146675, tmp));
- assertEquals(0, x >>= ((2831245247.5267224)>>(x<<((x+(((3068824580.7922907)|(1708295544.275714))*((tmp = -1662930228.1170444, tmp)-(((tmp = 1979994889, tmp)<<(tmp = -1826911988, tmp))&((x/(x<<(1909384611.043981)))+(1958052414.7139997))))))<<(tmp = 2481909816.56558, tmp)))));
- assertEquals(0, x *= (((tmp = -2979739958.1614842, tmp)&x)+x));
- assertEquals(-0, x *= ((-332769864.50313234)^x));
- assertEquals(0, x >>= ((((689018886.1436445)+(tmp = -2819546038.620694, tmp))|(((tmp = -1459669934.9066005, tmp)|x)/x))<<(((tmp = 2640360389, tmp)/((x%((-1947492547.9056122)%((1487212416.2083092)-(-1751984129))))^x))%(tmp = 2666842881, tmp))));
- assertEquals(-1801321460, x |= (tmp = 2493645836, tmp));
- assertEquals(-1801321460, x %= (2400405136));
- assertEquals(-2905399858195810300, x *= (tmp = 1612926911, tmp));
- assertEquals(-2905399858195810300, x -= (x>>(tmp = 1603910263.9593458, tmp)));
- assertEquals(-238798848, x &= ((tmp = -2638646212.767516, tmp)/(((tmp = 1755616291.436998, tmp)>>>(tmp = 1083349775, tmp))-(x%(((tmp = 1728859105.53634, tmp)^(1931522619.0403612))/(tmp = 712460587.0025489, tmp))))));
- assertEquals(-2363873607.2302856, x += (-2125074759.230286));
- assertEquals(1712665, x &= (((117229515)>>>(((1707090894.1915488)>>>((-1696008695)>>(((-1045367326.7522249)<<(tmp = -209334716, tmp))-x)))|(-1707909786.080653)))%(1260761349.172689)));
- assertEquals(1073741824, x <<= (tmp = -289437762.34742975, tmp));
- assertEquals(1073741824, x &= (tmp = 2079141140, tmp));
- assertEquals(0, x <<= ((x^(-3139646716.1615124))-(((-362323071.74237394)|(tmp = 2989896849, tmp))*(tmp = -218217991, tmp))));
- assertEquals(0, x &= (tmp = -1476835288.425903, tmp));
- assertEquals(0, x >>>= (tmp = 61945262.70868635, tmp));
- assertEquals(0, x ^= x);
- assertEquals(-2735263498.7189775, x -= (2735263498.7189775));
- assertEquals(-1182289920, x <<= (x+x));
- assertEquals(-1182289580, x ^= ((2858446263.2258)>>>(2387398039.6273785)));
- assertEquals(696693056, x &= ((2178665823)*(-51848583)));
- assertEquals(1652555776, x <<= (((tmp = 2943916975, tmp)-((-1544273901)>>(-1671503106.2896929)))|x));
- assertEquals(6455296, x >>>= (tmp = 1492638248.675439, tmp));
- assertEquals(2097152, x &= (((x|x)*(2873891571.7000637))^((2165264807)+(tmp = 451721563, tmp))));
- assertEquals(2097152, x %= (tmp = 1089484582.1455994, tmp));
- assertEquals(2097152, x <<= x);
- assertEquals(2097152, x &= ((tmp = 119096343.4032247, tmp)^((-1947874541)*x)));
- assertEquals(0, x &= (tmp = 2363070677, tmp));
- assertEquals(0, x &= ((tmp = -1897325383, tmp)>>>((2368480527)>>>((tmp = 1837528979, tmp)*(-1838904077)))));
- assertEquals(-1898659416, x ^= (-1898659416.1125412));
- assertEquals(-725506048, x <<= x);
- assertEquals(1392943104, x <<= (295287938.9104482));
- assertEquals(-63620329, x ^= ((tmp = -3175925826.5573816, tmp)-(tmp = 2474613927, tmp)));
- assertEquals(-1135111726, x -= ((tmp = -1133259081, tmp)^(((tmp = -742228219, tmp)>>((-7801909.587711811)%((tmp = -642758873, tmp)+(tmp = 2893927824.6036444, tmp))))^((tmp = -2145465178.9142997, tmp)+x))));
- assertEquals(0, x ^= x);
- assertEquals(660714589, x |= (660714589));
- assertEquals(660714676, x ^= ((-376720042.8047826)>>>(2196220344)));
- assertEquals(660714676, x |= ((((((((x<<(-1140465568))-(tmp = -1648489774.1573918, tmp))%(((tmp = -2955505390.573639, tmp)*x)<<((((tmp = -1769375963, tmp)*(tmp = -440619797, tmp))&((tmp = 1904284066, tmp)%(-2420852665.0629807)))+(-324601009.2063596))))>>(tmp = 2317210783.9757776, tmp))^((tmp = 750057067.4541628, tmp)^(tmp = -1391814244.7286487, tmp)))>>((344544658.6054913)%((tmp = -1508630423.218488, tmp)&(tmp = 1918909238.2974637, tmp))))>>((-647746783.685822)&(tmp = 2444858958.3595476, tmp)))&x));
- assertEquals(-962337195, x ^= (tmp = -507358495.30825853, tmp));
- assertEquals(-182008925.58535767, x %= (tmp = -195082067.35366058, tmp));
- assertEquals(502070, x >>>= (tmp = 1459732237.1447744, tmp));
- assertEquals(-2391009930.7235765, x -= (tmp = 2391512000.7235765, tmp));
- assertEquals(1568669696, x <<= x);
- assertEquals(0, x <<= (tmp = -571056688.2717848, tmp));
- assertEquals(1770376226, x ^= (tmp = 1770376226.0584736, tmp));
- assertEquals(0, x ^= x);
- assertEquals(0, x &= ((((x<<x)>>>x)|x)|(((tmp = -2141573723, tmp)^x)|(64299956))));
- assertEquals(0, x ^= x);
- assertEquals(0, x &= x);
- assertEquals(0, x <<= (1106060336.7362857));
- assertEquals(-0, x /= (x|(tmp = 2760823963, tmp)));
- assertEquals(0, x <<= ((-2436225757)|(-1800598694.4062433)));
- assertEquals(0, x >>>= ((-728332508.9870625)<<x));
- assertEquals(-173377680, x ^= ((tmp = -173377680, tmp)%(tmp = -2843994892, tmp)));
- assertEquals(-173377680, x |= ((((-819217898)&(tmp = -1321650255, tmp))&(x+((x^x)<<((1700753064)>>((((((-1038799327)>>((782275464)^x))-(tmp = -2113814317.8539028, tmp))>>(2143804838))&x)-((2970418921)/(-3073015285.6587048)))))))&((-1759593079.4077306)%((1699128805)-((tmp = -467193967, tmp)&(((2225788267.3466334)*(((2687946762.5504274)+x)>>>x))<<(-1853556066.880512)))))));
- assertEquals(-0.5520657226957338, x /= ((tmp = -755493878, tmp)&(tmp = 918108389, tmp)));
- assertEquals(0.30477656217556287, x *= x);
- assertEquals(0, x &= ((tmp = -2746007517, tmp)<<(2749629340)));
- assertEquals(0, x ^= ((x%(tmp = 1683077876, tmp))%(-162706778)));
- assertEquals(0, x *= (tmp = 10203423, tmp));
- assertEquals(119043212.1461842, x += (tmp = 119043212.1461842, tmp));
- assertEquals(587202560, x <<= (tmp = 658697910.7051642, tmp));
- assertEquals(-138689730, x |= (x-(tmp = 1296317634.5661907, tmp)));
- assertEquals(-138663011, x -= ((-1751010109.5506423)>>(152829872)));
- assertEquals(-138663011, x %= (-1266200468));
- assertEquals(-138663011, x &= (x|((tmp = -571277275.622529, tmp)<<x)));
- assertEquals(-138663011, x >>= ((971259905.1265712)*(tmp = 2203764981, tmp)));
- assertEquals(-138663011, x %= (-904715829));
- assertEquals(-138663011, x |= ((tmp = -2823047885.283391, tmp)>>>(((tmp = 533217000, tmp)|(650754598.7836078))|(-1475565890))));
- assertEquals(-1610612736, x <<= x);
- assertEquals(-1610612736, x &= x);
- assertEquals(163840, x >>>= (-188885010));
- assertEquals(-1224224814, x |= (tmp = 3070742482, tmp));
- assertEquals(1498726395213334500, x *= x);
- assertEquals(1723591210, x |= ((tmp = 615164458, tmp)|x));
- assertEquals(1721910480, x ^= (x>>>x));
- assertEquals(4505284605.764313, x -= (tmp = -2783374125.7643127, tmp));
- assertEquals(-9504912393868483000, x *= (((tmp = 2896651872, tmp)<<(-2896385692.9017262))&(((((tmp = -2081179810.20238, tmp)|(tmp = -2484863999, tmp))>>((tmp = 1560885110.2665749, tmp)/(((tmp = 934324123.4289343, tmp)<<((tmp = -1591614157.0496385, tmp)+x))/(((x%(((tmp = 1672629986.8055913, tmp)%x)>>(tmp = 2116315086.2559657, tmp)))/(((-2687682697.5806303)>>x)/(-2034391222.5029132)))%(x-((((((tmp = 2598594967, tmp)/(((((((2950032233)%x)/x)^(tmp = -2126753451.3732262, tmp))<<(tmp = -3019113473, tmp))+(tmp = -2021220129.2320697, tmp))%((((-587645875.4666483)>>(((((x+x)+x)&(tmp = 533801785, tmp))|x)-((tmp = -2224808495.678903, tmp)/(1501942300))))>>>(-2558947646))>>((2798508249.020792)>>>x))))>>>((1060584557)/((((((((x&x)|(1426725365))>>>(tmp = 1500508838, tmp))>>(-1328705938))*((tmp = -2288009425.598777, tmp)>>>(((2586897285.9759064)%((-1605651559.2122297)>>>(tmp = 1936736684.4887302, tmp)))+((tmp = 2316261040, tmp)^(570340750.353874)))))&(x^((tmp = -2266524143, tmp)-(tmp = 2358520476, tmp))))+(tmp = 1449254900.9222453, tmp))%((-100598196)%((tmp = -2985318242.153491, tmp)>>((620722274.4565848)>>(871118975)))))))<<x)*(tmp = -1287065606.4143271, tmp))>>>(1038059916.2438471)))))))+((x/(-276990308.1264961))&(tmp = 2471016351.2195315, tmp)))|(((((tmp = -1288792769.3210807, tmp)+((tmp = -641817194, tmp)*(x<<(((-1933817364)>>(((tmp = 2084673536, tmp)|x)&x))&(tmp = -2752464480, tmp)))))%((796026752)*x))+(((tmp = -3083359669, tmp)|x)-((715303522)|(tmp = 181297266, tmp))))*(-1691520182.3207517)))));
- assertEquals(0, x <<= (-2322389800));
- assertEquals(0, x *= (tmp = 3188682235, tmp));
- assertEquals(0, x |= (x>>>((tmp = -2729325231.8288336, tmp)^((-393497076.96012783)*(x/(tmp = -2198942459.9466457, tmp))))));
- assertEquals(0, x ^= x);
- assertEquals(0, x %= (2835024997.4447937));
- assertEquals(0, x <<= x);
- assertEquals(0, x >>= (tmp = 1109824126, tmp));
- assertEquals(0, x <<= (3013043386));
- assertEquals(206825782.74659085, x -= (-206825782.74659085));
- assertEquals(-645346761227699500, x *= (-3120243292));
- assertEquals(6825462, x >>= ((tmp = 1457908135, tmp)<<x));
- assertEquals(-612366097.9189918, x -= (619191559.9189918));
- assertEquals(-612306090.9189918, x -= ((2328676543.893506)>>x));
- assertEquals(0, x ^= (x>>(((x>>>(1856200611.2269292))&(tmp = 2003217473, tmp))%((((((-107135673)+(((3062079356.170611)<<(tmp = -676928983, tmp))>>((tmp = -1487074941.2638814, tmp)|((-1601614031)/(1317006144.5025365)))))+x)*(((1163301641)>>>(448796567))/((x%((tmp = 72293197.34410787, tmp)+(-2304112723)))/((455610361)%(-2799431520)))))>>>(-217305041.09432888))<<(x-(tmp = -2168353649, tmp))))));
- assertEquals(0, x >>= x);
- assertEquals(-Infinity, x -= (((-1651597599.8950624)+(1780404320))/x));
- assertEquals(0, x <<= (tmp = 2246420272.4321294, tmp));
- assertEquals(0, x *= ((2793605382)-(tmp = -272299011, tmp)));
- assertEquals(0, x *= x);
- assertEquals(0, x <<= x);
- assertEquals(0, x >>= (tmp = 2556413090, tmp));
- assertEquals(0, x >>= ((tmp = -1784710085, tmp)%x));
- assertEquals(0, x %= (tmp = -1929880813, tmp));
- assertEquals(0, x *= (2586983368));
- assertEquals(0, x &= x);
- assertEquals(0, x <<= (-2144588807));
- assertEquals(0, x ^= ((x<<(((((((-596537598)+((x-(((((((tmp = -3179604796, tmp)/((tmp = 1156725365.3543215, tmp)>>>(tmp = -2762144319, tmp)))%(x<<x))&((tmp = 1750241928.1271567, tmp)&(x/((tmp = 1781306819, tmp)|x))))+((((2893068644)/((tmp = -576164593.9720252, tmp)<<((2724671.48995471)&(tmp = -573132475, tmp))))%(tmp = -1355625108, tmp))&(tmp = -302869512.5880568, tmp)))+x)<<x))>>((tmp = -2569172808, tmp)/x)))^x)-(tmp = -1174006275.2213159, tmp))&x)&(((((((-2303274799)>>(tmp = -814839320, tmp))/(tmp = 183887306.09810615, tmp))>>(((tmp = 1054106394.3704875, tmp)|x)>>>x))-(x-(tmp = 1313696830, tmp)))-((tmp = 2373274399.0742035, tmp)|((((tmp = -3163779539.4902935, tmp)*(tmp = -3056125181.726942, tmp))&(((x^(x^(x/((tmp = -576441696.6015451, tmp)<<(tmp = -26223719.920306206, tmp)))))>>(tmp = -2332835940, tmp))|((-146303509.41093707)&(tmp = -2676964025, tmp))))/((((x*(tmp = 1059918020, tmp))|((((2341797349)|(tmp = -744763805.1381104, tmp))<<x)+((2991320875.552578)^(2920702604.701831))))^(-1721756138))^(((tmp = -2794367554, tmp)>>((-2671235923.2097874)<<(x&((((tmp = -621472314.0859051, tmp)-(((x*x)+x)>>>((tmp = 1834038956, tmp)+x)))*x)^(tmp = -2090567586.321468, tmp)))))<<(321395210))))))>>>(tmp = -1207661719, tmp)))+(-2877264053.3805156)))/(x%(tmp = -2226991657.709366, tmp))));
- assertEquals(0, x *= (tmp = 986904991.061398, tmp));
- assertEquals(0, x -= (x%(650819306.6671969)));
- assertEquals(0, x >>>= (905893666.2871252));
- assertEquals(0, x += (((tmp = 2501942710.4804144, tmp)&x)/((tmp = -851080399.1751502, tmp)-(-1168623992))));
- assertEquals(-0, x *= (tmp = -2014577821.4554045, tmp));
- assertEquals(0, x &= (tmp = 1995246018, tmp));
- assertEquals(0, x %= (1724355237.7031958));
- assertEquals(-954696411, x += (((-2825222201)+(((1662353496.1795506)>>>(x-x))|(tmp = 225015046, tmp)))^(x&x)));
- assertEquals(-2158427339993389800, x *= (2260852052.1539803));
- assertEquals(19559, x >>>= (-862409169.4978967));
- assertEquals(-0.000012241163878671237, x /= (x^(tmp = 2697144215.160239, tmp)));
- assertEquals(0, x -= x);
- assertEquals(1448177644, x |= (tmp = 1448177644.624848, tmp));
- assertEquals(1448177644, x %= (((-1497553637.4976408)+(402228446))<<x));
- assertEquals(2304640553, x -= (-856462909));
- assertEquals(152436736, x &= ((766686903)*(((tmp = 660964683.1744609, tmp)|((((tmp = 297369746, tmp)-(x+((tmp = -2677127146, tmp)/x)))>>(((((((x%(x<<x))-(((((529254728)|((x|(-1407086127.6088922))&(tmp = -1968465008.5000398, tmp)))/(x%x))&((((-2761805265.92574)-x)*(x^(tmp = 110730179, tmp)))%((177220657.06030762)*(((2532585190.671373)/x)+(-1465143151)))))<<((tmp = -3008848338, tmp)<<(-2475597073))))|((-192996756.38619018)|((((1445996780)|(x>>>((((tmp = -2482370545.791443, tmp)*(tmp = -270543594, tmp))^x)*((1346780586)/(tmp = -625613363.885356, tmp)))))-(x<<(x/(-562307527))))&(-125701272))))*((x&x)%(tmp = 752963070, tmp)))>>>(tmp = 17419750.79086232, tmp))*x)^(x^((-157821212.04674292)-(tmp = 503849221.598824, tmp)))))-(tmp = 1479418449, tmp)))>>>((((((-78138548.2193842)<<(((2319032860.806689)-(tmp = -1564963892.5137577, tmp))>>>(-73673322.28957987)))<<((1797573493.3467085)*x))>>(tmp = 759994997, tmp))>>>(-1066441220))&(((((((tmp = 1972048857, tmp)*(((x&((-1347017320.0747669)>>>x))*(-2332716925.705054))%(-376976019.24362826)))>>>((tmp = -466479974, tmp)+x))&(-2282789473.3675604))|(((((((((269205423.7510414)-(tmp = 21919626.105656862, tmp))*((x-(tmp = -378670528, tmp))>>(tmp = -1045706598, tmp)))>>(tmp = -3062647341.234485, tmp))>>>x)|(tmp = -285399599.9386575, tmp))%(tmp = 2731214562, tmp))|((((tmp = 837093165.3438574, tmp)|(tmp = -2956931321, tmp))+((1871874558.3292787)<<((x|((tmp = -3169147427, tmp)%(((x^x)%(1479885041))%((1769991217)%(tmp = -1899472458, tmp)))))*(tmp = -837098563.71806, tmp))))>>(tmp = -1866722748, tmp)))-(2037734340.8345597)))>>((tmp = -1262019180.5332131, tmp)+(x*(1274173993.9800131))))*(tmp = 2336989321.855402, tmp))))));
- assertEquals(4, x >>= (tmp = -2577728327, tmp));
- assertEquals(16, x *= (x<<((2622323372.580596)*(tmp = -1947643367, tmp))));
- assertEquals(33554432, x <<= (tmp = -2938370507, tmp));
- assertEquals(-2399497018.987414, x -= (tmp = 2433051450.987414, tmp));
- assertEquals(1, x /= x);
- assertEquals(2, x <<= x);
- assertEquals(0, x >>= (x&x));
- assertEquals(0, x <<= x);
-}
-f();
diff --git a/src/3rdparty/v8/test/mjsunit/object-define-property.js b/src/3rdparty/v8/test/mjsunit/object-define-property.js
index fdaf82d..970a803 100644
--- a/src/3rdparty/v8/test/mjsunit/object-define-property.js
+++ b/src/3rdparty/v8/test/mjsunit/object-define-property.js
@@ -27,7 +27,7 @@
// Tests the object.defineProperty method - ES 15.2.3.6
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --es5-readonly
// Check that an exception is thrown when null is passed as object.
var exception = false;
@@ -1057,6 +1057,8 @@ assertEquals(999, o[999]);
// Regression test: Bizzare behavior on non-strict arguments object.
+// TODO(yangguo): Tests disabled, needs investigation!
+/*
(function test(arg0) {
// Here arguments[0] is a fast alias on arg0.
Object.defineProperty(arguments, "0", {
@@ -1075,7 +1077,7 @@ assertEquals(999, o[999]);
assertEquals(2, arg0);
assertEquals(3, arguments[0]);
})(0);
-
+*/
// Regression test: We should never observe the hole value.
var objectWithGetter = {};
@@ -1085,3 +1087,106 @@ assertEquals(undefined, objectWithGetter.__lookupSetter__('foo'));
var objectWithSetter = {};
objectWithSetter.__defineSetter__('foo', function(x) {});
assertEquals(undefined, objectWithSetter.__lookupGetter__('foo'));
+
+// An object with a getter on the prototype chain.
+function getter() { return 111; }
+function anotherGetter() { return 222; }
+
+function testGetterOnProto(expected, o) {
+ assertEquals(expected, o.quebec);
+}
+
+obj1 = {};
+Object.defineProperty(obj1, "quebec", { get: getter, configurable: true });
+obj2 = Object.create(obj1);
+obj3 = Object.create(obj2);
+
+testGetterOnProto(111, obj3);
+testGetterOnProto(111, obj3);
+%OptimizeFunctionOnNextCall(testGetterOnProto);
+testGetterOnProto(111, obj3);
+testGetterOnProto(111, obj3);
+
+Object.defineProperty(obj1, "quebec", { get: anotherGetter });
+
+testGetterOnProto(222, obj3);
+testGetterOnProto(222, obj3);
+%OptimizeFunctionOnNextCall(testGetterOnProto);
+testGetterOnProto(222, obj3);
+testGetterOnProto(222, obj3);
+
+// An object with a setter on the prototype chain.
+var modifyMe;
+function setter(x) { modifyMe = x+1; }
+function anotherSetter(x) { modifyMe = x+2; }
+
+function testSetterOnProto(expected, o) {
+ modifyMe = 333;
+ o.romeo = 444;
+ assertEquals(expected, modifyMe);
+}
+
+obj1 = {};
+Object.defineProperty(obj1, "romeo", { set: setter, configurable: true });
+obj2 = Object.create(obj1);
+obj3 = Object.create(obj2);
+
+testSetterOnProto(445, obj3);
+testSetterOnProto(445, obj3);
+%OptimizeFunctionOnNextCall(testSetterOnProto);
+testSetterOnProto(445, obj3);
+testSetterOnProto(445, obj3);
+
+Object.defineProperty(obj1, "romeo", { set: anotherSetter });
+
+testSetterOnProto(446, obj3);
+testSetterOnProto(446, obj3);
+%OptimizeFunctionOnNextCall(testSetterOnProto);
+testSetterOnProto(446, obj3);
+testSetterOnProto(446, obj3);
+
+// Removing a setter on the prototype chain.
+function testSetterOnProtoStrict(o) {
+ "use strict";
+ o.sierra = 12345;
+}
+
+obj1 = {};
+Object.defineProperty(obj1, "sierra",
+ { get: getter, set: setter, configurable: true });
+obj2 = Object.create(obj1);
+obj3 = Object.create(obj2);
+
+testSetterOnProtoStrict(obj3);
+testSetterOnProtoStrict(obj3);
+%OptimizeFunctionOnNextCall(testSetterOnProtoStrict);
+testSetterOnProtoStrict(obj3);
+testSetterOnProtoStrict(obj3);
+
+Object.defineProperty(obj1, "sierra",
+ { get: getter, set: undefined, configurable: true });
+
+exception = false;
+try {
+ testSetterOnProtoStrict(obj3);
+} catch (e) {
+ exception = true;
+ assertTrue(/which has only a getter/.test(e));
+}
+assertTrue(exception);
+
+// Test assignment to a getter-only property on the prototype chain. This makes
+// sure that crankshaft re-checks its assumptions and doesn't rely only on type
+// feedback (which would be monomorphic here).
+
+function Assign(o) {
+ o.blubb = 123;
+}
+
+function C() {}
+
+Assign(new C);
+Assign(new C);
+%OptimizeFunctionOnNextCall(Assign);
+Object.defineProperty(C.prototype, "blubb", {get: function() { return -42; }});
+Assign(new C);
diff --git a/src/3rdparty/v8/test/mjsunit/override-read-only-property.js b/src/3rdparty/v8/test/mjsunit/override-read-only-property.js
index b8fa501..2876ae1 100644
--- a/src/3rdparty/v8/test/mjsunit/override-read-only-property.js
+++ b/src/3rdparty/v8/test/mjsunit/override-read-only-property.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --es5_readonly
+
// According to ECMA-262, sections 8.6.2.2 and 8.6.2.3 you're not
// allowed to override read-only properties, not even if the read-only
// property is in the prototype chain.
@@ -38,19 +40,19 @@ F.prototype = Number;
var original_number_max = Number.MAX_VALUE;
// Assignment to a property which does not exist on the object itself,
-// but is read-only in a prototype takes effect.
+// but is read-only in a prototype does not take effect.
var f = new F();
assertEquals(original_number_max, f.MAX_VALUE);
f.MAX_VALUE = 42;
-assertEquals(42, f.MAX_VALUE);
+assertEquals(original_number_max, f.MAX_VALUE);
// Assignment to a property which does not exist on the object itself,
-// but is read-only in a prototype takes effect.
+// but is read-only in a prototype does not take effect.
f = new F();
with (f) {
MAX_VALUE = 42;
}
-assertEquals(42, f.MAX_VALUE);
+assertEquals(original_number_max, f.MAX_VALUE);
// Assignment to read-only property on the object itself is ignored.
Number.MAX_VALUE = 42;
diff --git a/src/3rdparty/v8/test/mjsunit/packed-elements.js b/src/3rdparty/v8/test/mjsunit/packed-elements.js
new file mode 100644
index 0000000..cfcdf80
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/packed-elements.js
@@ -0,0 +1,112 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays --packed-arrays
+
+var has_packed_elements = !%HasFastHoleyElements(Array());
+
+function test1() {
+ var a = Array(8);
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertTrue(%HasFastHoleyElements(a));
+}
+
+function test2() {
+ var a = Array();
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+}
+
+function test3() {
+ var a = Array(1,2,3,4,5,6,7);
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+}
+
+function test4() {
+ var a = [1, 2, 3, 4];
+ assertTrue(%HasFastSmiElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ var b = [1, 2,, 4];
+ assertTrue(%HasFastSmiElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+}
+
+function test5() {
+ var a = [1, 2, 3, 4.5];
+ assertTrue(%HasFastDoubleElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ var b = [1,, 3.5, 4];
+ assertTrue(%HasFastDoubleElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+ var c = [1, 3.5,, 4];
+ assertTrue(%HasFastDoubleElements(c));
+ assertTrue(%HasFastHoleyElements(c));
+}
+
+function test6() {
+ var x = new Object();
+ var a = [1, 2, 3.5, x];
+ assertTrue(%HasFastObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ assertEquals(1, a[0]);
+ assertEquals(2, a[1]);
+ assertEquals(3.5, a[2]);
+ assertEquals(x, a[3]);
+ var b = [1,, 3.5, x];
+ assertTrue(%HasFastObjectElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+ assertEquals(1, b[0]);
+ assertEquals(undefined, b[1]);
+ assertEquals(3.5, b[2]);
+ assertEquals(x, b[3]);
+ var c = [1, 3.5, x,,];
+ assertTrue(%HasFastObjectElements(c));
+ assertTrue(%HasFastHoleyElements(c));
+ assertEquals(1, c[0]);
+ assertEquals(3.5, c[1]);
+ assertEquals(x, c[2]);
+ assertEquals(undefined, c[3]);
+}
+
+function test_with_optimization(f) {
+ // Run tests in a loop to make sure that inlined Array() constructor runs out
+ // of new space memory and must fall back on runtime impl.
+ for (i = 0; i < 25000; ++i) f();
+ %OptimizeFunctionOnNextCall(f);
+ for (i = 0; i < 25000; ++i) f(); // Make sure GC happens
+}
+
+if (has_packed_elements) {
+ test_with_optimization(test1);
+ test_with_optimization(test2);
+ test_with_optimization(test3);
+ test_with_optimization(test4);
+ test_with_optimization(test5);
+ test_with_optimization(test6);
+}
+
diff --git a/src/3rdparty/v8/test/mjsunit/parse-int-float.js b/src/3rdparty/v8/test/mjsunit/parse-int-float.js
index 2e4f648..5a9b6f3 100644
--- a/src/3rdparty/v8/test/mjsunit/parse-int-float.js
+++ b/src/3rdparty/v8/test/mjsunit/parse-int-float.js
@@ -29,10 +29,10 @@ assertEquals(0, parseInt('0'));
assertEquals(0, parseInt(' 0'));
assertEquals(0, parseInt(' 0 '));
-assertEquals(63, parseInt('077'));
-assertEquals(63, parseInt(' 077'));
-assertEquals(63, parseInt(' 077 '));
-assertEquals(-63, parseInt(' -077'));
+assertEquals(77, parseInt('077'));
+assertEquals(77, parseInt(' 077'));
+assertEquals(77, parseInt(' 077 '));
+assertEquals(-77, parseInt(' -077'));
assertEquals(3, parseInt('11', 2));
assertEquals(4, parseInt('11', 3));
diff --git a/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js b/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js
index ef5a10b..0c307e6 100644..100755
--- a/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js
+++ b/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js
@@ -27,7 +27,7 @@
// Flags: --allow-natives-syntax
-var pixels = new PixelArray(8);
+var pixels = new Uint8ClampedArray(8);
function f() {
for (var i = 0; i < 8; i++) {
diff --git a/src/3rdparty/v8/test/mjsunit/readonly.js b/src/3rdparty/v8/test/mjsunit/readonly.js
new file mode 100644
index 0000000..4d06b7c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/readonly.js
@@ -0,0 +1,228 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --harmony-proxies --es5_readonly
+
+// Different ways to create an object.
+
+function CreateFromLiteral() {
+ return {};
+}
+
+function CreateFromObject() {
+ return new Object;
+}
+
+function CreateDefault() {
+ return Object.create(Object.prototype);
+}
+
+function CreateFromConstructor(proto) {
+ function C() {}
+ (new C).b = 9; // Make sure that we can have an in-object property.
+ C.prototype = proto;
+ return function() { return new C; }
+}
+
+function CreateFromApi(proto) {
+ return function() { return Object.create(proto); }
+}
+
+function CreateWithProperty(proto) {
+ function C() { this.a = -100; }
+ C.prototype = proto;
+ return function() { return new C; }
+}
+
+var bases = [CreateFromLiteral, CreateFromObject, CreateDefault];
+var inherits = [CreateFromConstructor, CreateFromApi, CreateWithProperty];
+var constructs = [CreateFromConstructor, CreateFromApi];
+
+function TestAllCreates(f) {
+ // The depth of the prototype chain up the.
+ for (var depth = 0; depth < 3; ++depth) {
+ // Introduce readonly-ness this far up the chain.
+ for (var up = 0; up <= depth; ++up) {
+ // Try different construction methods.
+ for (var k = 0; k < constructs.length; ++k) {
+ // Construct a fresh prototype chain from above functions.
+ for (var i = 0; i < bases.length; ++i) {
+ var p = bases[i]();
+ // There may be a preexisting property under the insertion point...
+ for (var j = 0; j < depth - up; ++j) {
+ p = inherits[Math.floor(inherits.length * Math.random())](p)();
+ }
+ // ...but not above it.
+ for (var j = 0; j < up; ++j) {
+ p = constructs[Math.floor(constructs.length * Math.random())](p)();
+ }
+ // Create a fresh constructor.
+ var c = constructs[k](p);
+ f(function() {
+ var o = c();
+ o.up = o;
+ for (var j = 0; j < up; ++j) o.up = Object.getPrototypeOf(o.up);
+ return o;
+ })
+ }
+ }
+ }
+ }
+}
+
+
+// Different ways to make a property read-only.
+
+function ReadonlyByNonwritableDataProperty(o, name) {
+ Object.defineProperty(o, name, {value: -41, writable: false});
+}
+
+function ReadonlyByAccessorPropertyWithoutSetter(o, name) {
+ Object.defineProperty(o, name, {get: function() { return -42; }});
+}
+
+function ReadonlyByGetter(o, name) {
+ o.__defineGetter__("a", function() { return -43; });
+}
+
+function ReadonlyByFreeze(o, name) {
+ o[name] = -44;
+ Object.freeze(o);
+}
+
+function ReadonlyByProto(o, name) {
+ var p = Object.create(o.__proto__);
+ Object.defineProperty(p, name, {value: -45, writable: false});
+ o.__proto__ = p;
+}
+
+function ReadonlyByProxy(o, name) {
+ var p = Proxy.create({
+ getPropertyDescriptor: function() {
+ return {value: -46, writable: false, configurable: true};
+ }
+ });
+ o.__proto__ = p;
+}
+
+var readonlys = [
+ ReadonlyByNonwritableDataProperty, ReadonlyByAccessorPropertyWithoutSetter,
+ ReadonlyByGetter, ReadonlyByFreeze, ReadonlyByProto, ReadonlyByProxy
+]
+
+function TestAllReadonlys(f) {
+ // Provide various methods to making a property read-only.
+ for (var i = 0; i < readonlys.length; ++i) {
+ print(" readonly =", i)
+ f(readonlys[i]);
+ }
+}
+
+
+// Different use scenarios.
+
+function Assign(o, x) {
+ o.a = x;
+}
+
+function AssignStrict(o, x) {
+ "use strict";
+ o.a = x;
+}
+
+function TestAllModes(f) {
+ for (var strict = 0; strict < 2; ++strict) {
+ print(" strict =", strict);
+ f(strict);
+ }
+}
+
+function TestAllScenarios(f) {
+ for (var t = 0; t < 100; t = 2*t + 1) {
+ print("t =", t)
+ f(function(strict, create, readonly) {
+ // Make sure that the assignments are monomorphic.
+ %DeoptimizeFunction(Assign);
+ %DeoptimizeFunction(AssignStrict);
+ %ClearFunctionTypeFeedback(Assign);
+ %ClearFunctionTypeFeedback(AssignStrict);
+ for (var i = 0; i < t; ++i) {
+ var o = create();
+ assertFalse("a" in o && !("a" in o.__proto__));
+ if (strict === 0)
+ Assign(o, i);
+ else
+ AssignStrict(o, i);
+ assertEquals(i, o.a);
+ }
+ %OptimizeFunctionOnNextCall(Assign);
+ %OptimizeFunctionOnNextCall(AssignStrict);
+ var o = create();
+ assertFalse("a" in o && !("a" in o.__proto__));
+ readonly(o.up, "a");
+ assertTrue("a" in o);
+ if (strict === 0)
+ Assign(o, t + 1);
+ else
+ assertThrows(function() { AssignStrict(o, t + 1) }, TypeError);
+ assertTrue(o.a < 0);
+ });
+ }
+}
+
+
+// Runner.
+
+TestAllScenarios(function(scenario) {
+ TestAllModes(function(strict) {
+ TestAllReadonlys(function(readonly) {
+ TestAllCreates(function(create) {
+ scenario(strict, create, readonly);
+ });
+ });
+ });
+});
+
+
+// Extra test forcing bailout.
+
+function Assign2(o, x) { o.a = x }
+
+(function() {
+ var p = CreateFromConstructor(Object.prototype)();
+ var c = CreateFromConstructor(p);
+ for (var i = 0; i < 3; ++i) {
+ var o = c();
+ Assign2(o, i);
+ assertEquals(i, o.a);
+ }
+ %OptimizeFunctionOnNextCall(Assign2);
+ ReadonlyByNonwritableDataProperty(p, "a");
+ var o = c();
+ Assign2(o, 0);
+ assertTrue(o.a < 0);
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/regexp-capture-3.js b/src/3rdparty/v8/test/mjsunit/regexp-capture-3.js
index 9bdd600..b676f01 100644..100755
--- a/src/3rdparty/v8/test/mjsunit/regexp-capture-3.js
+++ b/src/3rdparty/v8/test/mjsunit/regexp-capture-3.js
@@ -162,7 +162,6 @@ assertEquals("*foo * baz", a);
// string we can test that the relevant node is removed by verifying that
// there is no hang.
function NoHang(re) {
- print(re);
"This is an ASCII string that could take forever".match(re);
}
@@ -216,3 +215,5 @@ regex10.exec(input0);
var regex11 = /^(?:[^\u0000-\u0080]|[0-9a-z?,.!&\s#()])+$/i;
regex11.exec(input0);
+var regex12 = /u(\xf0{8}?\D*?|( ? !)$h??(|)*?(||)+?\6((?:\W\B|--\d-*-|)?$){0, }?|^Y( ? !1)\d+)+a/;
+regex12.exec("");
diff --git a/src/3rdparty/v8/test/mjsunit/regexp-capture.js b/src/3rdparty/v8/test/mjsunit/regexp-capture.js
index 8aae717..3073094 100755
--- a/src/3rdparty/v8/test/mjsunit/regexp-capture.js
+++ b/src/3rdparty/v8/test/mjsunit/regexp-capture.js
@@ -56,3 +56,5 @@ assertEquals(["bbc", "b"], /^(b+|a){1,2}?bc/.exec("bbc"));
assertEquals(["bbaa", "a", "", "a"],
/((\3|b)\2(a)){2,}/.exec("bbaababbabaaaaabbaaaabba"));
+// From crbug.com/128821 - don't hang:
+"".match(/((a|i|A|I|u|o|U|O)(s|c|b|c|d|f|g|h|j|k|l|m|n|p|q|r|s|t|v|w|x|y|z|B|C|D|F|G|H|J|K|L|M|N|P|Q|R|S|T|V|W|X|Y|Z)*) de\/da([.,!?\s]|$)/);
diff --git a/src/3rdparty/v8/test/mjsunit/regexp-global.js b/src/3rdparty/v8/test/mjsunit/regexp-global.js
new file mode 100644
index 0000000..093dba1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regexp-global.js
@@ -0,0 +1,254 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Test that an optional capture is cleared between two matches.
+var str = "ABX X";
+str = str.replace(/(\w)?X/g, function(match, capture) {
+ assertTrue(match.indexOf(capture) >= 0 ||
+ capture === undefined);
+ return capture ? capture.toLowerCase() : "-";
+ });
+assertEquals("Ab -", str);
+
+// Test zero-length matches.
+str = "Als Gregor Samsa eines Morgens";
+str = str.replace(/\b/g, function(match, capture) {
+ return "/";
+ });
+assertEquals("/Als/ /Gregor/ /Samsa/ /eines/ /Morgens/", str);
+
+// Test zero-length matches that have non-zero-length sub-captures.
+str = "It was a pleasure to burn.";
+str = str.replace(/(?=(\w+))\b/g, function(match, capture) {
+ return capture.length;
+ });
+assertEquals("2It 3was 1a 8pleasure 2to 4burn.", str);
+
+// Test multiple captures.
+str = "Try not. Do, or do not. There is no try.";
+str = str.replace(/(not?)|(do)|(try)/gi,
+ function(match, c1, c2, c3) {
+ assertTrue((c1 === undefined && c2 === undefined) ||
+ (c2 === undefined && c3 === undefined) ||
+ (c1 === undefined && c3 === undefined));
+ if (c1) return "-";
+ if (c2) return "+";
+ if (c3) return "="
+ });
+assertEquals("= -. +, or + -. There is - =.", str);
+
+// Test multiple alternate captures.
+str = "FOUR LEGS GOOD, TWO LEGS BAD!";
+str = str.replace(/(FOUR|TWO) LEGS (GOOD|BAD)/g,
+ function(match, num_legs, likeability) {
+ assertTrue(num_legs !== undefined);
+ assertTrue(likeability !== undefined);
+ if (num_legs == "FOUR") assertTrue(likeability == "GOOD");
+ if (num_legs == "TWO") assertTrue(likeability == "BAD");
+ return match.length - 10;
+ });
+assertEquals("4, 2!", str);
+
+
+// The same tests with UC16.
+
+//Test that an optional capture is cleared between two matches.
+str = "AB\u1234 \u1234";
+str = str.replace(/(\w)?\u1234/g,
+ function(match, capture) {
+ assertTrue(match.indexOf(capture) >= 0 ||
+ capture === undefined);
+ return capture ? capture.toLowerCase() : "-";
+ });
+assertEquals("Ab -", str);
+
+// Test zero-length matches.
+str = "Als \u2623\u2642 eines Morgens";
+str = str.replace(/\b/g, function(match, capture) {
+ return "/";
+ });
+assertEquals("/Als/ \u2623\u2642 /eines/ /Morgens/", str);
+
+// Test zero-length matches that have non-zero-length sub-captures.
+str = "It was a pleasure to \u70e7.";
+str = str.replace(/(?=(\w+))\b/g, function(match, capture) {
+ return capture.length;
+ });
+assertEquals("2It 3was 1a 8pleasure 2to \u70e7.", str);
+
+// Test multiple captures.
+str = "Try not. D\u26aa, or d\u26aa not. There is no try.";
+str = str.replace(/(not?)|(d\u26aa)|(try)/gi,
+ function(match, c1, c2, c3) {
+ assertTrue((c1 === undefined && c2 === undefined) ||
+ (c2 === undefined && c3 === undefined) ||
+ (c1 === undefined && c3 === undefined));
+ if (c1) return "-";
+ if (c2) return "+";
+ if (c3) return "="
+ });
+assertEquals("= -. +, or + -. There is - =.", str);
+
+// Test multiple alternate captures.
+str = "FOUR \u817f GOOD, TWO \u817f BAD!";
+str = str.replace(/(FOUR|TWO) \u817f (GOOD|BAD)/g,
+ function(match, num_legs, likeability) {
+ assertTrue(num_legs !== undefined);
+ assertTrue(likeability !== undefined);
+ if (num_legs == "FOUR") assertTrue(likeability == "GOOD");
+ if (num_legs == "TWO") assertTrue(likeability == "BAD");
+ return match.length - 7;
+ });
+assertEquals("4, 2!", str);
+
+// Test capture that is a real substring.
+var str = "Beasts of England, beasts of Ireland";
+str = str.replace(/(.*)/g, function(match) { return '~'; });
+assertEquals("~~", str);
+
+// Test zero-length matches that have non-zero-length sub-captures that do not
+// start at the match start position.
+str = "up up up up";
+str = str.replace(/\b(?=u(p))/g, function(match, capture) {
+ return capture.length;
+ });
+
+assertEquals("1up 1up 1up 1up", str);
+
+
+// Create regexp that has a *lot* of captures.
+var re_string = "(a)";
+for (var i = 0; i < 500; i++) {
+ re_string = "(" + re_string + ")";
+}
+re_string = re_string + "1";
+// re_string = "(((...((a))...)))1"
+
+var regexps = new Array();
+var last_match_expectations = new Array();
+var first_capture_expectations = new Array();
+
+// Atomic regexp.
+regexps.push(/a1/g);
+last_match_expectations.push("a1");
+first_capture_expectations.push("");
+// Small regexp (no capture);
+regexps.push(/\w1/g);
+last_match_expectations.push("a1");
+first_capture_expectations.push("");
+// Small regexp (one capture).
+regexps.push(/(a)1/g);
+last_match_expectations.push("a1");
+first_capture_expectations.push("a");
+// Large regexp (a lot of captures).
+regexps.push(new RegExp(re_string, "g"));
+last_match_expectations.push("a1");
+first_capture_expectations.push("a");
+
+function test_replace(result_expectation,
+ subject,
+ regexp,
+ replacement) {
+ for (var i = 0; i < regexps.length; i++) {
+ // Overwrite last match info.
+ "deadbeef".replace(/(dead)beef/, "$1holeycow");
+ // Conduct tests.
+ assertEquals(result_expectation, subject.replace(regexps[i], replacement));
+ if (subject.length == 0) {
+ assertEquals("deadbeef", RegExp.lastMatch);
+ assertEquals("dead", RegExp["$1"]);
+ } else {
+ assertEquals(last_match_expectations[i], RegExp.lastMatch);
+ assertEquals(first_capture_expectations[i], RegExp["$1"]);
+ }
+ }
+}
+
+
+function test_match(result_expectation,
+ subject,
+ regexp) {
+ for (var i = 0; i < regexps.length; i++) {
+ // Overwrite last match info.
+ "deadbeef".replace(/(dead)beef/, "$1holeycow");
+ // Conduct tests.
+ if (result_expectation == null) {
+ assertNull(subject.match(regexps[i]));
+ } else {
+ assertArrayEquals(result_expectation, subject.match(regexps[i]));
+ }
+ if (subject.length == 0) {
+ assertEquals("deadbeef", RegExp.lastMatch);
+ assertEquals("dead", RegExp["$1"]);
+ } else {
+ assertEquals(last_match_expectations[i], RegExp.lastMatch);
+ assertEquals(first_capture_expectations[i], RegExp["$1"]);
+ }
+ }
+}
+
+
+// Test for different number of matches.
+for (var m = 0; m < 200; m++) {
+ // Create string that matches m times.
+ var subject = "";
+ var test_1_expectation = "";
+ var test_2_expectation = "";
+ var test_3_expectation = (m == 0) ? null : new Array();
+ for (var i = 0; i < m; i++) {
+ subject += "a11";
+ test_1_expectation += "x1";
+ test_2_expectation += "1";
+ test_3_expectation.push("a1");
+ }
+
+ // Test 1a: String.replace with string.
+ test_replace(test_1_expectation, subject, /a1/g, "x");
+
+ // Test 1b: String.replace with function.
+ function f() { return "x"; }
+ test_replace(test_1_expectation, subject, /a1/g, f);
+
+ // Test 2a: String.replace with empty string.
+ test_replace(test_2_expectation, subject, /a1/g, "");
+
+ // Test 3a: String.match.
+ test_match(test_3_expectation, subject, /a1/g);
+}
+
+
+// Test String hashing (compiling regular expression includes hashing).
+var crosscheck = "\x80";
+for (var i = 0; i < 12; i++) crosscheck += crosscheck;
+new RegExp(crosscheck);
+
+var subject = "ascii~only~string~here~";
+var replacement = "\x80";
+var result = subject.replace(/~/g, replacement);
+for (var i = 0; i < 5; i++) result += result;
+new RegExp(result);
diff --git a/src/3rdparty/v8/test/mjsunit/regexp-results-cache.js b/src/3rdparty/v8/test/mjsunit/regexp-results-cache.js
new file mode 100644
index 0000000..7ee8c3f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regexp-results-cache.js
@@ -0,0 +1,78 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Long string to trigger caching.
+var string =
+"Friends, Romans, countrymen, lend me your ears! \
+ I come to bury Caesar, not to praise him. \
+ The evil that men do lives after them, \
+ The good is oft interred with their bones; \
+ So let it be with Caesar. The noble Brutus \
+ Hath told you Caesar was ambitious; \
+ If it were so, it was a grievous fault, \
+ And grievously hath Caesar answer'd it. \
+ Here, under leave of Brutus and the rest- \
+ For Brutus is an honorable man; \
+ So are they all, all honorable men- \
+ Come I to speak in Caesar's funeral. \
+ He was my friend, faithful and just to me; \
+ But Brutus says he was ambitious, \
+ And Brutus is an honorable man. \
+ He hath brought many captives home to Rome, \
+ Whose ransoms did the general coffers fill. \
+ Did this in Caesar seem ambitious? \
+ When that the poor have cried, Caesar hath wept; \
+ Ambition should be made of sterner stuff: \
+ Yet Brutus says he was ambitious, \
+ And Brutus is an honorable man. \
+ You all did see that on the Lupercal \
+ I thrice presented him a kingly crown, \
+ Which he did thrice refuse. Was this ambition? \
+ Yet Brutus says he was ambitious, \
+ And sure he is an honorable man. \
+ I speak not to disprove what Brutus spoke, \
+ But here I am to speak what I do know. \
+ You all did love him once, not without cause; \
+ What cause withholds you then to mourn for him? \
+ O judgement, thou art fled to brutish beasts, \
+ And men have lost their reason. Bear with me; \
+ My heart is in the coffin there with Caesar, \
+ And I must pause till it come back to me.";
+
+var replaced = string.replace(/\b\w+\b/g, function() { return "foo"; });
+for (var i = 0; i < 3; i++) {
+ assertEquals(replaced,
+ string.replace(/\b\w+\b/g, function() { return "foo"; }));
+}
+
+// Check that the result is in a COW array.
+var words = string.split(" ");
+assertEquals("Friends,", words[0]);
+words[0] = "Enemies,";
+words = string.split(" ");
+assertEquals("Friends,", words[0]);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regexp.js b/src/3rdparty/v8/test/mjsunit/regexp.js
index ec82c96..c2d9282 100644
--- a/src/3rdparty/v8/test/mjsunit/regexp.js
+++ b/src/3rdparty/v8/test/mjsunit/regexp.js
@@ -705,3 +705,14 @@ assertThrows("RegExp('(?!*)')");
// Test trimmed regular expression for RegExp.test().
assertTrue(/.*abc/.test("abc"));
assertFalse(/.*\d+/.test("q"));
+
+// Test that RegExp.prototype.toString() throws TypeError for
+// incompatible receivers (ES5 section 15.10.6 and 15.10.6.4).
+assertThrows("RegExp.prototype.toString.call(null)", TypeError);
+assertThrows("RegExp.prototype.toString.call(0)", TypeError);
+assertThrows("RegExp.prototype.toString.call('')", TypeError);
+assertThrows("RegExp.prototype.toString.call(false)", TypeError);
+assertThrows("RegExp.prototype.toString.call(true)", TypeError);
+assertThrows("RegExp.prototype.toString.call([])", TypeError);
+assertThrows("RegExp.prototype.toString.call({})", TypeError);
+assertThrows("RegExp.prototype.toString.call(function(){})", TypeError);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1117.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1117.js
index b013a22..981a1b7 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1117.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1117.js
@@ -25,11 +25,20 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
// Test that we actually return the right value (-0) when we multiply
// constant 0 with a negative integer.
function foo(y) {return 0 * y; }
-for( var i = 0; i< 1000000; i++){
- foo(42);
-}
assertEquals(1/foo(-42), -Infinity);
+assertEquals(1/foo(-42), -Infinity);
+%OptimizeFunctionOnNextCall(foo);
+assertEquals(1/foo(-42), -Infinity);
+
+function bar(x) { return x * 0; }
+assertEquals(Infinity, 1/bar(5));
+assertEquals(Infinity, 1/bar(5));
+%OptimizeFunctionOnNextCall(bar);
+assertEquals(-Infinity, 1/bar(-5));
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1118.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1118.js
index 7e0461d..3e3920f 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1118.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1118.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --noparallel-recompilation
// An exception thrown in a function optimized by on-stack replacement (OSR)
// should be able to construct a receiver from all optimized stack frames.
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-115100.js b/src/3rdparty/v8/test/mjsunit/regress/regress-115100.js
new file mode 100644
index 0000000..c917446
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-115100.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function foo(obj) { obj.prop = 0; }
+function mk() { return Object.create(null); }
+
+foo(mk());
+foo(mk());
+%OptimizeFunctionOnNextCall(foo);
+foo(mk());
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-117409.js b/src/3rdparty/v8/test/mjsunit/regress/regress-117409.js
index 9222191..98aab5a 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-117409.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-117409.js
@@ -36,7 +36,7 @@ var literal = [1.2];
KeyedStoreIC(literal);
KeyedStoreIC(literal);
-// Trruncate array to 0 elements, at which point backing store will be replaced
+// Truncate array to 0 elements, at which point backing store will be replaced
// with empty fixed array.
literal.length = 0;
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1199637.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1199637.js
index 9c560a9..8b02a65 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1199637.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1199637.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --es52_globals
// Make sure that we can introduce global variables (using
// both var and const) that shadow even READ_ONLY variables
@@ -74,5 +74,3 @@ assertEquals(5678, z);
assertEquals(1234, w);
eval("with({}) { const w = 5678; }");
assertEquals(5678, w);
-
-
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-128146.js b/src/3rdparty/v8/test/mjsunit/regress/regress-128146.js
index 5c22b4e..730dd91 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-128146.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-128146.js
@@ -25,14 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Define accessor properties, resulting in an AccessorPair with 2 transitions.
Object.defineProperty({},"foo",{set:function(){},configurable:false});
Object.defineProperty({},"foo",{get:function(){},configurable:false});
-Object.defineProperty({},"foo",{});
-
-// From WebKit layout tests (fast/js/prototypes.html)
-var wasSet = false;
-var o = { };
-o.__defineGetter__("__proto__", function() { wasSet = true });
-o.__proto__;
-assertFalse(wasSet);
+// Define a data property under the same name.
+Object.defineProperty({},"foo",{});
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-131923.js b/src/3rdparty/v8/test/mjsunit/regress/regress-131923.js
new file mode 100644
index 0000000..58da07c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-131923.js
@@ -0,0 +1,30 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+assertFalse(/\u9999{4}/.test(""));
+assertTrue(/\u9999{0,4}/.test(""));
+assertFalse(/\u9999{4,}/.test(""));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-131994.js b/src/3rdparty/v8/test/mjsunit/regress/regress-131994.js
new file mode 100644
index 0000000..8347653
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-131994.js
@@ -0,0 +1,70 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test that a variable in the local scope that shadows a context-allocated
+// variable is correctly resolved when being evaluated in the debugger.
+
+Debug = debug.Debug;
+
+var exception = false;
+
+function listener(event, exec_state, event_data, data) {
+ if (event != Debug.DebugEvent.Break) return;
+ var breakpoint = exec_state.frame(0);
+ try {
+ // Assert correct break point.
+ assertTrue(breakpoint.sourceLineText().indexOf("// Break") > -1);
+ // Assert correct value.
+ assertEquals(3, breakpoint.evaluate('x').value());
+ } catch (e) {
+ exception = e;
+ }
+}
+
+Debug.setListener(listener);
+
+function h() {
+ var x; // Context-allocated due to g().
+
+ var g = function g() {
+ x = -7;
+ };
+
+ var f = function f() {
+ var x = 3; // Allocated in the local scope.
+ debugger; // Break.
+ };
+
+ f();
+}
+
+h();
+
+assertFalse(exception);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-133211.js b/src/3rdparty/v8/test/mjsunit/regress/regress-133211.js
new file mode 100644
index 0000000..f9473d1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-133211.js
@@ -0,0 +1,35 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Make sure we don't leak maps when reading intermediate property descriptors.
+
+var o = {};
+var x = {};
+Object.defineProperty(o, "foo", { get: undefined });
+Object.defineProperty(x, "foo", { get: undefined, set: undefined });
+var pd = Object.getOwnPropertyDescriptor(o, "foo");
+assertEquals(undefined, pd.set);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-133211b.js b/src/3rdparty/v8/test/mjsunit/regress/regress-133211b.js
new file mode 100644
index 0000000..87e35f4
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-133211b.js
@@ -0,0 +1,39 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+setter = function(x) { return; }
+var o = {};
+Object.defineProperty(o, "foo", { set: setter });
+var x = {};
+Object.defineProperty(x, "foo", { set: setter });
+x.bar = 20;
+x = {};
+gc();
+o.foo = 20;
+assertEquals(undefined, o.foo);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-136048.js b/src/3rdparty/v8/test/mjsunit/regress/regress-136048.js
new file mode 100644
index 0000000..c9972e9
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-136048.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+try {
+ /foo/\u0069
+} catch (e) {
+ assertEquals(
+ "SyntaxError: Invalid flags supplied to RegExp constructor '\\u0069'",
+ e.toString());
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-137768.js b/src/3rdparty/v8/test/mjsunit/regress/regress-137768.js
new file mode 100644
index 0000000..9fbd7f3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-137768.js
@@ -0,0 +1,73 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Create elements in a constructor function to ensure map sharing.
+function TestConstructor() {
+ this[0] = 1;
+ this[1] = 2;
+ this[2] = 3;
+}
+
+function bad_func(o,a) {
+ var s = 0;
+ for (var i = 0; i < 1; ++i) {
+ o.newFileToChangeMap = undefined;
+ var x = a[0];
+ s += x;
+ }
+ return s;
+}
+
+o = new Object();
+a = new TestConstructor();
+bad_func(o, a);
+
+// Make sure that we're out of pre-monomorphic state for the member add of
+// 'newFileToChangeMap' which causes a map transition.
+o = new Object();
+a = new TestConstructor();
+bad_func(o, a);
+
+// Optimize, before the fix, the element load and subsequent tagged-to-i were
+// hoisted above the map check, which can't be hoisted due to the map-changing
+// store.
+o = new Object();
+a = new TestConstructor();
+%OptimizeFunctionOnNextCall(bad_func);
+bad_func(o, a);
+
+// Pass in a array of doubles. Before the fix, the optimized load and
+// tagged-to-i will treat part of a double value as a pointer and de-ref it
+// before the map check was executed that should have deopt.
+o = new Object();
+// Pass in an elements buffer where the bit representation of the double numbers
+// are two adjacent small 32-bit values with the lowest bit set to one, causing
+// tagged-to-i to SIGSEGV.
+a = [2.122e-314, 2.122e-314, 2.122e-314];
+bad_func(o, a);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-143967.js b/src/3rdparty/v8/test/mjsunit/regress/regress-143967.js
new file mode 100644
index 0000000..7c12e67
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-143967.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Check that Accessors::FunctionGetPrototype traverses the prototype
+// chain correctly and doesn't get stuck.
+
+var functionWithoutProto = [].filter;
+var obj = Object.create(functionWithoutProto);
+functionWithoutProto.__proto__ = function() {};
+assertEquals(functionWithoutProto.prototype, obj.prototype);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-145201.js b/src/3rdparty/v8/test/mjsunit/regress/regress-145201.js
new file mode 100644
index 0000000..7fe7bce
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-145201.js
@@ -0,0 +1,107 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Fix some corner cases in skipping native methods using caller.
+var net = [];
+
+
+var x = 0;
+
+function collect () {
+ function item(operator) {
+ binary(operator, 1, false);
+ binary(operator, 1, true);
+ binary(operator, '{}', false);
+ binary(operator, '{}', true);
+ binary(operator, '"x"', false);
+ binary(operator, '"x"', true);
+ unary(operator, "");
+ }
+
+ function unary(op, after) {
+ // Capture:
+ try {
+ eval(op + " custom " + after);
+ } catch(e) {
+ }
+ }
+
+ function binary(op, other_side, inverted) {
+ // Capture:
+ try {
+ if (inverted) {
+ eval("custom " + op + " " + other_side);
+ } else {
+ eval(other_side + " " + op + " custom");
+ }
+ } catch(e) {
+ }
+ }
+
+ function catcher() {
+ var caller = catcher.caller;
+ if (/native/i.test(caller) || /ADD/.test(caller)) {
+ net[caller] = 0;
+ }
+ }
+
+ var custom = Object.create(null, {
+ toString: { value: catcher },
+ length: { get: catcher }
+ });
+
+ item('^');
+ item('~');
+ item('<<');
+ item('<');
+ item('==');
+ item('>>>');
+ item('>>');
+ item('|');
+ item('-');
+ item('*');
+ item('&');
+ item('%');
+ item('+');
+ item('in');
+ item('instanceof');
+ unary('{}[', ']');
+ unary('delete {}[', ']');
+ unary('(function() {}).apply(null, ', ')');
+}
+
+collect();
+collect();
+collect();
+
+var keys = 0;
+for (var key in net) {
+ print(key);
+ keys++;
+}
+
+assertTrue(keys == 0);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-148378.js b/src/3rdparty/v8/test/mjsunit/regress/regress-148378.js
new file mode 100644
index 0000000..d37cea1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-148378.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"a".replace(/a/g, function() { return "c"; });
+
+function test() {
+ try {
+ test();
+ } catch(e) {
+ "b".replace(/(b)/g, function() { return "c"; });
+ }
+}
+
+test();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1563.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1563.js
index c25b6c7..884b125 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1563.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1563.js
@@ -27,7 +27,7 @@
// Flags: --allow-natives-syntax
-obj = new PixelArray(10);
+obj = new Uint8ClampedArray(10);
// Test that undefined gets properly clamped in Crankshafted pixel array
// assignments.
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1591.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1591.js
new file mode 100644
index 0000000..69efd0b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1591.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var stack;
+var used_custom_lookup = false;
+
+({
+ __lookupGetter__ : function() {
+ used_custom_lookup = true;
+ },
+
+ test : function() {
+ try {
+ f();
+ } catch (err) {
+ stack = err.stack;
+ }
+ }
+}).test();
+
+var expected_message = "ReferenceError: f is not defined";
+assertTrue(stack.indexOf(expected_message) >= 0);
+assertFalse(used_custom_lookup);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js
index 176f918..5b8fc50 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// See: http://code.google.com/p/v8/issues/detail?id=1878
+// See: http://code.google.com/p/v8/issues/detail?id=1849
// Flags: --allow-natives-syntax
@@ -36,4 +36,4 @@ for (var i = 0; i < count; i++) {
arr[i] = 0;
}
assertFalse(%HasFastDoubleElements(arr));
-assertTrue(%HasFastSmiOnlyElements(arr));
+assertTrue(%HasFastSmiElements(arr));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js
index a1648b1..fbc47bd 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js
@@ -34,11 +34,11 @@ var a = Array();
for (var i = 0; i < 1000; i++) {
var ai = natives.InternalArray(10000);
assertFalse(%HaveSameMap(ai, a));
- assertTrue(%HasFastElements(ai));
+ assertTrue(%HasFastObjectElements(ai));
}
for (var i = 0; i < 1000; i++) {
var ai = new natives.InternalArray(10000);
assertFalse(%HaveSameMap(ai, a));
- assertTrue(%HasFastElements(ai));
+ assertTrue(%HasFastObjectElements(ai));
}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js
deleted file mode 100644
index 2728c2c..0000000
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js
+++ /dev/null
@@ -1,5045 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --allow-natives-syntax
-
-f();
-f();
-%OptimizeFunctionOnNextCall(f);
-var start = (new Date()).getTime();
-var array = f();
-var end = (new Date()).getTime();
-
-// Assert that recompiling and executing f() takes less than a second.
-assertTrue((end - start) < 1000);
-
-for (var i = 0; i < 5000; i++) assertEquals(0, array[i]);
-
-function f() {
- var a = new Array(5000);
- a[0]=0;
- a[1]=0;
- a[2]=0;
- a[3]=0;
- a[4]=0;
- a[5]=0;
- a[6]=0;
- a[7]=0;
- a[8]=0;
- a[9]=0;
- a[10]=0;
- a[11]=0;
- a[12]=0;
- a[13]=0;
- a[14]=0;
- a[15]=0;
- a[16]=0;
- a[17]=0;
- a[18]=0;
- a[19]=0;
- a[20]=0;
- a[21]=0;
- a[22]=0;
- a[23]=0;
- a[24]=0;
- a[25]=0;
- a[26]=0;
- a[27]=0;
- a[28]=0;
- a[29]=0;
- a[30]=0;
- a[31]=0;
- a[32]=0;
- a[33]=0;
- a[34]=0;
- a[35]=0;
- a[36]=0;
- a[37]=0;
- a[38]=0;
- a[39]=0;
- a[40]=0;
- a[41]=0;
- a[42]=0;
- a[43]=0;
- a[44]=0;
- a[45]=0;
- a[46]=0;
- a[47]=0;
- a[48]=0;
- a[49]=0;
- a[50]=0;
- a[51]=0;
- a[52]=0;
- a[53]=0;
- a[54]=0;
- a[55]=0;
- a[56]=0;
- a[57]=0;
- a[58]=0;
- a[59]=0;
- a[60]=0;
- a[61]=0;
- a[62]=0;
- a[63]=0;
- a[64]=0;
- a[65]=0;
- a[66]=0;
- a[67]=0;
- a[68]=0;
- a[69]=0;
- a[70]=0;
- a[71]=0;
- a[72]=0;
- a[73]=0;
- a[74]=0;
- a[75]=0;
- a[76]=0;
- a[77]=0;
- a[78]=0;
- a[79]=0;
- a[80]=0;
- a[81]=0;
- a[82]=0;
- a[83]=0;
- a[84]=0;
- a[85]=0;
- a[86]=0;
- a[87]=0;
- a[88]=0;
- a[89]=0;
- a[90]=0;
- a[91]=0;
- a[92]=0;
- a[93]=0;
- a[94]=0;
- a[95]=0;
- a[96]=0;
- a[97]=0;
- a[98]=0;
- a[99]=0;
- a[100]=0;
- a[101]=0;
- a[102]=0;
- a[103]=0;
- a[104]=0;
- a[105]=0;
- a[106]=0;
- a[107]=0;
- a[108]=0;
- a[109]=0;
- a[110]=0;
- a[111]=0;
- a[112]=0;
- a[113]=0;
- a[114]=0;
- a[115]=0;
- a[116]=0;
- a[117]=0;
- a[118]=0;
- a[119]=0;
- a[120]=0;
- a[121]=0;
- a[122]=0;
- a[123]=0;
- a[124]=0;
- a[125]=0;
- a[126]=0;
- a[127]=0;
- a[128]=0;
- a[129]=0;
- a[130]=0;
- a[131]=0;
- a[132]=0;
- a[133]=0;
- a[134]=0;
- a[135]=0;
- a[136]=0;
- a[137]=0;
- a[138]=0;
- a[139]=0;
- a[140]=0;
- a[141]=0;
- a[142]=0;
- a[143]=0;
- a[144]=0;
- a[145]=0;
- a[146]=0;
- a[147]=0;
- a[148]=0;
- a[149]=0;
- a[150]=0;
- a[151]=0;
- a[152]=0;
- a[153]=0;
- a[154]=0;
- a[155]=0;
- a[156]=0;
- a[157]=0;
- a[158]=0;
- a[159]=0;
- a[160]=0;
- a[161]=0;
- a[162]=0;
- a[163]=0;
- a[164]=0;
- a[165]=0;
- a[166]=0;
- a[167]=0;
- a[168]=0;
- a[169]=0;
- a[170]=0;
- a[171]=0;
- a[172]=0;
- a[173]=0;
- a[174]=0;
- a[175]=0;
- a[176]=0;
- a[177]=0;
- a[178]=0;
- a[179]=0;
- a[180]=0;
- a[181]=0;
- a[182]=0;
- a[183]=0;
- a[184]=0;
- a[185]=0;
- a[186]=0;
- a[187]=0;
- a[188]=0;
- a[189]=0;
- a[190]=0;
- a[191]=0;
- a[192]=0;
- a[193]=0;
- a[194]=0;
- a[195]=0;
- a[196]=0;
- a[197]=0;
- a[198]=0;
- a[199]=0;
- a[200]=0;
- a[201]=0;
- a[202]=0;
- a[203]=0;
- a[204]=0;
- a[205]=0;
- a[206]=0;
- a[207]=0;
- a[208]=0;
- a[209]=0;
- a[210]=0;
- a[211]=0;
- a[212]=0;
- a[213]=0;
- a[214]=0;
- a[215]=0;
- a[216]=0;
- a[217]=0;
- a[218]=0;
- a[219]=0;
- a[220]=0;
- a[221]=0;
- a[222]=0;
- a[223]=0;
- a[224]=0;
- a[225]=0;
- a[226]=0;
- a[227]=0;
- a[228]=0;
- a[229]=0;
- a[230]=0;
- a[231]=0;
- a[232]=0;
- a[233]=0;
- a[234]=0;
- a[235]=0;
- a[236]=0;
- a[237]=0;
- a[238]=0;
- a[239]=0;
- a[240]=0;
- a[241]=0;
- a[242]=0;
- a[243]=0;
- a[244]=0;
- a[245]=0;
- a[246]=0;
- a[247]=0;
- a[248]=0;
- a[249]=0;
- a[250]=0;
- a[251]=0;
- a[252]=0;
- a[253]=0;
- a[254]=0;
- a[255]=0;
- a[256]=0;
- a[257]=0;
- a[258]=0;
- a[259]=0;
- a[260]=0;
- a[261]=0;
- a[262]=0;
- a[263]=0;
- a[264]=0;
- a[265]=0;
- a[266]=0;
- a[267]=0;
- a[268]=0;
- a[269]=0;
- a[270]=0;
- a[271]=0;
- a[272]=0;
- a[273]=0;
- a[274]=0;
- a[275]=0;
- a[276]=0;
- a[277]=0;
- a[278]=0;
- a[279]=0;
- a[280]=0;
- a[281]=0;
- a[282]=0;
- a[283]=0;
- a[284]=0;
- a[285]=0;
- a[286]=0;
- a[287]=0;
- a[288]=0;
- a[289]=0;
- a[290]=0;
- a[291]=0;
- a[292]=0;
- a[293]=0;
- a[294]=0;
- a[295]=0;
- a[296]=0;
- a[297]=0;
- a[298]=0;
- a[299]=0;
- a[300]=0;
- a[301]=0;
- a[302]=0;
- a[303]=0;
- a[304]=0;
- a[305]=0;
- a[306]=0;
- a[307]=0;
- a[308]=0;
- a[309]=0;
- a[310]=0;
- a[311]=0;
- a[312]=0;
- a[313]=0;
- a[314]=0;
- a[315]=0;
- a[316]=0;
- a[317]=0;
- a[318]=0;
- a[319]=0;
- a[320]=0;
- a[321]=0;
- a[322]=0;
- a[323]=0;
- a[324]=0;
- a[325]=0;
- a[326]=0;
- a[327]=0;
- a[328]=0;
- a[329]=0;
- a[330]=0;
- a[331]=0;
- a[332]=0;
- a[333]=0;
- a[334]=0;
- a[335]=0;
- a[336]=0;
- a[337]=0;
- a[338]=0;
- a[339]=0;
- a[340]=0;
- a[341]=0;
- a[342]=0;
- a[343]=0;
- a[344]=0;
- a[345]=0;
- a[346]=0;
- a[347]=0;
- a[348]=0;
- a[349]=0;
- a[350]=0;
- a[351]=0;
- a[352]=0;
- a[353]=0;
- a[354]=0;
- a[355]=0;
- a[356]=0;
- a[357]=0;
- a[358]=0;
- a[359]=0;
- a[360]=0;
- a[361]=0;
- a[362]=0;
- a[363]=0;
- a[364]=0;
- a[365]=0;
- a[366]=0;
- a[367]=0;
- a[368]=0;
- a[369]=0;
- a[370]=0;
- a[371]=0;
- a[372]=0;
- a[373]=0;
- a[374]=0;
- a[375]=0;
- a[376]=0;
- a[377]=0;
- a[378]=0;
- a[379]=0;
- a[380]=0;
- a[381]=0;
- a[382]=0;
- a[383]=0;
- a[384]=0;
- a[385]=0;
- a[386]=0;
- a[387]=0;
- a[388]=0;
- a[389]=0;
- a[390]=0;
- a[391]=0;
- a[392]=0;
- a[393]=0;
- a[394]=0;
- a[395]=0;
- a[396]=0;
- a[397]=0;
- a[398]=0;
- a[399]=0;
- a[400]=0;
- a[401]=0;
- a[402]=0;
- a[403]=0;
- a[404]=0;
- a[405]=0;
- a[406]=0;
- a[407]=0;
- a[408]=0;
- a[409]=0;
- a[410]=0;
- a[411]=0;
- a[412]=0;
- a[413]=0;
- a[414]=0;
- a[415]=0;
- a[416]=0;
- a[417]=0;
- a[418]=0;
- a[419]=0;
- a[420]=0;
- a[421]=0;
- a[422]=0;
- a[423]=0;
- a[424]=0;
- a[425]=0;
- a[426]=0;
- a[427]=0;
- a[428]=0;
- a[429]=0;
- a[430]=0;
- a[431]=0;
- a[432]=0;
- a[433]=0;
- a[434]=0;
- a[435]=0;
- a[436]=0;
- a[437]=0;
- a[438]=0;
- a[439]=0;
- a[440]=0;
- a[441]=0;
- a[442]=0;
- a[443]=0;
- a[444]=0;
- a[445]=0;
- a[446]=0;
- a[447]=0;
- a[448]=0;
- a[449]=0;
- a[450]=0;
- a[451]=0;
- a[452]=0;
- a[453]=0;
- a[454]=0;
- a[455]=0;
- a[456]=0;
- a[457]=0;
- a[458]=0;
- a[459]=0;
- a[460]=0;
- a[461]=0;
- a[462]=0;
- a[463]=0;
- a[464]=0;
- a[465]=0;
- a[466]=0;
- a[467]=0;
- a[468]=0;
- a[469]=0;
- a[470]=0;
- a[471]=0;
- a[472]=0;
- a[473]=0;
- a[474]=0;
- a[475]=0;
- a[476]=0;
- a[477]=0;
- a[478]=0;
- a[479]=0;
- a[480]=0;
- a[481]=0;
- a[482]=0;
- a[483]=0;
- a[484]=0;
- a[485]=0;
- a[486]=0;
- a[487]=0;
- a[488]=0;
- a[489]=0;
- a[490]=0;
- a[491]=0;
- a[492]=0;
- a[493]=0;
- a[494]=0;
- a[495]=0;
- a[496]=0;
- a[497]=0;
- a[498]=0;
- a[499]=0;
- a[500]=0;
- a[501]=0;
- a[502]=0;
- a[503]=0;
- a[504]=0;
- a[505]=0;
- a[506]=0;
- a[507]=0;
- a[508]=0;
- a[509]=0;
- a[510]=0;
- a[511]=0;
- a[512]=0;
- a[513]=0;
- a[514]=0;
- a[515]=0;
- a[516]=0;
- a[517]=0;
- a[518]=0;
- a[519]=0;
- a[520]=0;
- a[521]=0;
- a[522]=0;
- a[523]=0;
- a[524]=0;
- a[525]=0;
- a[526]=0;
- a[527]=0;
- a[528]=0;
- a[529]=0;
- a[530]=0;
- a[531]=0;
- a[532]=0;
- a[533]=0;
- a[534]=0;
- a[535]=0;
- a[536]=0;
- a[537]=0;
- a[538]=0;
- a[539]=0;
- a[540]=0;
- a[541]=0;
- a[542]=0;
- a[543]=0;
- a[544]=0;
- a[545]=0;
- a[546]=0;
- a[547]=0;
- a[548]=0;
- a[549]=0;
- a[550]=0;
- a[551]=0;
- a[552]=0;
- a[553]=0;
- a[554]=0;
- a[555]=0;
- a[556]=0;
- a[557]=0;
- a[558]=0;
- a[559]=0;
- a[560]=0;
- a[561]=0;
- a[562]=0;
- a[563]=0;
- a[564]=0;
- a[565]=0;
- a[566]=0;
- a[567]=0;
- a[568]=0;
- a[569]=0;
- a[570]=0;
- a[571]=0;
- a[572]=0;
- a[573]=0;
- a[574]=0;
- a[575]=0;
- a[576]=0;
- a[577]=0;
- a[578]=0;
- a[579]=0;
- a[580]=0;
- a[581]=0;
- a[582]=0;
- a[583]=0;
- a[584]=0;
- a[585]=0;
- a[586]=0;
- a[587]=0;
- a[588]=0;
- a[589]=0;
- a[590]=0;
- a[591]=0;
- a[592]=0;
- a[593]=0;
- a[594]=0;
- a[595]=0;
- a[596]=0;
- a[597]=0;
- a[598]=0;
- a[599]=0;
- a[600]=0;
- a[601]=0;
- a[602]=0;
- a[603]=0;
- a[604]=0;
- a[605]=0;
- a[606]=0;
- a[607]=0;
- a[608]=0;
- a[609]=0;
- a[610]=0;
- a[611]=0;
- a[612]=0;
- a[613]=0;
- a[614]=0;
- a[615]=0;
- a[616]=0;
- a[617]=0;
- a[618]=0;
- a[619]=0;
- a[620]=0;
- a[621]=0;
- a[622]=0;
- a[623]=0;
- a[624]=0;
- a[625]=0;
- a[626]=0;
- a[627]=0;
- a[628]=0;
- a[629]=0;
- a[630]=0;
- a[631]=0;
- a[632]=0;
- a[633]=0;
- a[634]=0;
- a[635]=0;
- a[636]=0;
- a[637]=0;
- a[638]=0;
- a[639]=0;
- a[640]=0;
- a[641]=0;
- a[642]=0;
- a[643]=0;
- a[644]=0;
- a[645]=0;
- a[646]=0;
- a[647]=0;
- a[648]=0;
- a[649]=0;
- a[650]=0;
- a[651]=0;
- a[652]=0;
- a[653]=0;
- a[654]=0;
- a[655]=0;
- a[656]=0;
- a[657]=0;
- a[658]=0;
- a[659]=0;
- a[660]=0;
- a[661]=0;
- a[662]=0;
- a[663]=0;
- a[664]=0;
- a[665]=0;
- a[666]=0;
- a[667]=0;
- a[668]=0;
- a[669]=0;
- a[670]=0;
- a[671]=0;
- a[672]=0;
- a[673]=0;
- a[674]=0;
- a[675]=0;
- a[676]=0;
- a[677]=0;
- a[678]=0;
- a[679]=0;
- a[680]=0;
- a[681]=0;
- a[682]=0;
- a[683]=0;
- a[684]=0;
- a[685]=0;
- a[686]=0;
- a[687]=0;
- a[688]=0;
- a[689]=0;
- a[690]=0;
- a[691]=0;
- a[692]=0;
- a[693]=0;
- a[694]=0;
- a[695]=0;
- a[696]=0;
- a[697]=0;
- a[698]=0;
- a[699]=0;
- a[700]=0;
- a[701]=0;
- a[702]=0;
- a[703]=0;
- a[704]=0;
- a[705]=0;
- a[706]=0;
- a[707]=0;
- a[708]=0;
- a[709]=0;
- a[710]=0;
- a[711]=0;
- a[712]=0;
- a[713]=0;
- a[714]=0;
- a[715]=0;
- a[716]=0;
- a[717]=0;
- a[718]=0;
- a[719]=0;
- a[720]=0;
- a[721]=0;
- a[722]=0;
- a[723]=0;
- a[724]=0;
- a[725]=0;
- a[726]=0;
- a[727]=0;
- a[728]=0;
- a[729]=0;
- a[730]=0;
- a[731]=0;
- a[732]=0;
- a[733]=0;
- a[734]=0;
- a[735]=0;
- a[736]=0;
- a[737]=0;
- a[738]=0;
- a[739]=0;
- a[740]=0;
- a[741]=0;
- a[742]=0;
- a[743]=0;
- a[744]=0;
- a[745]=0;
- a[746]=0;
- a[747]=0;
- a[748]=0;
- a[749]=0;
- a[750]=0;
- a[751]=0;
- a[752]=0;
- a[753]=0;
- a[754]=0;
- a[755]=0;
- a[756]=0;
- a[757]=0;
- a[758]=0;
- a[759]=0;
- a[760]=0;
- a[761]=0;
- a[762]=0;
- a[763]=0;
- a[764]=0;
- a[765]=0;
- a[766]=0;
- a[767]=0;
- a[768]=0;
- a[769]=0;
- a[770]=0;
- a[771]=0;
- a[772]=0;
- a[773]=0;
- a[774]=0;
- a[775]=0;
- a[776]=0;
- a[777]=0;
- a[778]=0;
- a[779]=0;
- a[780]=0;
- a[781]=0;
- a[782]=0;
- a[783]=0;
- a[784]=0;
- a[785]=0;
- a[786]=0;
- a[787]=0;
- a[788]=0;
- a[789]=0;
- a[790]=0;
- a[791]=0;
- a[792]=0;
- a[793]=0;
- a[794]=0;
- a[795]=0;
- a[796]=0;
- a[797]=0;
- a[798]=0;
- a[799]=0;
- a[800]=0;
- a[801]=0;
- a[802]=0;
- a[803]=0;
- a[804]=0;
- a[805]=0;
- a[806]=0;
- a[807]=0;
- a[808]=0;
- a[809]=0;
- a[810]=0;
- a[811]=0;
- a[812]=0;
- a[813]=0;
- a[814]=0;
- a[815]=0;
- a[816]=0;
- a[817]=0;
- a[818]=0;
- a[819]=0;
- a[820]=0;
- a[821]=0;
- a[822]=0;
- a[823]=0;
- a[824]=0;
- a[825]=0;
- a[826]=0;
- a[827]=0;
- a[828]=0;
- a[829]=0;
- a[830]=0;
- a[831]=0;
- a[832]=0;
- a[833]=0;
- a[834]=0;
- a[835]=0;
- a[836]=0;
- a[837]=0;
- a[838]=0;
- a[839]=0;
- a[840]=0;
- a[841]=0;
- a[842]=0;
- a[843]=0;
- a[844]=0;
- a[845]=0;
- a[846]=0;
- a[847]=0;
- a[848]=0;
- a[849]=0;
- a[850]=0;
- a[851]=0;
- a[852]=0;
- a[853]=0;
- a[854]=0;
- a[855]=0;
- a[856]=0;
- a[857]=0;
- a[858]=0;
- a[859]=0;
- a[860]=0;
- a[861]=0;
- a[862]=0;
- a[863]=0;
- a[864]=0;
- a[865]=0;
- a[866]=0;
- a[867]=0;
- a[868]=0;
- a[869]=0;
- a[870]=0;
- a[871]=0;
- a[872]=0;
- a[873]=0;
- a[874]=0;
- a[875]=0;
- a[876]=0;
- a[877]=0;
- a[878]=0;
- a[879]=0;
- a[880]=0;
- a[881]=0;
- a[882]=0;
- a[883]=0;
- a[884]=0;
- a[885]=0;
- a[886]=0;
- a[887]=0;
- a[888]=0;
- a[889]=0;
- a[890]=0;
- a[891]=0;
- a[892]=0;
- a[893]=0;
- a[894]=0;
- a[895]=0;
- a[896]=0;
- a[897]=0;
- a[898]=0;
- a[899]=0;
- a[900]=0;
- a[901]=0;
- a[902]=0;
- a[903]=0;
- a[904]=0;
- a[905]=0;
- a[906]=0;
- a[907]=0;
- a[908]=0;
- a[909]=0;
- a[910]=0;
- a[911]=0;
- a[912]=0;
- a[913]=0;
- a[914]=0;
- a[915]=0;
- a[916]=0;
- a[917]=0;
- a[918]=0;
- a[919]=0;
- a[920]=0;
- a[921]=0;
- a[922]=0;
- a[923]=0;
- a[924]=0;
- a[925]=0;
- a[926]=0;
- a[927]=0;
- a[928]=0;
- a[929]=0;
- a[930]=0;
- a[931]=0;
- a[932]=0;
- a[933]=0;
- a[934]=0;
- a[935]=0;
- a[936]=0;
- a[937]=0;
- a[938]=0;
- a[939]=0;
- a[940]=0;
- a[941]=0;
- a[942]=0;
- a[943]=0;
- a[944]=0;
- a[945]=0;
- a[946]=0;
- a[947]=0;
- a[948]=0;
- a[949]=0;
- a[950]=0;
- a[951]=0;
- a[952]=0;
- a[953]=0;
- a[954]=0;
- a[955]=0;
- a[956]=0;
- a[957]=0;
- a[958]=0;
- a[959]=0;
- a[960]=0;
- a[961]=0;
- a[962]=0;
- a[963]=0;
- a[964]=0;
- a[965]=0;
- a[966]=0;
- a[967]=0;
- a[968]=0;
- a[969]=0;
- a[970]=0;
- a[971]=0;
- a[972]=0;
- a[973]=0;
- a[974]=0;
- a[975]=0;
- a[976]=0;
- a[977]=0;
- a[978]=0;
- a[979]=0;
- a[980]=0;
- a[981]=0;
- a[982]=0;
- a[983]=0;
- a[984]=0;
- a[985]=0;
- a[986]=0;
- a[987]=0;
- a[988]=0;
- a[989]=0;
- a[990]=0;
- a[991]=0;
- a[992]=0;
- a[993]=0;
- a[994]=0;
- a[995]=0;
- a[996]=0;
- a[997]=0;
- a[998]=0;
- a[999]=0;
- a[1000]=0;
- a[1001]=0;
- a[1002]=0;
- a[1003]=0;
- a[1004]=0;
- a[1005]=0;
- a[1006]=0;
- a[1007]=0;
- a[1008]=0;
- a[1009]=0;
- a[1010]=0;
- a[1011]=0;
- a[1012]=0;
- a[1013]=0;
- a[1014]=0;
- a[1015]=0;
- a[1016]=0;
- a[1017]=0;
- a[1018]=0;
- a[1019]=0;
- a[1020]=0;
- a[1021]=0;
- a[1022]=0;
- a[1023]=0;
- a[1024]=0;
- a[1025]=0;
- a[1026]=0;
- a[1027]=0;
- a[1028]=0;
- a[1029]=0;
- a[1030]=0;
- a[1031]=0;
- a[1032]=0;
- a[1033]=0;
- a[1034]=0;
- a[1035]=0;
- a[1036]=0;
- a[1037]=0;
- a[1038]=0;
- a[1039]=0;
- a[1040]=0;
- a[1041]=0;
- a[1042]=0;
- a[1043]=0;
- a[1044]=0;
- a[1045]=0;
- a[1046]=0;
- a[1047]=0;
- a[1048]=0;
- a[1049]=0;
- a[1050]=0;
- a[1051]=0;
- a[1052]=0;
- a[1053]=0;
- a[1054]=0;
- a[1055]=0;
- a[1056]=0;
- a[1057]=0;
- a[1058]=0;
- a[1059]=0;
- a[1060]=0;
- a[1061]=0;
- a[1062]=0;
- a[1063]=0;
- a[1064]=0;
- a[1065]=0;
- a[1066]=0;
- a[1067]=0;
- a[1068]=0;
- a[1069]=0;
- a[1070]=0;
- a[1071]=0;
- a[1072]=0;
- a[1073]=0;
- a[1074]=0;
- a[1075]=0;
- a[1076]=0;
- a[1077]=0;
- a[1078]=0;
- a[1079]=0;
- a[1080]=0;
- a[1081]=0;
- a[1082]=0;
- a[1083]=0;
- a[1084]=0;
- a[1085]=0;
- a[1086]=0;
- a[1087]=0;
- a[1088]=0;
- a[1089]=0;
- a[1090]=0;
- a[1091]=0;
- a[1092]=0;
- a[1093]=0;
- a[1094]=0;
- a[1095]=0;
- a[1096]=0;
- a[1097]=0;
- a[1098]=0;
- a[1099]=0;
- a[1100]=0;
- a[1101]=0;
- a[1102]=0;
- a[1103]=0;
- a[1104]=0;
- a[1105]=0;
- a[1106]=0;
- a[1107]=0;
- a[1108]=0;
- a[1109]=0;
- a[1110]=0;
- a[1111]=0;
- a[1112]=0;
- a[1113]=0;
- a[1114]=0;
- a[1115]=0;
- a[1116]=0;
- a[1117]=0;
- a[1118]=0;
- a[1119]=0;
- a[1120]=0;
- a[1121]=0;
- a[1122]=0;
- a[1123]=0;
- a[1124]=0;
- a[1125]=0;
- a[1126]=0;
- a[1127]=0;
- a[1128]=0;
- a[1129]=0;
- a[1130]=0;
- a[1131]=0;
- a[1132]=0;
- a[1133]=0;
- a[1134]=0;
- a[1135]=0;
- a[1136]=0;
- a[1137]=0;
- a[1138]=0;
- a[1139]=0;
- a[1140]=0;
- a[1141]=0;
- a[1142]=0;
- a[1143]=0;
- a[1144]=0;
- a[1145]=0;
- a[1146]=0;
- a[1147]=0;
- a[1148]=0;
- a[1149]=0;
- a[1150]=0;
- a[1151]=0;
- a[1152]=0;
- a[1153]=0;
- a[1154]=0;
- a[1155]=0;
- a[1156]=0;
- a[1157]=0;
- a[1158]=0;
- a[1159]=0;
- a[1160]=0;
- a[1161]=0;
- a[1162]=0;
- a[1163]=0;
- a[1164]=0;
- a[1165]=0;
- a[1166]=0;
- a[1167]=0;
- a[1168]=0;
- a[1169]=0;
- a[1170]=0;
- a[1171]=0;
- a[1172]=0;
- a[1173]=0;
- a[1174]=0;
- a[1175]=0;
- a[1176]=0;
- a[1177]=0;
- a[1178]=0;
- a[1179]=0;
- a[1180]=0;
- a[1181]=0;
- a[1182]=0;
- a[1183]=0;
- a[1184]=0;
- a[1185]=0;
- a[1186]=0;
- a[1187]=0;
- a[1188]=0;
- a[1189]=0;
- a[1190]=0;
- a[1191]=0;
- a[1192]=0;
- a[1193]=0;
- a[1194]=0;
- a[1195]=0;
- a[1196]=0;
- a[1197]=0;
- a[1198]=0;
- a[1199]=0;
- a[1200]=0;
- a[1201]=0;
- a[1202]=0;
- a[1203]=0;
- a[1204]=0;
- a[1205]=0;
- a[1206]=0;
- a[1207]=0;
- a[1208]=0;
- a[1209]=0;
- a[1210]=0;
- a[1211]=0;
- a[1212]=0;
- a[1213]=0;
- a[1214]=0;
- a[1215]=0;
- a[1216]=0;
- a[1217]=0;
- a[1218]=0;
- a[1219]=0;
- a[1220]=0;
- a[1221]=0;
- a[1222]=0;
- a[1223]=0;
- a[1224]=0;
- a[1225]=0;
- a[1226]=0;
- a[1227]=0;
- a[1228]=0;
- a[1229]=0;
- a[1230]=0;
- a[1231]=0;
- a[1232]=0;
- a[1233]=0;
- a[1234]=0;
- a[1235]=0;
- a[1236]=0;
- a[1237]=0;
- a[1238]=0;
- a[1239]=0;
- a[1240]=0;
- a[1241]=0;
- a[1242]=0;
- a[1243]=0;
- a[1244]=0;
- a[1245]=0;
- a[1246]=0;
- a[1247]=0;
- a[1248]=0;
- a[1249]=0;
- a[1250]=0;
- a[1251]=0;
- a[1252]=0;
- a[1253]=0;
- a[1254]=0;
- a[1255]=0;
- a[1256]=0;
- a[1257]=0;
- a[1258]=0;
- a[1259]=0;
- a[1260]=0;
- a[1261]=0;
- a[1262]=0;
- a[1263]=0;
- a[1264]=0;
- a[1265]=0;
- a[1266]=0;
- a[1267]=0;
- a[1268]=0;
- a[1269]=0;
- a[1270]=0;
- a[1271]=0;
- a[1272]=0;
- a[1273]=0;
- a[1274]=0;
- a[1275]=0;
- a[1276]=0;
- a[1277]=0;
- a[1278]=0;
- a[1279]=0;
- a[1280]=0;
- a[1281]=0;
- a[1282]=0;
- a[1283]=0;
- a[1284]=0;
- a[1285]=0;
- a[1286]=0;
- a[1287]=0;
- a[1288]=0;
- a[1289]=0;
- a[1290]=0;
- a[1291]=0;
- a[1292]=0;
- a[1293]=0;
- a[1294]=0;
- a[1295]=0;
- a[1296]=0;
- a[1297]=0;
- a[1298]=0;
- a[1299]=0;
- a[1300]=0;
- a[1301]=0;
- a[1302]=0;
- a[1303]=0;
- a[1304]=0;
- a[1305]=0;
- a[1306]=0;
- a[1307]=0;
- a[1308]=0;
- a[1309]=0;
- a[1310]=0;
- a[1311]=0;
- a[1312]=0;
- a[1313]=0;
- a[1314]=0;
- a[1315]=0;
- a[1316]=0;
- a[1317]=0;
- a[1318]=0;
- a[1319]=0;
- a[1320]=0;
- a[1321]=0;
- a[1322]=0;
- a[1323]=0;
- a[1324]=0;
- a[1325]=0;
- a[1326]=0;
- a[1327]=0;
- a[1328]=0;
- a[1329]=0;
- a[1330]=0;
- a[1331]=0;
- a[1332]=0;
- a[1333]=0;
- a[1334]=0;
- a[1335]=0;
- a[1336]=0;
- a[1337]=0;
- a[1338]=0;
- a[1339]=0;
- a[1340]=0;
- a[1341]=0;
- a[1342]=0;
- a[1343]=0;
- a[1344]=0;
- a[1345]=0;
- a[1346]=0;
- a[1347]=0;
- a[1348]=0;
- a[1349]=0;
- a[1350]=0;
- a[1351]=0;
- a[1352]=0;
- a[1353]=0;
- a[1354]=0;
- a[1355]=0;
- a[1356]=0;
- a[1357]=0;
- a[1358]=0;
- a[1359]=0;
- a[1360]=0;
- a[1361]=0;
- a[1362]=0;
- a[1363]=0;
- a[1364]=0;
- a[1365]=0;
- a[1366]=0;
- a[1367]=0;
- a[1368]=0;
- a[1369]=0;
- a[1370]=0;
- a[1371]=0;
- a[1372]=0;
- a[1373]=0;
- a[1374]=0;
- a[1375]=0;
- a[1376]=0;
- a[1377]=0;
- a[1378]=0;
- a[1379]=0;
- a[1380]=0;
- a[1381]=0;
- a[1382]=0;
- a[1383]=0;
- a[1384]=0;
- a[1385]=0;
- a[1386]=0;
- a[1387]=0;
- a[1388]=0;
- a[1389]=0;
- a[1390]=0;
- a[1391]=0;
- a[1392]=0;
- a[1393]=0;
- a[1394]=0;
- a[1395]=0;
- a[1396]=0;
- a[1397]=0;
- a[1398]=0;
- a[1399]=0;
- a[1400]=0;
- a[1401]=0;
- a[1402]=0;
- a[1403]=0;
- a[1404]=0;
- a[1405]=0;
- a[1406]=0;
- a[1407]=0;
- a[1408]=0;
- a[1409]=0;
- a[1410]=0;
- a[1411]=0;
- a[1412]=0;
- a[1413]=0;
- a[1414]=0;
- a[1415]=0;
- a[1416]=0;
- a[1417]=0;
- a[1418]=0;
- a[1419]=0;
- a[1420]=0;
- a[1421]=0;
- a[1422]=0;
- a[1423]=0;
- a[1424]=0;
- a[1425]=0;
- a[1426]=0;
- a[1427]=0;
- a[1428]=0;
- a[1429]=0;
- a[1430]=0;
- a[1431]=0;
- a[1432]=0;
- a[1433]=0;
- a[1434]=0;
- a[1435]=0;
- a[1436]=0;
- a[1437]=0;
- a[1438]=0;
- a[1439]=0;
- a[1440]=0;
- a[1441]=0;
- a[1442]=0;
- a[1443]=0;
- a[1444]=0;
- a[1445]=0;
- a[1446]=0;
- a[1447]=0;
- a[1448]=0;
- a[1449]=0;
- a[1450]=0;
- a[1451]=0;
- a[1452]=0;
- a[1453]=0;
- a[1454]=0;
- a[1455]=0;
- a[1456]=0;
- a[1457]=0;
- a[1458]=0;
- a[1459]=0;
- a[1460]=0;
- a[1461]=0;
- a[1462]=0;
- a[1463]=0;
- a[1464]=0;
- a[1465]=0;
- a[1466]=0;
- a[1467]=0;
- a[1468]=0;
- a[1469]=0;
- a[1470]=0;
- a[1471]=0;
- a[1472]=0;
- a[1473]=0;
- a[1474]=0;
- a[1475]=0;
- a[1476]=0;
- a[1477]=0;
- a[1478]=0;
- a[1479]=0;
- a[1480]=0;
- a[1481]=0;
- a[1482]=0;
- a[1483]=0;
- a[1484]=0;
- a[1485]=0;
- a[1486]=0;
- a[1487]=0;
- a[1488]=0;
- a[1489]=0;
- a[1490]=0;
- a[1491]=0;
- a[1492]=0;
- a[1493]=0;
- a[1494]=0;
- a[1495]=0;
- a[1496]=0;
- a[1497]=0;
- a[1498]=0;
- a[1499]=0;
- a[1500]=0;
- a[1501]=0;
- a[1502]=0;
- a[1503]=0;
- a[1504]=0;
- a[1505]=0;
- a[1506]=0;
- a[1507]=0;
- a[1508]=0;
- a[1509]=0;
- a[1510]=0;
- a[1511]=0;
- a[1512]=0;
- a[1513]=0;
- a[1514]=0;
- a[1515]=0;
- a[1516]=0;
- a[1517]=0;
- a[1518]=0;
- a[1519]=0;
- a[1520]=0;
- a[1521]=0;
- a[1522]=0;
- a[1523]=0;
- a[1524]=0;
- a[1525]=0;
- a[1526]=0;
- a[1527]=0;
- a[1528]=0;
- a[1529]=0;
- a[1530]=0;
- a[1531]=0;
- a[1532]=0;
- a[1533]=0;
- a[1534]=0;
- a[1535]=0;
- a[1536]=0;
- a[1537]=0;
- a[1538]=0;
- a[1539]=0;
- a[1540]=0;
- a[1541]=0;
- a[1542]=0;
- a[1543]=0;
- a[1544]=0;
- a[1545]=0;
- a[1546]=0;
- a[1547]=0;
- a[1548]=0;
- a[1549]=0;
- a[1550]=0;
- a[1551]=0;
- a[1552]=0;
- a[1553]=0;
- a[1554]=0;
- a[1555]=0;
- a[1556]=0;
- a[1557]=0;
- a[1558]=0;
- a[1559]=0;
- a[1560]=0;
- a[1561]=0;
- a[1562]=0;
- a[1563]=0;
- a[1564]=0;
- a[1565]=0;
- a[1566]=0;
- a[1567]=0;
- a[1568]=0;
- a[1569]=0;
- a[1570]=0;
- a[1571]=0;
- a[1572]=0;
- a[1573]=0;
- a[1574]=0;
- a[1575]=0;
- a[1576]=0;
- a[1577]=0;
- a[1578]=0;
- a[1579]=0;
- a[1580]=0;
- a[1581]=0;
- a[1582]=0;
- a[1583]=0;
- a[1584]=0;
- a[1585]=0;
- a[1586]=0;
- a[1587]=0;
- a[1588]=0;
- a[1589]=0;
- a[1590]=0;
- a[1591]=0;
- a[1592]=0;
- a[1593]=0;
- a[1594]=0;
- a[1595]=0;
- a[1596]=0;
- a[1597]=0;
- a[1598]=0;
- a[1599]=0;
- a[1600]=0;
- a[1601]=0;
- a[1602]=0;
- a[1603]=0;
- a[1604]=0;
- a[1605]=0;
- a[1606]=0;
- a[1607]=0;
- a[1608]=0;
- a[1609]=0;
- a[1610]=0;
- a[1611]=0;
- a[1612]=0;
- a[1613]=0;
- a[1614]=0;
- a[1615]=0;
- a[1616]=0;
- a[1617]=0;
- a[1618]=0;
- a[1619]=0;
- a[1620]=0;
- a[1621]=0;
- a[1622]=0;
- a[1623]=0;
- a[1624]=0;
- a[1625]=0;
- a[1626]=0;
- a[1627]=0;
- a[1628]=0;
- a[1629]=0;
- a[1630]=0;
- a[1631]=0;
- a[1632]=0;
- a[1633]=0;
- a[1634]=0;
- a[1635]=0;
- a[1636]=0;
- a[1637]=0;
- a[1638]=0;
- a[1639]=0;
- a[1640]=0;
- a[1641]=0;
- a[1642]=0;
- a[1643]=0;
- a[1644]=0;
- a[1645]=0;
- a[1646]=0;
- a[1647]=0;
- a[1648]=0;
- a[1649]=0;
- a[1650]=0;
- a[1651]=0;
- a[1652]=0;
- a[1653]=0;
- a[1654]=0;
- a[1655]=0;
- a[1656]=0;
- a[1657]=0;
- a[1658]=0;
- a[1659]=0;
- a[1660]=0;
- a[1661]=0;
- a[1662]=0;
- a[1663]=0;
- a[1664]=0;
- a[1665]=0;
- a[1666]=0;
- a[1667]=0;
- a[1668]=0;
- a[1669]=0;
- a[1670]=0;
- a[1671]=0;
- a[1672]=0;
- a[1673]=0;
- a[1674]=0;
- a[1675]=0;
- a[1676]=0;
- a[1677]=0;
- a[1678]=0;
- a[1679]=0;
- a[1680]=0;
- a[1681]=0;
- a[1682]=0;
- a[1683]=0;
- a[1684]=0;
- a[1685]=0;
- a[1686]=0;
- a[1687]=0;
- a[1688]=0;
- a[1689]=0;
- a[1690]=0;
- a[1691]=0;
- a[1692]=0;
- a[1693]=0;
- a[1694]=0;
- a[1695]=0;
- a[1696]=0;
- a[1697]=0;
- a[1698]=0;
- a[1699]=0;
- a[1700]=0;
- a[1701]=0;
- a[1702]=0;
- a[1703]=0;
- a[1704]=0;
- a[1705]=0;
- a[1706]=0;
- a[1707]=0;
- a[1708]=0;
- a[1709]=0;
- a[1710]=0;
- a[1711]=0;
- a[1712]=0;
- a[1713]=0;
- a[1714]=0;
- a[1715]=0;
- a[1716]=0;
- a[1717]=0;
- a[1718]=0;
- a[1719]=0;
- a[1720]=0;
- a[1721]=0;
- a[1722]=0;
- a[1723]=0;
- a[1724]=0;
- a[1725]=0;
- a[1726]=0;
- a[1727]=0;
- a[1728]=0;
- a[1729]=0;
- a[1730]=0;
- a[1731]=0;
- a[1732]=0;
- a[1733]=0;
- a[1734]=0;
- a[1735]=0;
- a[1736]=0;
- a[1737]=0;
- a[1738]=0;
- a[1739]=0;
- a[1740]=0;
- a[1741]=0;
- a[1742]=0;
- a[1743]=0;
- a[1744]=0;
- a[1745]=0;
- a[1746]=0;
- a[1747]=0;
- a[1748]=0;
- a[1749]=0;
- a[1750]=0;
- a[1751]=0;
- a[1752]=0;
- a[1753]=0;
- a[1754]=0;
- a[1755]=0;
- a[1756]=0;
- a[1757]=0;
- a[1758]=0;
- a[1759]=0;
- a[1760]=0;
- a[1761]=0;
- a[1762]=0;
- a[1763]=0;
- a[1764]=0;
- a[1765]=0;
- a[1766]=0;
- a[1767]=0;
- a[1768]=0;
- a[1769]=0;
- a[1770]=0;
- a[1771]=0;
- a[1772]=0;
- a[1773]=0;
- a[1774]=0;
- a[1775]=0;
- a[1776]=0;
- a[1777]=0;
- a[1778]=0;
- a[1779]=0;
- a[1780]=0;
- a[1781]=0;
- a[1782]=0;
- a[1783]=0;
- a[1784]=0;
- a[1785]=0;
- a[1786]=0;
- a[1787]=0;
- a[1788]=0;
- a[1789]=0;
- a[1790]=0;
- a[1791]=0;
- a[1792]=0;
- a[1793]=0;
- a[1794]=0;
- a[1795]=0;
- a[1796]=0;
- a[1797]=0;
- a[1798]=0;
- a[1799]=0;
- a[1800]=0;
- a[1801]=0;
- a[1802]=0;
- a[1803]=0;
- a[1804]=0;
- a[1805]=0;
- a[1806]=0;
- a[1807]=0;
- a[1808]=0;
- a[1809]=0;
- a[1810]=0;
- a[1811]=0;
- a[1812]=0;
- a[1813]=0;
- a[1814]=0;
- a[1815]=0;
- a[1816]=0;
- a[1817]=0;
- a[1818]=0;
- a[1819]=0;
- a[1820]=0;
- a[1821]=0;
- a[1822]=0;
- a[1823]=0;
- a[1824]=0;
- a[1825]=0;
- a[1826]=0;
- a[1827]=0;
- a[1828]=0;
- a[1829]=0;
- a[1830]=0;
- a[1831]=0;
- a[1832]=0;
- a[1833]=0;
- a[1834]=0;
- a[1835]=0;
- a[1836]=0;
- a[1837]=0;
- a[1838]=0;
- a[1839]=0;
- a[1840]=0;
- a[1841]=0;
- a[1842]=0;
- a[1843]=0;
- a[1844]=0;
- a[1845]=0;
- a[1846]=0;
- a[1847]=0;
- a[1848]=0;
- a[1849]=0;
- a[1850]=0;
- a[1851]=0;
- a[1852]=0;
- a[1853]=0;
- a[1854]=0;
- a[1855]=0;
- a[1856]=0;
- a[1857]=0;
- a[1858]=0;
- a[1859]=0;
- a[1860]=0;
- a[1861]=0;
- a[1862]=0;
- a[1863]=0;
- a[1864]=0;
- a[1865]=0;
- a[1866]=0;
- a[1867]=0;
- a[1868]=0;
- a[1869]=0;
- a[1870]=0;
- a[1871]=0;
- a[1872]=0;
- a[1873]=0;
- a[1874]=0;
- a[1875]=0;
- a[1876]=0;
- a[1877]=0;
- a[1878]=0;
- a[1879]=0;
- a[1880]=0;
- a[1881]=0;
- a[1882]=0;
- a[1883]=0;
- a[1884]=0;
- a[1885]=0;
- a[1886]=0;
- a[1887]=0;
- a[1888]=0;
- a[1889]=0;
- a[1890]=0;
- a[1891]=0;
- a[1892]=0;
- a[1893]=0;
- a[1894]=0;
- a[1895]=0;
- a[1896]=0;
- a[1897]=0;
- a[1898]=0;
- a[1899]=0;
- a[1900]=0;
- a[1901]=0;
- a[1902]=0;
- a[1903]=0;
- a[1904]=0;
- a[1905]=0;
- a[1906]=0;
- a[1907]=0;
- a[1908]=0;
- a[1909]=0;
- a[1910]=0;
- a[1911]=0;
- a[1912]=0;
- a[1913]=0;
- a[1914]=0;
- a[1915]=0;
- a[1916]=0;
- a[1917]=0;
- a[1918]=0;
- a[1919]=0;
- a[1920]=0;
- a[1921]=0;
- a[1922]=0;
- a[1923]=0;
- a[1924]=0;
- a[1925]=0;
- a[1926]=0;
- a[1927]=0;
- a[1928]=0;
- a[1929]=0;
- a[1930]=0;
- a[1931]=0;
- a[1932]=0;
- a[1933]=0;
- a[1934]=0;
- a[1935]=0;
- a[1936]=0;
- a[1937]=0;
- a[1938]=0;
- a[1939]=0;
- a[1940]=0;
- a[1941]=0;
- a[1942]=0;
- a[1943]=0;
- a[1944]=0;
- a[1945]=0;
- a[1946]=0;
- a[1947]=0;
- a[1948]=0;
- a[1949]=0;
- a[1950]=0;
- a[1951]=0;
- a[1952]=0;
- a[1953]=0;
- a[1954]=0;
- a[1955]=0;
- a[1956]=0;
- a[1957]=0;
- a[1958]=0;
- a[1959]=0;
- a[1960]=0;
- a[1961]=0;
- a[1962]=0;
- a[1963]=0;
- a[1964]=0;
- a[1965]=0;
- a[1966]=0;
- a[1967]=0;
- a[1968]=0;
- a[1969]=0;
- a[1970]=0;
- a[1971]=0;
- a[1972]=0;
- a[1973]=0;
- a[1974]=0;
- a[1975]=0;
- a[1976]=0;
- a[1977]=0;
- a[1978]=0;
- a[1979]=0;
- a[1980]=0;
- a[1981]=0;
- a[1982]=0;
- a[1983]=0;
- a[1984]=0;
- a[1985]=0;
- a[1986]=0;
- a[1987]=0;
- a[1988]=0;
- a[1989]=0;
- a[1990]=0;
- a[1991]=0;
- a[1992]=0;
- a[1993]=0;
- a[1994]=0;
- a[1995]=0;
- a[1996]=0;
- a[1997]=0;
- a[1998]=0;
- a[1999]=0;
- a[2000]=0;
- a[2001]=0;
- a[2002]=0;
- a[2003]=0;
- a[2004]=0;
- a[2005]=0;
- a[2006]=0;
- a[2007]=0;
- a[2008]=0;
- a[2009]=0;
- a[2010]=0;
- a[2011]=0;
- a[2012]=0;
- a[2013]=0;
- a[2014]=0;
- a[2015]=0;
- a[2016]=0;
- a[2017]=0;
- a[2018]=0;
- a[2019]=0;
- a[2020]=0;
- a[2021]=0;
- a[2022]=0;
- a[2023]=0;
- a[2024]=0;
- a[2025]=0;
- a[2026]=0;
- a[2027]=0;
- a[2028]=0;
- a[2029]=0;
- a[2030]=0;
- a[2031]=0;
- a[2032]=0;
- a[2033]=0;
- a[2034]=0;
- a[2035]=0;
- a[2036]=0;
- a[2037]=0;
- a[2038]=0;
- a[2039]=0;
- a[2040]=0;
- a[2041]=0;
- a[2042]=0;
- a[2043]=0;
- a[2044]=0;
- a[2045]=0;
- a[2046]=0;
- a[2047]=0;
- a[2048]=0;
- a[2049]=0;
- a[2050]=0;
- a[2051]=0;
- a[2052]=0;
- a[2053]=0;
- a[2054]=0;
- a[2055]=0;
- a[2056]=0;
- a[2057]=0;
- a[2058]=0;
- a[2059]=0;
- a[2060]=0;
- a[2061]=0;
- a[2062]=0;
- a[2063]=0;
- a[2064]=0;
- a[2065]=0;
- a[2066]=0;
- a[2067]=0;
- a[2068]=0;
- a[2069]=0;
- a[2070]=0;
- a[2071]=0;
- a[2072]=0;
- a[2073]=0;
- a[2074]=0;
- a[2075]=0;
- a[2076]=0;
- a[2077]=0;
- a[2078]=0;
- a[2079]=0;
- a[2080]=0;
- a[2081]=0;
- a[2082]=0;
- a[2083]=0;
- a[2084]=0;
- a[2085]=0;
- a[2086]=0;
- a[2087]=0;
- a[2088]=0;
- a[2089]=0;
- a[2090]=0;
- a[2091]=0;
- a[2092]=0;
- a[2093]=0;
- a[2094]=0;
- a[2095]=0;
- a[2096]=0;
- a[2097]=0;
- a[2098]=0;
- a[2099]=0;
- a[2100]=0;
- a[2101]=0;
- a[2102]=0;
- a[2103]=0;
- a[2104]=0;
- a[2105]=0;
- a[2106]=0;
- a[2107]=0;
- a[2108]=0;
- a[2109]=0;
- a[2110]=0;
- a[2111]=0;
- a[2112]=0;
- a[2113]=0;
- a[2114]=0;
- a[2115]=0;
- a[2116]=0;
- a[2117]=0;
- a[2118]=0;
- a[2119]=0;
- a[2120]=0;
- a[2121]=0;
- a[2122]=0;
- a[2123]=0;
- a[2124]=0;
- a[2125]=0;
- a[2126]=0;
- a[2127]=0;
- a[2128]=0;
- a[2129]=0;
- a[2130]=0;
- a[2131]=0;
- a[2132]=0;
- a[2133]=0;
- a[2134]=0;
- a[2135]=0;
- a[2136]=0;
- a[2137]=0;
- a[2138]=0;
- a[2139]=0;
- a[2140]=0;
- a[2141]=0;
- a[2142]=0;
- a[2143]=0;
- a[2144]=0;
- a[2145]=0;
- a[2146]=0;
- a[2147]=0;
- a[2148]=0;
- a[2149]=0;
- a[2150]=0;
- a[2151]=0;
- a[2152]=0;
- a[2153]=0;
- a[2154]=0;
- a[2155]=0;
- a[2156]=0;
- a[2157]=0;
- a[2158]=0;
- a[2159]=0;
- a[2160]=0;
- a[2161]=0;
- a[2162]=0;
- a[2163]=0;
- a[2164]=0;
- a[2165]=0;
- a[2166]=0;
- a[2167]=0;
- a[2168]=0;
- a[2169]=0;
- a[2170]=0;
- a[2171]=0;
- a[2172]=0;
- a[2173]=0;
- a[2174]=0;
- a[2175]=0;
- a[2176]=0;
- a[2177]=0;
- a[2178]=0;
- a[2179]=0;
- a[2180]=0;
- a[2181]=0;
- a[2182]=0;
- a[2183]=0;
- a[2184]=0;
- a[2185]=0;
- a[2186]=0;
- a[2187]=0;
- a[2188]=0;
- a[2189]=0;
- a[2190]=0;
- a[2191]=0;
- a[2192]=0;
- a[2193]=0;
- a[2194]=0;
- a[2195]=0;
- a[2196]=0;
- a[2197]=0;
- a[2198]=0;
- a[2199]=0;
- a[2200]=0;
- a[2201]=0;
- a[2202]=0;
- a[2203]=0;
- a[2204]=0;
- a[2205]=0;
- a[2206]=0;
- a[2207]=0;
- a[2208]=0;
- a[2209]=0;
- a[2210]=0;
- a[2211]=0;
- a[2212]=0;
- a[2213]=0;
- a[2214]=0;
- a[2215]=0;
- a[2216]=0;
- a[2217]=0;
- a[2218]=0;
- a[2219]=0;
- a[2220]=0;
- a[2221]=0;
- a[2222]=0;
- a[2223]=0;
- a[2224]=0;
- a[2225]=0;
- a[2226]=0;
- a[2227]=0;
- a[2228]=0;
- a[2229]=0;
- a[2230]=0;
- a[2231]=0;
- a[2232]=0;
- a[2233]=0;
- a[2234]=0;
- a[2235]=0;
- a[2236]=0;
- a[2237]=0;
- a[2238]=0;
- a[2239]=0;
- a[2240]=0;
- a[2241]=0;
- a[2242]=0;
- a[2243]=0;
- a[2244]=0;
- a[2245]=0;
- a[2246]=0;
- a[2247]=0;
- a[2248]=0;
- a[2249]=0;
- a[2250]=0;
- a[2251]=0;
- a[2252]=0;
- a[2253]=0;
- a[2254]=0;
- a[2255]=0;
- a[2256]=0;
- a[2257]=0;
- a[2258]=0;
- a[2259]=0;
- a[2260]=0;
- a[2261]=0;
- a[2262]=0;
- a[2263]=0;
- a[2264]=0;
- a[2265]=0;
- a[2266]=0;
- a[2267]=0;
- a[2268]=0;
- a[2269]=0;
- a[2270]=0;
- a[2271]=0;
- a[2272]=0;
- a[2273]=0;
- a[2274]=0;
- a[2275]=0;
- a[2276]=0;
- a[2277]=0;
- a[2278]=0;
- a[2279]=0;
- a[2280]=0;
- a[2281]=0;
- a[2282]=0;
- a[2283]=0;
- a[2284]=0;
- a[2285]=0;
- a[2286]=0;
- a[2287]=0;
- a[2288]=0;
- a[2289]=0;
- a[2290]=0;
- a[2291]=0;
- a[2292]=0;
- a[2293]=0;
- a[2294]=0;
- a[2295]=0;
- a[2296]=0;
- a[2297]=0;
- a[2298]=0;
- a[2299]=0;
- a[2300]=0;
- a[2301]=0;
- a[2302]=0;
- a[2303]=0;
- a[2304]=0;
- a[2305]=0;
- a[2306]=0;
- a[2307]=0;
- a[2308]=0;
- a[2309]=0;
- a[2310]=0;
- a[2311]=0;
- a[2312]=0;
- a[2313]=0;
- a[2314]=0;
- a[2315]=0;
- a[2316]=0;
- a[2317]=0;
- a[2318]=0;
- a[2319]=0;
- a[2320]=0;
- a[2321]=0;
- a[2322]=0;
- a[2323]=0;
- a[2324]=0;
- a[2325]=0;
- a[2326]=0;
- a[2327]=0;
- a[2328]=0;
- a[2329]=0;
- a[2330]=0;
- a[2331]=0;
- a[2332]=0;
- a[2333]=0;
- a[2334]=0;
- a[2335]=0;
- a[2336]=0;
- a[2337]=0;
- a[2338]=0;
- a[2339]=0;
- a[2340]=0;
- a[2341]=0;
- a[2342]=0;
- a[2343]=0;
- a[2344]=0;
- a[2345]=0;
- a[2346]=0;
- a[2347]=0;
- a[2348]=0;
- a[2349]=0;
- a[2350]=0;
- a[2351]=0;
- a[2352]=0;
- a[2353]=0;
- a[2354]=0;
- a[2355]=0;
- a[2356]=0;
- a[2357]=0;
- a[2358]=0;
- a[2359]=0;
- a[2360]=0;
- a[2361]=0;
- a[2362]=0;
- a[2363]=0;
- a[2364]=0;
- a[2365]=0;
- a[2366]=0;
- a[2367]=0;
- a[2368]=0;
- a[2369]=0;
- a[2370]=0;
- a[2371]=0;
- a[2372]=0;
- a[2373]=0;
- a[2374]=0;
- a[2375]=0;
- a[2376]=0;
- a[2377]=0;
- a[2378]=0;
- a[2379]=0;
- a[2380]=0;
- a[2381]=0;
- a[2382]=0;
- a[2383]=0;
- a[2384]=0;
- a[2385]=0;
- a[2386]=0;
- a[2387]=0;
- a[2388]=0;
- a[2389]=0;
- a[2390]=0;
- a[2391]=0;
- a[2392]=0;
- a[2393]=0;
- a[2394]=0;
- a[2395]=0;
- a[2396]=0;
- a[2397]=0;
- a[2398]=0;
- a[2399]=0;
- a[2400]=0;
- a[2401]=0;
- a[2402]=0;
- a[2403]=0;
- a[2404]=0;
- a[2405]=0;
- a[2406]=0;
- a[2407]=0;
- a[2408]=0;
- a[2409]=0;
- a[2410]=0;
- a[2411]=0;
- a[2412]=0;
- a[2413]=0;
- a[2414]=0;
- a[2415]=0;
- a[2416]=0;
- a[2417]=0;
- a[2418]=0;
- a[2419]=0;
- a[2420]=0;
- a[2421]=0;
- a[2422]=0;
- a[2423]=0;
- a[2424]=0;
- a[2425]=0;
- a[2426]=0;
- a[2427]=0;
- a[2428]=0;
- a[2429]=0;
- a[2430]=0;
- a[2431]=0;
- a[2432]=0;
- a[2433]=0;
- a[2434]=0;
- a[2435]=0;
- a[2436]=0;
- a[2437]=0;
- a[2438]=0;
- a[2439]=0;
- a[2440]=0;
- a[2441]=0;
- a[2442]=0;
- a[2443]=0;
- a[2444]=0;
- a[2445]=0;
- a[2446]=0;
- a[2447]=0;
- a[2448]=0;
- a[2449]=0;
- a[2450]=0;
- a[2451]=0;
- a[2452]=0;
- a[2453]=0;
- a[2454]=0;
- a[2455]=0;
- a[2456]=0;
- a[2457]=0;
- a[2458]=0;
- a[2459]=0;
- a[2460]=0;
- a[2461]=0;
- a[2462]=0;
- a[2463]=0;
- a[2464]=0;
- a[2465]=0;
- a[2466]=0;
- a[2467]=0;
- a[2468]=0;
- a[2469]=0;
- a[2470]=0;
- a[2471]=0;
- a[2472]=0;
- a[2473]=0;
- a[2474]=0;
- a[2475]=0;
- a[2476]=0;
- a[2477]=0;
- a[2478]=0;
- a[2479]=0;
- a[2480]=0;
- a[2481]=0;
- a[2482]=0;
- a[2483]=0;
- a[2484]=0;
- a[2485]=0;
- a[2486]=0;
- a[2487]=0;
- a[2488]=0;
- a[2489]=0;
- a[2490]=0;
- a[2491]=0;
- a[2492]=0;
- a[2493]=0;
- a[2494]=0;
- a[2495]=0;
- a[2496]=0;
- a[2497]=0;
- a[2498]=0;
- a[2499]=0;
- a[2500]=0;
- a[2501]=0;
- a[2502]=0;
- a[2503]=0;
- a[2504]=0;
- a[2505]=0;
- a[2506]=0;
- a[2507]=0;
- a[2508]=0;
- a[2509]=0;
- a[2510]=0;
- a[2511]=0;
- a[2512]=0;
- a[2513]=0;
- a[2514]=0;
- a[2515]=0;
- a[2516]=0;
- a[2517]=0;
- a[2518]=0;
- a[2519]=0;
- a[2520]=0;
- a[2521]=0;
- a[2522]=0;
- a[2523]=0;
- a[2524]=0;
- a[2525]=0;
- a[2526]=0;
- a[2527]=0;
- a[2528]=0;
- a[2529]=0;
- a[2530]=0;
- a[2531]=0;
- a[2532]=0;
- a[2533]=0;
- a[2534]=0;
- a[2535]=0;
- a[2536]=0;
- a[2537]=0;
- a[2538]=0;
- a[2539]=0;
- a[2540]=0;
- a[2541]=0;
- a[2542]=0;
- a[2543]=0;
- a[2544]=0;
- a[2545]=0;
- a[2546]=0;
- a[2547]=0;
- a[2548]=0;
- a[2549]=0;
- a[2550]=0;
- a[2551]=0;
- a[2552]=0;
- a[2553]=0;
- a[2554]=0;
- a[2555]=0;
- a[2556]=0;
- a[2557]=0;
- a[2558]=0;
- a[2559]=0;
- a[2560]=0;
- a[2561]=0;
- a[2562]=0;
- a[2563]=0;
- a[2564]=0;
- a[2565]=0;
- a[2566]=0;
- a[2567]=0;
- a[2568]=0;
- a[2569]=0;
- a[2570]=0;
- a[2571]=0;
- a[2572]=0;
- a[2573]=0;
- a[2574]=0;
- a[2575]=0;
- a[2576]=0;
- a[2577]=0;
- a[2578]=0;
- a[2579]=0;
- a[2580]=0;
- a[2581]=0;
- a[2582]=0;
- a[2583]=0;
- a[2584]=0;
- a[2585]=0;
- a[2586]=0;
- a[2587]=0;
- a[2588]=0;
- a[2589]=0;
- a[2590]=0;
- a[2591]=0;
- a[2592]=0;
- a[2593]=0;
- a[2594]=0;
- a[2595]=0;
- a[2596]=0;
- a[2597]=0;
- a[2598]=0;
- a[2599]=0;
- a[2600]=0;
- a[2601]=0;
- a[2602]=0;
- a[2603]=0;
- a[2604]=0;
- a[2605]=0;
- a[2606]=0;
- a[2607]=0;
- a[2608]=0;
- a[2609]=0;
- a[2610]=0;
- a[2611]=0;
- a[2612]=0;
- a[2613]=0;
- a[2614]=0;
- a[2615]=0;
- a[2616]=0;
- a[2617]=0;
- a[2618]=0;
- a[2619]=0;
- a[2620]=0;
- a[2621]=0;
- a[2622]=0;
- a[2623]=0;
- a[2624]=0;
- a[2625]=0;
- a[2626]=0;
- a[2627]=0;
- a[2628]=0;
- a[2629]=0;
- a[2630]=0;
- a[2631]=0;
- a[2632]=0;
- a[2633]=0;
- a[2634]=0;
- a[2635]=0;
- a[2636]=0;
- a[2637]=0;
- a[2638]=0;
- a[2639]=0;
- a[2640]=0;
- a[2641]=0;
- a[2642]=0;
- a[2643]=0;
- a[2644]=0;
- a[2645]=0;
- a[2646]=0;
- a[2647]=0;
- a[2648]=0;
- a[2649]=0;
- a[2650]=0;
- a[2651]=0;
- a[2652]=0;
- a[2653]=0;
- a[2654]=0;
- a[2655]=0;
- a[2656]=0;
- a[2657]=0;
- a[2658]=0;
- a[2659]=0;
- a[2660]=0;
- a[2661]=0;
- a[2662]=0;
- a[2663]=0;
- a[2664]=0;
- a[2665]=0;
- a[2666]=0;
- a[2667]=0;
- a[2668]=0;
- a[2669]=0;
- a[2670]=0;
- a[2671]=0;
- a[2672]=0;
- a[2673]=0;
- a[2674]=0;
- a[2675]=0;
- a[2676]=0;
- a[2677]=0;
- a[2678]=0;
- a[2679]=0;
- a[2680]=0;
- a[2681]=0;
- a[2682]=0;
- a[2683]=0;
- a[2684]=0;
- a[2685]=0;
- a[2686]=0;
- a[2687]=0;
- a[2688]=0;
- a[2689]=0;
- a[2690]=0;
- a[2691]=0;
- a[2692]=0;
- a[2693]=0;
- a[2694]=0;
- a[2695]=0;
- a[2696]=0;
- a[2697]=0;
- a[2698]=0;
- a[2699]=0;
- a[2700]=0;
- a[2701]=0;
- a[2702]=0;
- a[2703]=0;
- a[2704]=0;
- a[2705]=0;
- a[2706]=0;
- a[2707]=0;
- a[2708]=0;
- a[2709]=0;
- a[2710]=0;
- a[2711]=0;
- a[2712]=0;
- a[2713]=0;
- a[2714]=0;
- a[2715]=0;
- a[2716]=0;
- a[2717]=0;
- a[2718]=0;
- a[2719]=0;
- a[2720]=0;
- a[2721]=0;
- a[2722]=0;
- a[2723]=0;
- a[2724]=0;
- a[2725]=0;
- a[2726]=0;
- a[2727]=0;
- a[2728]=0;
- a[2729]=0;
- a[2730]=0;
- a[2731]=0;
- a[2732]=0;
- a[2733]=0;
- a[2734]=0;
- a[2735]=0;
- a[2736]=0;
- a[2737]=0;
- a[2738]=0;
- a[2739]=0;
- a[2740]=0;
- a[2741]=0;
- a[2742]=0;
- a[2743]=0;
- a[2744]=0;
- a[2745]=0;
- a[2746]=0;
- a[2747]=0;
- a[2748]=0;
- a[2749]=0;
- a[2750]=0;
- a[2751]=0;
- a[2752]=0;
- a[2753]=0;
- a[2754]=0;
- a[2755]=0;
- a[2756]=0;
- a[2757]=0;
- a[2758]=0;
- a[2759]=0;
- a[2760]=0;
- a[2761]=0;
- a[2762]=0;
- a[2763]=0;
- a[2764]=0;
- a[2765]=0;
- a[2766]=0;
- a[2767]=0;
- a[2768]=0;
- a[2769]=0;
- a[2770]=0;
- a[2771]=0;
- a[2772]=0;
- a[2773]=0;
- a[2774]=0;
- a[2775]=0;
- a[2776]=0;
- a[2777]=0;
- a[2778]=0;
- a[2779]=0;
- a[2780]=0;
- a[2781]=0;
- a[2782]=0;
- a[2783]=0;
- a[2784]=0;
- a[2785]=0;
- a[2786]=0;
- a[2787]=0;
- a[2788]=0;
- a[2789]=0;
- a[2790]=0;
- a[2791]=0;
- a[2792]=0;
- a[2793]=0;
- a[2794]=0;
- a[2795]=0;
- a[2796]=0;
- a[2797]=0;
- a[2798]=0;
- a[2799]=0;
- a[2800]=0;
- a[2801]=0;
- a[2802]=0;
- a[2803]=0;
- a[2804]=0;
- a[2805]=0;
- a[2806]=0;
- a[2807]=0;
- a[2808]=0;
- a[2809]=0;
- a[2810]=0;
- a[2811]=0;
- a[2812]=0;
- a[2813]=0;
- a[2814]=0;
- a[2815]=0;
- a[2816]=0;
- a[2817]=0;
- a[2818]=0;
- a[2819]=0;
- a[2820]=0;
- a[2821]=0;
- a[2822]=0;
- a[2823]=0;
- a[2824]=0;
- a[2825]=0;
- a[2826]=0;
- a[2827]=0;
- a[2828]=0;
- a[2829]=0;
- a[2830]=0;
- a[2831]=0;
- a[2832]=0;
- a[2833]=0;
- a[2834]=0;
- a[2835]=0;
- a[2836]=0;
- a[2837]=0;
- a[2838]=0;
- a[2839]=0;
- a[2840]=0;
- a[2841]=0;
- a[2842]=0;
- a[2843]=0;
- a[2844]=0;
- a[2845]=0;
- a[2846]=0;
- a[2847]=0;
- a[2848]=0;
- a[2849]=0;
- a[2850]=0;
- a[2851]=0;
- a[2852]=0;
- a[2853]=0;
- a[2854]=0;
- a[2855]=0;
- a[2856]=0;
- a[2857]=0;
- a[2858]=0;
- a[2859]=0;
- a[2860]=0;
- a[2861]=0;
- a[2862]=0;
- a[2863]=0;
- a[2864]=0;
- a[2865]=0;
- a[2866]=0;
- a[2867]=0;
- a[2868]=0;
- a[2869]=0;
- a[2870]=0;
- a[2871]=0;
- a[2872]=0;
- a[2873]=0;
- a[2874]=0;
- a[2875]=0;
- a[2876]=0;
- a[2877]=0;
- a[2878]=0;
- a[2879]=0;
- a[2880]=0;
- a[2881]=0;
- a[2882]=0;
- a[2883]=0;
- a[2884]=0;
- a[2885]=0;
- a[2886]=0;
- a[2887]=0;
- a[2888]=0;
- a[2889]=0;
- a[2890]=0;
- a[2891]=0;
- a[2892]=0;
- a[2893]=0;
- a[2894]=0;
- a[2895]=0;
- a[2896]=0;
- a[2897]=0;
- a[2898]=0;
- a[2899]=0;
- a[2900]=0;
- a[2901]=0;
- a[2902]=0;
- a[2903]=0;
- a[2904]=0;
- a[2905]=0;
- a[2906]=0;
- a[2907]=0;
- a[2908]=0;
- a[2909]=0;
- a[2910]=0;
- a[2911]=0;
- a[2912]=0;
- a[2913]=0;
- a[2914]=0;
- a[2915]=0;
- a[2916]=0;
- a[2917]=0;
- a[2918]=0;
- a[2919]=0;
- a[2920]=0;
- a[2921]=0;
- a[2922]=0;
- a[2923]=0;
- a[2924]=0;
- a[2925]=0;
- a[2926]=0;
- a[2927]=0;
- a[2928]=0;
- a[2929]=0;
- a[2930]=0;
- a[2931]=0;
- a[2932]=0;
- a[2933]=0;
- a[2934]=0;
- a[2935]=0;
- a[2936]=0;
- a[2937]=0;
- a[2938]=0;
- a[2939]=0;
- a[2940]=0;
- a[2941]=0;
- a[2942]=0;
- a[2943]=0;
- a[2944]=0;
- a[2945]=0;
- a[2946]=0;
- a[2947]=0;
- a[2948]=0;
- a[2949]=0;
- a[2950]=0;
- a[2951]=0;
- a[2952]=0;
- a[2953]=0;
- a[2954]=0;
- a[2955]=0;
- a[2956]=0;
- a[2957]=0;
- a[2958]=0;
- a[2959]=0;
- a[2960]=0;
- a[2961]=0;
- a[2962]=0;
- a[2963]=0;
- a[2964]=0;
- a[2965]=0;
- a[2966]=0;
- a[2967]=0;
- a[2968]=0;
- a[2969]=0;
- a[2970]=0;
- a[2971]=0;
- a[2972]=0;
- a[2973]=0;
- a[2974]=0;
- a[2975]=0;
- a[2976]=0;
- a[2977]=0;
- a[2978]=0;
- a[2979]=0;
- a[2980]=0;
- a[2981]=0;
- a[2982]=0;
- a[2983]=0;
- a[2984]=0;
- a[2985]=0;
- a[2986]=0;
- a[2987]=0;
- a[2988]=0;
- a[2989]=0;
- a[2990]=0;
- a[2991]=0;
- a[2992]=0;
- a[2993]=0;
- a[2994]=0;
- a[2995]=0;
- a[2996]=0;
- a[2997]=0;
- a[2998]=0;
- a[2999]=0;
- a[3000]=0;
- a[3001]=0;
- a[3002]=0;
- a[3003]=0;
- a[3004]=0;
- a[3005]=0;
- a[3006]=0;
- a[3007]=0;
- a[3008]=0;
- a[3009]=0;
- a[3010]=0;
- a[3011]=0;
- a[3012]=0;
- a[3013]=0;
- a[3014]=0;
- a[3015]=0;
- a[3016]=0;
- a[3017]=0;
- a[3018]=0;
- a[3019]=0;
- a[3020]=0;
- a[3021]=0;
- a[3022]=0;
- a[3023]=0;
- a[3024]=0;
- a[3025]=0;
- a[3026]=0;
- a[3027]=0;
- a[3028]=0;
- a[3029]=0;
- a[3030]=0;
- a[3031]=0;
- a[3032]=0;
- a[3033]=0;
- a[3034]=0;
- a[3035]=0;
- a[3036]=0;
- a[3037]=0;
- a[3038]=0;
- a[3039]=0;
- a[3040]=0;
- a[3041]=0;
- a[3042]=0;
- a[3043]=0;
- a[3044]=0;
- a[3045]=0;
- a[3046]=0;
- a[3047]=0;
- a[3048]=0;
- a[3049]=0;
- a[3050]=0;
- a[3051]=0;
- a[3052]=0;
- a[3053]=0;
- a[3054]=0;
- a[3055]=0;
- a[3056]=0;
- a[3057]=0;
- a[3058]=0;
- a[3059]=0;
- a[3060]=0;
- a[3061]=0;
- a[3062]=0;
- a[3063]=0;
- a[3064]=0;
- a[3065]=0;
- a[3066]=0;
- a[3067]=0;
- a[3068]=0;
- a[3069]=0;
- a[3070]=0;
- a[3071]=0;
- a[3072]=0;
- a[3073]=0;
- a[3074]=0;
- a[3075]=0;
- a[3076]=0;
- a[3077]=0;
- a[3078]=0;
- a[3079]=0;
- a[3080]=0;
- a[3081]=0;
- a[3082]=0;
- a[3083]=0;
- a[3084]=0;
- a[3085]=0;
- a[3086]=0;
- a[3087]=0;
- a[3088]=0;
- a[3089]=0;
- a[3090]=0;
- a[3091]=0;
- a[3092]=0;
- a[3093]=0;
- a[3094]=0;
- a[3095]=0;
- a[3096]=0;
- a[3097]=0;
- a[3098]=0;
- a[3099]=0;
- a[3100]=0;
- a[3101]=0;
- a[3102]=0;
- a[3103]=0;
- a[3104]=0;
- a[3105]=0;
- a[3106]=0;
- a[3107]=0;
- a[3108]=0;
- a[3109]=0;
- a[3110]=0;
- a[3111]=0;
- a[3112]=0;
- a[3113]=0;
- a[3114]=0;
- a[3115]=0;
- a[3116]=0;
- a[3117]=0;
- a[3118]=0;
- a[3119]=0;
- a[3120]=0;
- a[3121]=0;
- a[3122]=0;
- a[3123]=0;
- a[3124]=0;
- a[3125]=0;
- a[3126]=0;
- a[3127]=0;
- a[3128]=0;
- a[3129]=0;
- a[3130]=0;
- a[3131]=0;
- a[3132]=0;
- a[3133]=0;
- a[3134]=0;
- a[3135]=0;
- a[3136]=0;
- a[3137]=0;
- a[3138]=0;
- a[3139]=0;
- a[3140]=0;
- a[3141]=0;
- a[3142]=0;
- a[3143]=0;
- a[3144]=0;
- a[3145]=0;
- a[3146]=0;
- a[3147]=0;
- a[3148]=0;
- a[3149]=0;
- a[3150]=0;
- a[3151]=0;
- a[3152]=0;
- a[3153]=0;
- a[3154]=0;
- a[3155]=0;
- a[3156]=0;
- a[3157]=0;
- a[3158]=0;
- a[3159]=0;
- a[3160]=0;
- a[3161]=0;
- a[3162]=0;
- a[3163]=0;
- a[3164]=0;
- a[3165]=0;
- a[3166]=0;
- a[3167]=0;
- a[3168]=0;
- a[3169]=0;
- a[3170]=0;
- a[3171]=0;
- a[3172]=0;
- a[3173]=0;
- a[3174]=0;
- a[3175]=0;
- a[3176]=0;
- a[3177]=0;
- a[3178]=0;
- a[3179]=0;
- a[3180]=0;
- a[3181]=0;
- a[3182]=0;
- a[3183]=0;
- a[3184]=0;
- a[3185]=0;
- a[3186]=0;
- a[3187]=0;
- a[3188]=0;
- a[3189]=0;
- a[3190]=0;
- a[3191]=0;
- a[3192]=0;
- a[3193]=0;
- a[3194]=0;
- a[3195]=0;
- a[3196]=0;
- a[3197]=0;
- a[3198]=0;
- a[3199]=0;
- a[3200]=0;
- a[3201]=0;
- a[3202]=0;
- a[3203]=0;
- a[3204]=0;
- a[3205]=0;
- a[3206]=0;
- a[3207]=0;
- a[3208]=0;
- a[3209]=0;
- a[3210]=0;
- a[3211]=0;
- a[3212]=0;
- a[3213]=0;
- a[3214]=0;
- a[3215]=0;
- a[3216]=0;
- a[3217]=0;
- a[3218]=0;
- a[3219]=0;
- a[3220]=0;
- a[3221]=0;
- a[3222]=0;
- a[3223]=0;
- a[3224]=0;
- a[3225]=0;
- a[3226]=0;
- a[3227]=0;
- a[3228]=0;
- a[3229]=0;
- a[3230]=0;
- a[3231]=0;
- a[3232]=0;
- a[3233]=0;
- a[3234]=0;
- a[3235]=0;
- a[3236]=0;
- a[3237]=0;
- a[3238]=0;
- a[3239]=0;
- a[3240]=0;
- a[3241]=0;
- a[3242]=0;
- a[3243]=0;
- a[3244]=0;
- a[3245]=0;
- a[3246]=0;
- a[3247]=0;
- a[3248]=0;
- a[3249]=0;
- a[3250]=0;
- a[3251]=0;
- a[3252]=0;
- a[3253]=0;
- a[3254]=0;
- a[3255]=0;
- a[3256]=0;
- a[3257]=0;
- a[3258]=0;
- a[3259]=0;
- a[3260]=0;
- a[3261]=0;
- a[3262]=0;
- a[3263]=0;
- a[3264]=0;
- a[3265]=0;
- a[3266]=0;
- a[3267]=0;
- a[3268]=0;
- a[3269]=0;
- a[3270]=0;
- a[3271]=0;
- a[3272]=0;
- a[3273]=0;
- a[3274]=0;
- a[3275]=0;
- a[3276]=0;
- a[3277]=0;
- a[3278]=0;
- a[3279]=0;
- a[3280]=0;
- a[3281]=0;
- a[3282]=0;
- a[3283]=0;
- a[3284]=0;
- a[3285]=0;
- a[3286]=0;
- a[3287]=0;
- a[3288]=0;
- a[3289]=0;
- a[3290]=0;
- a[3291]=0;
- a[3292]=0;
- a[3293]=0;
- a[3294]=0;
- a[3295]=0;
- a[3296]=0;
- a[3297]=0;
- a[3298]=0;
- a[3299]=0;
- a[3300]=0;
- a[3301]=0;
- a[3302]=0;
- a[3303]=0;
- a[3304]=0;
- a[3305]=0;
- a[3306]=0;
- a[3307]=0;
- a[3308]=0;
- a[3309]=0;
- a[3310]=0;
- a[3311]=0;
- a[3312]=0;
- a[3313]=0;
- a[3314]=0;
- a[3315]=0;
- a[3316]=0;
- a[3317]=0;
- a[3318]=0;
- a[3319]=0;
- a[3320]=0;
- a[3321]=0;
- a[3322]=0;
- a[3323]=0;
- a[3324]=0;
- a[3325]=0;
- a[3326]=0;
- a[3327]=0;
- a[3328]=0;
- a[3329]=0;
- a[3330]=0;
- a[3331]=0;
- a[3332]=0;
- a[3333]=0;
- a[3334]=0;
- a[3335]=0;
- a[3336]=0;
- a[3337]=0;
- a[3338]=0;
- a[3339]=0;
- a[3340]=0;
- a[3341]=0;
- a[3342]=0;
- a[3343]=0;
- a[3344]=0;
- a[3345]=0;
- a[3346]=0;
- a[3347]=0;
- a[3348]=0;
- a[3349]=0;
- a[3350]=0;
- a[3351]=0;
- a[3352]=0;
- a[3353]=0;
- a[3354]=0;
- a[3355]=0;
- a[3356]=0;
- a[3357]=0;
- a[3358]=0;
- a[3359]=0;
- a[3360]=0;
- a[3361]=0;
- a[3362]=0;
- a[3363]=0;
- a[3364]=0;
- a[3365]=0;
- a[3366]=0;
- a[3367]=0;
- a[3368]=0;
- a[3369]=0;
- a[3370]=0;
- a[3371]=0;
- a[3372]=0;
- a[3373]=0;
- a[3374]=0;
- a[3375]=0;
- a[3376]=0;
- a[3377]=0;
- a[3378]=0;
- a[3379]=0;
- a[3380]=0;
- a[3381]=0;
- a[3382]=0;
- a[3383]=0;
- a[3384]=0;
- a[3385]=0;
- a[3386]=0;
- a[3387]=0;
- a[3388]=0;
- a[3389]=0;
- a[3390]=0;
- a[3391]=0;
- a[3392]=0;
- a[3393]=0;
- a[3394]=0;
- a[3395]=0;
- a[3396]=0;
- a[3397]=0;
- a[3398]=0;
- a[3399]=0;
- a[3400]=0;
- a[3401]=0;
- a[3402]=0;
- a[3403]=0;
- a[3404]=0;
- a[3405]=0;
- a[3406]=0;
- a[3407]=0;
- a[3408]=0;
- a[3409]=0;
- a[3410]=0;
- a[3411]=0;
- a[3412]=0;
- a[3413]=0;
- a[3414]=0;
- a[3415]=0;
- a[3416]=0;
- a[3417]=0;
- a[3418]=0;
- a[3419]=0;
- a[3420]=0;
- a[3421]=0;
- a[3422]=0;
- a[3423]=0;
- a[3424]=0;
- a[3425]=0;
- a[3426]=0;
- a[3427]=0;
- a[3428]=0;
- a[3429]=0;
- a[3430]=0;
- a[3431]=0;
- a[3432]=0;
- a[3433]=0;
- a[3434]=0;
- a[3435]=0;
- a[3436]=0;
- a[3437]=0;
- a[3438]=0;
- a[3439]=0;
- a[3440]=0;
- a[3441]=0;
- a[3442]=0;
- a[3443]=0;
- a[3444]=0;
- a[3445]=0;
- a[3446]=0;
- a[3447]=0;
- a[3448]=0;
- a[3449]=0;
- a[3450]=0;
- a[3451]=0;
- a[3452]=0;
- a[3453]=0;
- a[3454]=0;
- a[3455]=0;
- a[3456]=0;
- a[3457]=0;
- a[3458]=0;
- a[3459]=0;
- a[3460]=0;
- a[3461]=0;
- a[3462]=0;
- a[3463]=0;
- a[3464]=0;
- a[3465]=0;
- a[3466]=0;
- a[3467]=0;
- a[3468]=0;
- a[3469]=0;
- a[3470]=0;
- a[3471]=0;
- a[3472]=0;
- a[3473]=0;
- a[3474]=0;
- a[3475]=0;
- a[3476]=0;
- a[3477]=0;
- a[3478]=0;
- a[3479]=0;
- a[3480]=0;
- a[3481]=0;
- a[3482]=0;
- a[3483]=0;
- a[3484]=0;
- a[3485]=0;
- a[3486]=0;
- a[3487]=0;
- a[3488]=0;
- a[3489]=0;
- a[3490]=0;
- a[3491]=0;
- a[3492]=0;
- a[3493]=0;
- a[3494]=0;
- a[3495]=0;
- a[3496]=0;
- a[3497]=0;
- a[3498]=0;
- a[3499]=0;
- a[3500]=0;
- a[3501]=0;
- a[3502]=0;
- a[3503]=0;
- a[3504]=0;
- a[3505]=0;
- a[3506]=0;
- a[3507]=0;
- a[3508]=0;
- a[3509]=0;
- a[3510]=0;
- a[3511]=0;
- a[3512]=0;
- a[3513]=0;
- a[3514]=0;
- a[3515]=0;
- a[3516]=0;
- a[3517]=0;
- a[3518]=0;
- a[3519]=0;
- a[3520]=0;
- a[3521]=0;
- a[3522]=0;
- a[3523]=0;
- a[3524]=0;
- a[3525]=0;
- a[3526]=0;
- a[3527]=0;
- a[3528]=0;
- a[3529]=0;
- a[3530]=0;
- a[3531]=0;
- a[3532]=0;
- a[3533]=0;
- a[3534]=0;
- a[3535]=0;
- a[3536]=0;
- a[3537]=0;
- a[3538]=0;
- a[3539]=0;
- a[3540]=0;
- a[3541]=0;
- a[3542]=0;
- a[3543]=0;
- a[3544]=0;
- a[3545]=0;
- a[3546]=0;
- a[3547]=0;
- a[3548]=0;
- a[3549]=0;
- a[3550]=0;
- a[3551]=0;
- a[3552]=0;
- a[3553]=0;
- a[3554]=0;
- a[3555]=0;
- a[3556]=0;
- a[3557]=0;
- a[3558]=0;
- a[3559]=0;
- a[3560]=0;
- a[3561]=0;
- a[3562]=0;
- a[3563]=0;
- a[3564]=0;
- a[3565]=0;
- a[3566]=0;
- a[3567]=0;
- a[3568]=0;
- a[3569]=0;
- a[3570]=0;
- a[3571]=0;
- a[3572]=0;
- a[3573]=0;
- a[3574]=0;
- a[3575]=0;
- a[3576]=0;
- a[3577]=0;
- a[3578]=0;
- a[3579]=0;
- a[3580]=0;
- a[3581]=0;
- a[3582]=0;
- a[3583]=0;
- a[3584]=0;
- a[3585]=0;
- a[3586]=0;
- a[3587]=0;
- a[3588]=0;
- a[3589]=0;
- a[3590]=0;
- a[3591]=0;
- a[3592]=0;
- a[3593]=0;
- a[3594]=0;
- a[3595]=0;
- a[3596]=0;
- a[3597]=0;
- a[3598]=0;
- a[3599]=0;
- a[3600]=0;
- a[3601]=0;
- a[3602]=0;
- a[3603]=0;
- a[3604]=0;
- a[3605]=0;
- a[3606]=0;
- a[3607]=0;
- a[3608]=0;
- a[3609]=0;
- a[3610]=0;
- a[3611]=0;
- a[3612]=0;
- a[3613]=0;
- a[3614]=0;
- a[3615]=0;
- a[3616]=0;
- a[3617]=0;
- a[3618]=0;
- a[3619]=0;
- a[3620]=0;
- a[3621]=0;
- a[3622]=0;
- a[3623]=0;
- a[3624]=0;
- a[3625]=0;
- a[3626]=0;
- a[3627]=0;
- a[3628]=0;
- a[3629]=0;
- a[3630]=0;
- a[3631]=0;
- a[3632]=0;
- a[3633]=0;
- a[3634]=0;
- a[3635]=0;
- a[3636]=0;
- a[3637]=0;
- a[3638]=0;
- a[3639]=0;
- a[3640]=0;
- a[3641]=0;
- a[3642]=0;
- a[3643]=0;
- a[3644]=0;
- a[3645]=0;
- a[3646]=0;
- a[3647]=0;
- a[3648]=0;
- a[3649]=0;
- a[3650]=0;
- a[3651]=0;
- a[3652]=0;
- a[3653]=0;
- a[3654]=0;
- a[3655]=0;
- a[3656]=0;
- a[3657]=0;
- a[3658]=0;
- a[3659]=0;
- a[3660]=0;
- a[3661]=0;
- a[3662]=0;
- a[3663]=0;
- a[3664]=0;
- a[3665]=0;
- a[3666]=0;
- a[3667]=0;
- a[3668]=0;
- a[3669]=0;
- a[3670]=0;
- a[3671]=0;
- a[3672]=0;
- a[3673]=0;
- a[3674]=0;
- a[3675]=0;
- a[3676]=0;
- a[3677]=0;
- a[3678]=0;
- a[3679]=0;
- a[3680]=0;
- a[3681]=0;
- a[3682]=0;
- a[3683]=0;
- a[3684]=0;
- a[3685]=0;
- a[3686]=0;
- a[3687]=0;
- a[3688]=0;
- a[3689]=0;
- a[3690]=0;
- a[3691]=0;
- a[3692]=0;
- a[3693]=0;
- a[3694]=0;
- a[3695]=0;
- a[3696]=0;
- a[3697]=0;
- a[3698]=0;
- a[3699]=0;
- a[3700]=0;
- a[3701]=0;
- a[3702]=0;
- a[3703]=0;
- a[3704]=0;
- a[3705]=0;
- a[3706]=0;
- a[3707]=0;
- a[3708]=0;
- a[3709]=0;
- a[3710]=0;
- a[3711]=0;
- a[3712]=0;
- a[3713]=0;
- a[3714]=0;
- a[3715]=0;
- a[3716]=0;
- a[3717]=0;
- a[3718]=0;
- a[3719]=0;
- a[3720]=0;
- a[3721]=0;
- a[3722]=0;
- a[3723]=0;
- a[3724]=0;
- a[3725]=0;
- a[3726]=0;
- a[3727]=0;
- a[3728]=0;
- a[3729]=0;
- a[3730]=0;
- a[3731]=0;
- a[3732]=0;
- a[3733]=0;
- a[3734]=0;
- a[3735]=0;
- a[3736]=0;
- a[3737]=0;
- a[3738]=0;
- a[3739]=0;
- a[3740]=0;
- a[3741]=0;
- a[3742]=0;
- a[3743]=0;
- a[3744]=0;
- a[3745]=0;
- a[3746]=0;
- a[3747]=0;
- a[3748]=0;
- a[3749]=0;
- a[3750]=0;
- a[3751]=0;
- a[3752]=0;
- a[3753]=0;
- a[3754]=0;
- a[3755]=0;
- a[3756]=0;
- a[3757]=0;
- a[3758]=0;
- a[3759]=0;
- a[3760]=0;
- a[3761]=0;
- a[3762]=0;
- a[3763]=0;
- a[3764]=0;
- a[3765]=0;
- a[3766]=0;
- a[3767]=0;
- a[3768]=0;
- a[3769]=0;
- a[3770]=0;
- a[3771]=0;
- a[3772]=0;
- a[3773]=0;
- a[3774]=0;
- a[3775]=0;
- a[3776]=0;
- a[3777]=0;
- a[3778]=0;
- a[3779]=0;
- a[3780]=0;
- a[3781]=0;
- a[3782]=0;
- a[3783]=0;
- a[3784]=0;
- a[3785]=0;
- a[3786]=0;
- a[3787]=0;
- a[3788]=0;
- a[3789]=0;
- a[3790]=0;
- a[3791]=0;
- a[3792]=0;
- a[3793]=0;
- a[3794]=0;
- a[3795]=0;
- a[3796]=0;
- a[3797]=0;
- a[3798]=0;
- a[3799]=0;
- a[3800]=0;
- a[3801]=0;
- a[3802]=0;
- a[3803]=0;
- a[3804]=0;
- a[3805]=0;
- a[3806]=0;
- a[3807]=0;
- a[3808]=0;
- a[3809]=0;
- a[3810]=0;
- a[3811]=0;
- a[3812]=0;
- a[3813]=0;
- a[3814]=0;
- a[3815]=0;
- a[3816]=0;
- a[3817]=0;
- a[3818]=0;
- a[3819]=0;
- a[3820]=0;
- a[3821]=0;
- a[3822]=0;
- a[3823]=0;
- a[3824]=0;
- a[3825]=0;
- a[3826]=0;
- a[3827]=0;
- a[3828]=0;
- a[3829]=0;
- a[3830]=0;
- a[3831]=0;
- a[3832]=0;
- a[3833]=0;
- a[3834]=0;
- a[3835]=0;
- a[3836]=0;
- a[3837]=0;
- a[3838]=0;
- a[3839]=0;
- a[3840]=0;
- a[3841]=0;
- a[3842]=0;
- a[3843]=0;
- a[3844]=0;
- a[3845]=0;
- a[3846]=0;
- a[3847]=0;
- a[3848]=0;
- a[3849]=0;
- a[3850]=0;
- a[3851]=0;
- a[3852]=0;
- a[3853]=0;
- a[3854]=0;
- a[3855]=0;
- a[3856]=0;
- a[3857]=0;
- a[3858]=0;
- a[3859]=0;
- a[3860]=0;
- a[3861]=0;
- a[3862]=0;
- a[3863]=0;
- a[3864]=0;
- a[3865]=0;
- a[3866]=0;
- a[3867]=0;
- a[3868]=0;
- a[3869]=0;
- a[3870]=0;
- a[3871]=0;
- a[3872]=0;
- a[3873]=0;
- a[3874]=0;
- a[3875]=0;
- a[3876]=0;
- a[3877]=0;
- a[3878]=0;
- a[3879]=0;
- a[3880]=0;
- a[3881]=0;
- a[3882]=0;
- a[3883]=0;
- a[3884]=0;
- a[3885]=0;
- a[3886]=0;
- a[3887]=0;
- a[3888]=0;
- a[3889]=0;
- a[3890]=0;
- a[3891]=0;
- a[3892]=0;
- a[3893]=0;
- a[3894]=0;
- a[3895]=0;
- a[3896]=0;
- a[3897]=0;
- a[3898]=0;
- a[3899]=0;
- a[3900]=0;
- a[3901]=0;
- a[3902]=0;
- a[3903]=0;
- a[3904]=0;
- a[3905]=0;
- a[3906]=0;
- a[3907]=0;
- a[3908]=0;
- a[3909]=0;
- a[3910]=0;
- a[3911]=0;
- a[3912]=0;
- a[3913]=0;
- a[3914]=0;
- a[3915]=0;
- a[3916]=0;
- a[3917]=0;
- a[3918]=0;
- a[3919]=0;
- a[3920]=0;
- a[3921]=0;
- a[3922]=0;
- a[3923]=0;
- a[3924]=0;
- a[3925]=0;
- a[3926]=0;
- a[3927]=0;
- a[3928]=0;
- a[3929]=0;
- a[3930]=0;
- a[3931]=0;
- a[3932]=0;
- a[3933]=0;
- a[3934]=0;
- a[3935]=0;
- a[3936]=0;
- a[3937]=0;
- a[3938]=0;
- a[3939]=0;
- a[3940]=0;
- a[3941]=0;
- a[3942]=0;
- a[3943]=0;
- a[3944]=0;
- a[3945]=0;
- a[3946]=0;
- a[3947]=0;
- a[3948]=0;
- a[3949]=0;
- a[3950]=0;
- a[3951]=0;
- a[3952]=0;
- a[3953]=0;
- a[3954]=0;
- a[3955]=0;
- a[3956]=0;
- a[3957]=0;
- a[3958]=0;
- a[3959]=0;
- a[3960]=0;
- a[3961]=0;
- a[3962]=0;
- a[3963]=0;
- a[3964]=0;
- a[3965]=0;
- a[3966]=0;
- a[3967]=0;
- a[3968]=0;
- a[3969]=0;
- a[3970]=0;
- a[3971]=0;
- a[3972]=0;
- a[3973]=0;
- a[3974]=0;
- a[3975]=0;
- a[3976]=0;
- a[3977]=0;
- a[3978]=0;
- a[3979]=0;
- a[3980]=0;
- a[3981]=0;
- a[3982]=0;
- a[3983]=0;
- a[3984]=0;
- a[3985]=0;
- a[3986]=0;
- a[3987]=0;
- a[3988]=0;
- a[3989]=0;
- a[3990]=0;
- a[3991]=0;
- a[3992]=0;
- a[3993]=0;
- a[3994]=0;
- a[3995]=0;
- a[3996]=0;
- a[3997]=0;
- a[3998]=0;
- a[3999]=0;
- a[4000]=0;
- a[4001]=0;
- a[4002]=0;
- a[4003]=0;
- a[4004]=0;
- a[4005]=0;
- a[4006]=0;
- a[4007]=0;
- a[4008]=0;
- a[4009]=0;
- a[4010]=0;
- a[4011]=0;
- a[4012]=0;
- a[4013]=0;
- a[4014]=0;
- a[4015]=0;
- a[4016]=0;
- a[4017]=0;
- a[4018]=0;
- a[4019]=0;
- a[4020]=0;
- a[4021]=0;
- a[4022]=0;
- a[4023]=0;
- a[4024]=0;
- a[4025]=0;
- a[4026]=0;
- a[4027]=0;
- a[4028]=0;
- a[4029]=0;
- a[4030]=0;
- a[4031]=0;
- a[4032]=0;
- a[4033]=0;
- a[4034]=0;
- a[4035]=0;
- a[4036]=0;
- a[4037]=0;
- a[4038]=0;
- a[4039]=0;
- a[4040]=0;
- a[4041]=0;
- a[4042]=0;
- a[4043]=0;
- a[4044]=0;
- a[4045]=0;
- a[4046]=0;
- a[4047]=0;
- a[4048]=0;
- a[4049]=0;
- a[4050]=0;
- a[4051]=0;
- a[4052]=0;
- a[4053]=0;
- a[4054]=0;
- a[4055]=0;
- a[4056]=0;
- a[4057]=0;
- a[4058]=0;
- a[4059]=0;
- a[4060]=0;
- a[4061]=0;
- a[4062]=0;
- a[4063]=0;
- a[4064]=0;
- a[4065]=0;
- a[4066]=0;
- a[4067]=0;
- a[4068]=0;
- a[4069]=0;
- a[4070]=0;
- a[4071]=0;
- a[4072]=0;
- a[4073]=0;
- a[4074]=0;
- a[4075]=0;
- a[4076]=0;
- a[4077]=0;
- a[4078]=0;
- a[4079]=0;
- a[4080]=0;
- a[4081]=0;
- a[4082]=0;
- a[4083]=0;
- a[4084]=0;
- a[4085]=0;
- a[4086]=0;
- a[4087]=0;
- a[4088]=0;
- a[4089]=0;
- a[4090]=0;
- a[4091]=0;
- a[4092]=0;
- a[4093]=0;
- a[4094]=0;
- a[4095]=0;
- a[4096]=0;
- a[4097]=0;
- a[4098]=0;
- a[4099]=0;
- a[4100]=0;
- a[4101]=0;
- a[4102]=0;
- a[4103]=0;
- a[4104]=0;
- a[4105]=0;
- a[4106]=0;
- a[4107]=0;
- a[4108]=0;
- a[4109]=0;
- a[4110]=0;
- a[4111]=0;
- a[4112]=0;
- a[4113]=0;
- a[4114]=0;
- a[4115]=0;
- a[4116]=0;
- a[4117]=0;
- a[4118]=0;
- a[4119]=0;
- a[4120]=0;
- a[4121]=0;
- a[4122]=0;
- a[4123]=0;
- a[4124]=0;
- a[4125]=0;
- a[4126]=0;
- a[4127]=0;
- a[4128]=0;
- a[4129]=0;
- a[4130]=0;
- a[4131]=0;
- a[4132]=0;
- a[4133]=0;
- a[4134]=0;
- a[4135]=0;
- a[4136]=0;
- a[4137]=0;
- a[4138]=0;
- a[4139]=0;
- a[4140]=0;
- a[4141]=0;
- a[4142]=0;
- a[4143]=0;
- a[4144]=0;
- a[4145]=0;
- a[4146]=0;
- a[4147]=0;
- a[4148]=0;
- a[4149]=0;
- a[4150]=0;
- a[4151]=0;
- a[4152]=0;
- a[4153]=0;
- a[4154]=0;
- a[4155]=0;
- a[4156]=0;
- a[4157]=0;
- a[4158]=0;
- a[4159]=0;
- a[4160]=0;
- a[4161]=0;
- a[4162]=0;
- a[4163]=0;
- a[4164]=0;
- a[4165]=0;
- a[4166]=0;
- a[4167]=0;
- a[4168]=0;
- a[4169]=0;
- a[4170]=0;
- a[4171]=0;
- a[4172]=0;
- a[4173]=0;
- a[4174]=0;
- a[4175]=0;
- a[4176]=0;
- a[4177]=0;
- a[4178]=0;
- a[4179]=0;
- a[4180]=0;
- a[4181]=0;
- a[4182]=0;
- a[4183]=0;
- a[4184]=0;
- a[4185]=0;
- a[4186]=0;
- a[4187]=0;
- a[4188]=0;
- a[4189]=0;
- a[4190]=0;
- a[4191]=0;
- a[4192]=0;
- a[4193]=0;
- a[4194]=0;
- a[4195]=0;
- a[4196]=0;
- a[4197]=0;
- a[4198]=0;
- a[4199]=0;
- a[4200]=0;
- a[4201]=0;
- a[4202]=0;
- a[4203]=0;
- a[4204]=0;
- a[4205]=0;
- a[4206]=0;
- a[4207]=0;
- a[4208]=0;
- a[4209]=0;
- a[4210]=0;
- a[4211]=0;
- a[4212]=0;
- a[4213]=0;
- a[4214]=0;
- a[4215]=0;
- a[4216]=0;
- a[4217]=0;
- a[4218]=0;
- a[4219]=0;
- a[4220]=0;
- a[4221]=0;
- a[4222]=0;
- a[4223]=0;
- a[4224]=0;
- a[4225]=0;
- a[4226]=0;
- a[4227]=0;
- a[4228]=0;
- a[4229]=0;
- a[4230]=0;
- a[4231]=0;
- a[4232]=0;
- a[4233]=0;
- a[4234]=0;
- a[4235]=0;
- a[4236]=0;
- a[4237]=0;
- a[4238]=0;
- a[4239]=0;
- a[4240]=0;
- a[4241]=0;
- a[4242]=0;
- a[4243]=0;
- a[4244]=0;
- a[4245]=0;
- a[4246]=0;
- a[4247]=0;
- a[4248]=0;
- a[4249]=0;
- a[4250]=0;
- a[4251]=0;
- a[4252]=0;
- a[4253]=0;
- a[4254]=0;
- a[4255]=0;
- a[4256]=0;
- a[4257]=0;
- a[4258]=0;
- a[4259]=0;
- a[4260]=0;
- a[4261]=0;
- a[4262]=0;
- a[4263]=0;
- a[4264]=0;
- a[4265]=0;
- a[4266]=0;
- a[4267]=0;
- a[4268]=0;
- a[4269]=0;
- a[4270]=0;
- a[4271]=0;
- a[4272]=0;
- a[4273]=0;
- a[4274]=0;
- a[4275]=0;
- a[4276]=0;
- a[4277]=0;
- a[4278]=0;
- a[4279]=0;
- a[4280]=0;
- a[4281]=0;
- a[4282]=0;
- a[4283]=0;
- a[4284]=0;
- a[4285]=0;
- a[4286]=0;
- a[4287]=0;
- a[4288]=0;
- a[4289]=0;
- a[4290]=0;
- a[4291]=0;
- a[4292]=0;
- a[4293]=0;
- a[4294]=0;
- a[4295]=0;
- a[4296]=0;
- a[4297]=0;
- a[4298]=0;
- a[4299]=0;
- a[4300]=0;
- a[4301]=0;
- a[4302]=0;
- a[4303]=0;
- a[4304]=0;
- a[4305]=0;
- a[4306]=0;
- a[4307]=0;
- a[4308]=0;
- a[4309]=0;
- a[4310]=0;
- a[4311]=0;
- a[4312]=0;
- a[4313]=0;
- a[4314]=0;
- a[4315]=0;
- a[4316]=0;
- a[4317]=0;
- a[4318]=0;
- a[4319]=0;
- a[4320]=0;
- a[4321]=0;
- a[4322]=0;
- a[4323]=0;
- a[4324]=0;
- a[4325]=0;
- a[4326]=0;
- a[4327]=0;
- a[4328]=0;
- a[4329]=0;
- a[4330]=0;
- a[4331]=0;
- a[4332]=0;
- a[4333]=0;
- a[4334]=0;
- a[4335]=0;
- a[4336]=0;
- a[4337]=0;
- a[4338]=0;
- a[4339]=0;
- a[4340]=0;
- a[4341]=0;
- a[4342]=0;
- a[4343]=0;
- a[4344]=0;
- a[4345]=0;
- a[4346]=0;
- a[4347]=0;
- a[4348]=0;
- a[4349]=0;
- a[4350]=0;
- a[4351]=0;
- a[4352]=0;
- a[4353]=0;
- a[4354]=0;
- a[4355]=0;
- a[4356]=0;
- a[4357]=0;
- a[4358]=0;
- a[4359]=0;
- a[4360]=0;
- a[4361]=0;
- a[4362]=0;
- a[4363]=0;
- a[4364]=0;
- a[4365]=0;
- a[4366]=0;
- a[4367]=0;
- a[4368]=0;
- a[4369]=0;
- a[4370]=0;
- a[4371]=0;
- a[4372]=0;
- a[4373]=0;
- a[4374]=0;
- a[4375]=0;
- a[4376]=0;
- a[4377]=0;
- a[4378]=0;
- a[4379]=0;
- a[4380]=0;
- a[4381]=0;
- a[4382]=0;
- a[4383]=0;
- a[4384]=0;
- a[4385]=0;
- a[4386]=0;
- a[4387]=0;
- a[4388]=0;
- a[4389]=0;
- a[4390]=0;
- a[4391]=0;
- a[4392]=0;
- a[4393]=0;
- a[4394]=0;
- a[4395]=0;
- a[4396]=0;
- a[4397]=0;
- a[4398]=0;
- a[4399]=0;
- a[4400]=0;
- a[4401]=0;
- a[4402]=0;
- a[4403]=0;
- a[4404]=0;
- a[4405]=0;
- a[4406]=0;
- a[4407]=0;
- a[4408]=0;
- a[4409]=0;
- a[4410]=0;
- a[4411]=0;
- a[4412]=0;
- a[4413]=0;
- a[4414]=0;
- a[4415]=0;
- a[4416]=0;
- a[4417]=0;
- a[4418]=0;
- a[4419]=0;
- a[4420]=0;
- a[4421]=0;
- a[4422]=0;
- a[4423]=0;
- a[4424]=0;
- a[4425]=0;
- a[4426]=0;
- a[4427]=0;
- a[4428]=0;
- a[4429]=0;
- a[4430]=0;
- a[4431]=0;
- a[4432]=0;
- a[4433]=0;
- a[4434]=0;
- a[4435]=0;
- a[4436]=0;
- a[4437]=0;
- a[4438]=0;
- a[4439]=0;
- a[4440]=0;
- a[4441]=0;
- a[4442]=0;
- a[4443]=0;
- a[4444]=0;
- a[4445]=0;
- a[4446]=0;
- a[4447]=0;
- a[4448]=0;
- a[4449]=0;
- a[4450]=0;
- a[4451]=0;
- a[4452]=0;
- a[4453]=0;
- a[4454]=0;
- a[4455]=0;
- a[4456]=0;
- a[4457]=0;
- a[4458]=0;
- a[4459]=0;
- a[4460]=0;
- a[4461]=0;
- a[4462]=0;
- a[4463]=0;
- a[4464]=0;
- a[4465]=0;
- a[4466]=0;
- a[4467]=0;
- a[4468]=0;
- a[4469]=0;
- a[4470]=0;
- a[4471]=0;
- a[4472]=0;
- a[4473]=0;
- a[4474]=0;
- a[4475]=0;
- a[4476]=0;
- a[4477]=0;
- a[4478]=0;
- a[4479]=0;
- a[4480]=0;
- a[4481]=0;
- a[4482]=0;
- a[4483]=0;
- a[4484]=0;
- a[4485]=0;
- a[4486]=0;
- a[4487]=0;
- a[4488]=0;
- a[4489]=0;
- a[4490]=0;
- a[4491]=0;
- a[4492]=0;
- a[4493]=0;
- a[4494]=0;
- a[4495]=0;
- a[4496]=0;
- a[4497]=0;
- a[4498]=0;
- a[4499]=0;
- a[4500]=0;
- a[4501]=0;
- a[4502]=0;
- a[4503]=0;
- a[4504]=0;
- a[4505]=0;
- a[4506]=0;
- a[4507]=0;
- a[4508]=0;
- a[4509]=0;
- a[4510]=0;
- a[4511]=0;
- a[4512]=0;
- a[4513]=0;
- a[4514]=0;
- a[4515]=0;
- a[4516]=0;
- a[4517]=0;
- a[4518]=0;
- a[4519]=0;
- a[4520]=0;
- a[4521]=0;
- a[4522]=0;
- a[4523]=0;
- a[4524]=0;
- a[4525]=0;
- a[4526]=0;
- a[4527]=0;
- a[4528]=0;
- a[4529]=0;
- a[4530]=0;
- a[4531]=0;
- a[4532]=0;
- a[4533]=0;
- a[4534]=0;
- a[4535]=0;
- a[4536]=0;
- a[4537]=0;
- a[4538]=0;
- a[4539]=0;
- a[4540]=0;
- a[4541]=0;
- a[4542]=0;
- a[4543]=0;
- a[4544]=0;
- a[4545]=0;
- a[4546]=0;
- a[4547]=0;
- a[4548]=0;
- a[4549]=0;
- a[4550]=0;
- a[4551]=0;
- a[4552]=0;
- a[4553]=0;
- a[4554]=0;
- a[4555]=0;
- a[4556]=0;
- a[4557]=0;
- a[4558]=0;
- a[4559]=0;
- a[4560]=0;
- a[4561]=0;
- a[4562]=0;
- a[4563]=0;
- a[4564]=0;
- a[4565]=0;
- a[4566]=0;
- a[4567]=0;
- a[4568]=0;
- a[4569]=0;
- a[4570]=0;
- a[4571]=0;
- a[4572]=0;
- a[4573]=0;
- a[4574]=0;
- a[4575]=0;
- a[4576]=0;
- a[4577]=0;
- a[4578]=0;
- a[4579]=0;
- a[4580]=0;
- a[4581]=0;
- a[4582]=0;
- a[4583]=0;
- a[4584]=0;
- a[4585]=0;
- a[4586]=0;
- a[4587]=0;
- a[4588]=0;
- a[4589]=0;
- a[4590]=0;
- a[4591]=0;
- a[4592]=0;
- a[4593]=0;
- a[4594]=0;
- a[4595]=0;
- a[4596]=0;
- a[4597]=0;
- a[4598]=0;
- a[4599]=0;
- a[4600]=0;
- a[4601]=0;
- a[4602]=0;
- a[4603]=0;
- a[4604]=0;
- a[4605]=0;
- a[4606]=0;
- a[4607]=0;
- a[4608]=0;
- a[4609]=0;
- a[4610]=0;
- a[4611]=0;
- a[4612]=0;
- a[4613]=0;
- a[4614]=0;
- a[4615]=0;
- a[4616]=0;
- a[4617]=0;
- a[4618]=0;
- a[4619]=0;
- a[4620]=0;
- a[4621]=0;
- a[4622]=0;
- a[4623]=0;
- a[4624]=0;
- a[4625]=0;
- a[4626]=0;
- a[4627]=0;
- a[4628]=0;
- a[4629]=0;
- a[4630]=0;
- a[4631]=0;
- a[4632]=0;
- a[4633]=0;
- a[4634]=0;
- a[4635]=0;
- a[4636]=0;
- a[4637]=0;
- a[4638]=0;
- a[4639]=0;
- a[4640]=0;
- a[4641]=0;
- a[4642]=0;
- a[4643]=0;
- a[4644]=0;
- a[4645]=0;
- a[4646]=0;
- a[4647]=0;
- a[4648]=0;
- a[4649]=0;
- a[4650]=0;
- a[4651]=0;
- a[4652]=0;
- a[4653]=0;
- a[4654]=0;
- a[4655]=0;
- a[4656]=0;
- a[4657]=0;
- a[4658]=0;
- a[4659]=0;
- a[4660]=0;
- a[4661]=0;
- a[4662]=0;
- a[4663]=0;
- a[4664]=0;
- a[4665]=0;
- a[4666]=0;
- a[4667]=0;
- a[4668]=0;
- a[4669]=0;
- a[4670]=0;
- a[4671]=0;
- a[4672]=0;
- a[4673]=0;
- a[4674]=0;
- a[4675]=0;
- a[4676]=0;
- a[4677]=0;
- a[4678]=0;
- a[4679]=0;
- a[4680]=0;
- a[4681]=0;
- a[4682]=0;
- a[4683]=0;
- a[4684]=0;
- a[4685]=0;
- a[4686]=0;
- a[4687]=0;
- a[4688]=0;
- a[4689]=0;
- a[4690]=0;
- a[4691]=0;
- a[4692]=0;
- a[4693]=0;
- a[4694]=0;
- a[4695]=0;
- a[4696]=0;
- a[4697]=0;
- a[4698]=0;
- a[4699]=0;
- a[4700]=0;
- a[4701]=0;
- a[4702]=0;
- a[4703]=0;
- a[4704]=0;
- a[4705]=0;
- a[4706]=0;
- a[4707]=0;
- a[4708]=0;
- a[4709]=0;
- a[4710]=0;
- a[4711]=0;
- a[4712]=0;
- a[4713]=0;
- a[4714]=0;
- a[4715]=0;
- a[4716]=0;
- a[4717]=0;
- a[4718]=0;
- a[4719]=0;
- a[4720]=0;
- a[4721]=0;
- a[4722]=0;
- a[4723]=0;
- a[4724]=0;
- a[4725]=0;
- a[4726]=0;
- a[4727]=0;
- a[4728]=0;
- a[4729]=0;
- a[4730]=0;
- a[4731]=0;
- a[4732]=0;
- a[4733]=0;
- a[4734]=0;
- a[4735]=0;
- a[4736]=0;
- a[4737]=0;
- a[4738]=0;
- a[4739]=0;
- a[4740]=0;
- a[4741]=0;
- a[4742]=0;
- a[4743]=0;
- a[4744]=0;
- a[4745]=0;
- a[4746]=0;
- a[4747]=0;
- a[4748]=0;
- a[4749]=0;
- a[4750]=0;
- a[4751]=0;
- a[4752]=0;
- a[4753]=0;
- a[4754]=0;
- a[4755]=0;
- a[4756]=0;
- a[4757]=0;
- a[4758]=0;
- a[4759]=0;
- a[4760]=0;
- a[4761]=0;
- a[4762]=0;
- a[4763]=0;
- a[4764]=0;
- a[4765]=0;
- a[4766]=0;
- a[4767]=0;
- a[4768]=0;
- a[4769]=0;
- a[4770]=0;
- a[4771]=0;
- a[4772]=0;
- a[4773]=0;
- a[4774]=0;
- a[4775]=0;
- a[4776]=0;
- a[4777]=0;
- a[4778]=0;
- a[4779]=0;
- a[4780]=0;
- a[4781]=0;
- a[4782]=0;
- a[4783]=0;
- a[4784]=0;
- a[4785]=0;
- a[4786]=0;
- a[4787]=0;
- a[4788]=0;
- a[4789]=0;
- a[4790]=0;
- a[4791]=0;
- a[4792]=0;
- a[4793]=0;
- a[4794]=0;
- a[4795]=0;
- a[4796]=0;
- a[4797]=0;
- a[4798]=0;
- a[4799]=0;
- a[4800]=0;
- a[4801]=0;
- a[4802]=0;
- a[4803]=0;
- a[4804]=0;
- a[4805]=0;
- a[4806]=0;
- a[4807]=0;
- a[4808]=0;
- a[4809]=0;
- a[4810]=0;
- a[4811]=0;
- a[4812]=0;
- a[4813]=0;
- a[4814]=0;
- a[4815]=0;
- a[4816]=0;
- a[4817]=0;
- a[4818]=0;
- a[4819]=0;
- a[4820]=0;
- a[4821]=0;
- a[4822]=0;
- a[4823]=0;
- a[4824]=0;
- a[4825]=0;
- a[4826]=0;
- a[4827]=0;
- a[4828]=0;
- a[4829]=0;
- a[4830]=0;
- a[4831]=0;
- a[4832]=0;
- a[4833]=0;
- a[4834]=0;
- a[4835]=0;
- a[4836]=0;
- a[4837]=0;
- a[4838]=0;
- a[4839]=0;
- a[4840]=0;
- a[4841]=0;
- a[4842]=0;
- a[4843]=0;
- a[4844]=0;
- a[4845]=0;
- a[4846]=0;
- a[4847]=0;
- a[4848]=0;
- a[4849]=0;
- a[4850]=0;
- a[4851]=0;
- a[4852]=0;
- a[4853]=0;
- a[4854]=0;
- a[4855]=0;
- a[4856]=0;
- a[4857]=0;
- a[4858]=0;
- a[4859]=0;
- a[4860]=0;
- a[4861]=0;
- a[4862]=0;
- a[4863]=0;
- a[4864]=0;
- a[4865]=0;
- a[4866]=0;
- a[4867]=0;
- a[4868]=0;
- a[4869]=0;
- a[4870]=0;
- a[4871]=0;
- a[4872]=0;
- a[4873]=0;
- a[4874]=0;
- a[4875]=0;
- a[4876]=0;
- a[4877]=0;
- a[4878]=0;
- a[4879]=0;
- a[4880]=0;
- a[4881]=0;
- a[4882]=0;
- a[4883]=0;
- a[4884]=0;
- a[4885]=0;
- a[4886]=0;
- a[4887]=0;
- a[4888]=0;
- a[4889]=0;
- a[4890]=0;
- a[4891]=0;
- a[4892]=0;
- a[4893]=0;
- a[4894]=0;
- a[4895]=0;
- a[4896]=0;
- a[4897]=0;
- a[4898]=0;
- a[4899]=0;
- a[4900]=0;
- a[4901]=0;
- a[4902]=0;
- a[4903]=0;
- a[4904]=0;
- a[4905]=0;
- a[4906]=0;
- a[4907]=0;
- a[4908]=0;
- a[4909]=0;
- a[4910]=0;
- a[4911]=0;
- a[4912]=0;
- a[4913]=0;
- a[4914]=0;
- a[4915]=0;
- a[4916]=0;
- a[4917]=0;
- a[4918]=0;
- a[4919]=0;
- a[4920]=0;
- a[4921]=0;
- a[4922]=0;
- a[4923]=0;
- a[4924]=0;
- a[4925]=0;
- a[4926]=0;
- a[4927]=0;
- a[4928]=0;
- a[4929]=0;
- a[4930]=0;
- a[4931]=0;
- a[4932]=0;
- a[4933]=0;
- a[4934]=0;
- a[4935]=0;
- a[4936]=0;
- a[4937]=0;
- a[4938]=0;
- a[4939]=0;
- a[4940]=0;
- a[4941]=0;
- a[4942]=0;
- a[4943]=0;
- a[4944]=0;
- a[4945]=0;
- a[4946]=0;
- a[4947]=0;
- a[4948]=0;
- a[4949]=0;
- a[4950]=0;
- a[4951]=0;
- a[4952]=0;
- a[4953]=0;
- a[4954]=0;
- a[4955]=0;
- a[4956]=0;
- a[4957]=0;
- a[4958]=0;
- a[4959]=0;
- a[4960]=0;
- a[4961]=0;
- a[4962]=0;
- a[4963]=0;
- a[4964]=0;
- a[4965]=0;
- a[4966]=0;
- a[4967]=0;
- a[4968]=0;
- a[4969]=0;
- a[4970]=0;
- a[4971]=0;
- a[4972]=0;
- a[4973]=0;
- a[4974]=0;
- a[4975]=0;
- a[4976]=0;
- a[4977]=0;
- a[4978]=0;
- a[4979]=0;
- a[4980]=0;
- a[4981]=0;
- a[4982]=0;
- a[4983]=0;
- a[4984]=0;
- a[4985]=0;
- a[4986]=0;
- a[4987]=0;
- a[4988]=0;
- a[4989]=0;
- a[4990]=0;
- a[4991]=0;
- a[4992]=0;
- a[4993]=0;
- a[4994]=0;
- a[4995]=0;
- a[4996]=0;
- a[4997]=0;
- a[4998]=0;
- a[4999]=0;
- return a;
-}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2119.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2119.js
new file mode 100644
index 0000000..54840c2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2119.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --nouse-ic
+
+function strict_function() {
+ "use strict"
+ undeclared = 1;
+}
+
+assertThrows(strict_function);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2153.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2153.js
new file mode 100644
index 0000000..3170042
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2153.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var o = {};
+o.__defineGetter__('foo', function () { return null; });
+var o = {};
+o.foo = 42;
+assertEquals(42, o.foo);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2156.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2156.js
new file mode 100644
index 0000000..3482571
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2156.js
@@ -0,0 +1,39 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --harmony-collections
+
+var key1 = {};
+var key2 = {};
+var map = new WeakMap;
+
+// Adding hidden properties preserves map sharing. Putting the key into
+// a WeakMap will cause the first hidden property to be added.
+assertTrue(%HaveSameMap(key1, key2));
+map.set(key1, 1);
+map.set(key2, 2);
+assertTrue(%HaveSameMap(key1, key2));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2163.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2163.js
new file mode 100644
index 0000000..bfce9ff
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2163.js
@@ -0,0 +1,70 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Handy abbreviation.
+var dp = Object.defineProperty;
+
+function getter() { return 111; }
+function setter(x) { print(222); }
+function anotherGetter() { return 333; }
+function anotherSetter(x) { print(444); }
+var obj1, obj2;
+
+// obj1 and obj2 share the getter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj2 = {}
+dp(obj2, "alpha", { get: getter });
+obj1 = {};
+assertEquals(111, obj2.alpha);
+gc();
+assertEquals(111, obj2.alpha);
+
+// obj1, obj2, and obj3 share the getter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj2 = {}
+dp(obj2, "alpha", { get: getter });
+obj1 = {};
+gc();
+obj3 = {}
+dp(obj3, "alpha", { get: getter });
+
+
+// obj1 and obj2 share the getter and setter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj1.beta = 10;
+obj2 = {}
+dp(obj2, "alpha", { get: getter, set: setter });
+obj1 = {};
+assertEquals(111, obj2.alpha);
+gc();
+obj2.alpha = 100
+assertEquals(111, obj2.alpha);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2170.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2170.js
new file mode 100644
index 0000000..01cb1ea
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2170.js
@@ -0,0 +1,58 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function array_fun() {
+ for (var i = 0; i < 2; i++) {
+ var a = [1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8];
+ var x = new Array();
+ x.fixed$length = true;
+ for (var j = 0; j < a.length; j++) {
+ x.push(a[j]);
+ }
+ for(var j = 0; j < x.length; j++) {
+ if (typeof x[j] != 'number') {
+ throw "foo";
+ }
+ x[j] = x[j];
+ }
+ }
+}
+
+try {
+ for (var i = 0; i < 10; ++i) {
+ array_fun();
+ }
+ %OptimizeFunctionOnNextCall(array_fun);
+ for (var i = 0; i < 10; ++i) {
+ array_fun();
+ }
+} catch (e) {
+ assertUnreachable();
+}
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2172.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2172.js
new file mode 100644
index 0000000..5d06f4e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2172.js
@@ -0,0 +1,35 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+for (var i = 0; i < 10000; i++){
+ (i + "\0").split(/(.)\1/i);
+}
+
+for (var i = 0; i < 10000; i++){
+ (i + "\u1234\0").split(/(.)\1/i);
+}
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2185-2.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2185-2.js
new file mode 100644
index 0000000..b1eedb9
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2185-2.js
@@ -0,0 +1,145 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// These tests used to time out before this was fixed.
+
+var LEN = 2e4;
+
+function short() {
+ var sum = 0;
+ for (var i = 0; i < 1000; i++) {
+ var a = [1, 4, 34, 23, 6, 123, 3, 2, 11, 515, 4, 33, 22, 2, 2, 1, 0, 123,
+ 23, 42, 43, 1002, 44, 43, 101, 23, 55, 11, 101, 102, 45, 11, 404,
+ 31415, 34, 53, 453, 45, 34, 5, 2, 35, 5, 345, 36, 45, 345, 3, 45,
+ 3, 5, 5, 2, 2342344, 2234, 23, 2718, 1500, 2, 19, 22, 43, 41, 0,
+ -1, 33, 45, 78];
+ a.sort(function(a, b) { return a - b; });
+ sum += a[0];
+ }
+ return sum;
+}
+
+function short_bench(name, array) {
+ var start = new Date();
+ short();
+ var end = new Date();
+ var ms = end - start;
+ print("Short " + Math.floor(ms) + "ms");
+}
+
+function sawseq(a, tooth) {
+ var count = 0;
+ while (true) {
+ for (var i = 0; i < tooth; i++) {
+ a.push(i);
+ if (++count >= LEN) return a;
+ }
+ }
+}
+
+function sawseq2(a, tooth) {
+ var count = 0;
+ while (true) {
+ for (var i = 0; i < tooth; i++) {
+ a.push(i);
+ if (++count >= LEN) return a;
+ }
+ for (var i = 0; i < tooth; i++) {
+ a.push(tooth - i);
+ if (++count >= LEN) return a;
+ }
+ }
+}
+
+function sawseq3(a, tooth) {
+ var count = 0;
+ while (true) {
+ for (var i = 0; i < tooth; i++) {
+ a.push(tooth - i);
+ if (++count >= LEN) return a;
+ }
+ }
+}
+
+function up(a) {
+ for (var i = 0; i < LEN; i++) {
+ a.push(i);
+ }
+ return a;
+}
+
+function down(a) {
+ for (var i = 0; i < LEN; i++) {
+ a.push(LEN - i);
+ }
+ return a;
+}
+
+function ran(a) {
+ for (var i = 0; i < LEN; i++) {
+ a.push(Math.floor(Math.random() * LEN));
+ }
+ return a;
+}
+
+var random = ran([]);
+var asc = up([]);
+var desc = down([]);
+var asc_desc = down(up([]));
+var desc_asc = up(down([]));
+var asc_asc = up(up([]));
+var desc_desc = down(down([]));
+var saw1 = sawseq([], 1000);
+var saw2 = sawseq([], 500);
+var saw3 = sawseq([], 200);
+var saw4 = sawseq2([], 200);
+var saw5 = sawseq3([], 200);
+
+function bench(name, array) {
+ var start = new Date();
+ array.sort(function(a, b) { return a - b; });
+ var end = new Date();
+ for (var i = 0; i < array.length - 1; i++) {
+ if (array[i] > array[i + 1]) throw name + " " + i;
+ }
+ var ms = end - start;
+ print(name + " " + Math.floor(ms) + "ms");
+}
+
+short_bench();
+bench("random", random);
+bench("up", asc);
+bench("down", desc);
+bench("saw 1000", saw1);
+bench("saw 500", saw2);
+bench("saw 200", saw3);
+bench("saw 200 symmetric", saw4);
+bench("saw 200 down", saw4);
+bench("up, down", asc_desc);
+bench("up, up", asc_asc);
+bench("down, down", desc_desc);
+bench("down, up", desc_asc);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2185.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2185.js
new file mode 100644
index 0000000..895f322
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2185.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var a = [];
+
+for (var i = 0; i < 2; i++) {
+ for (var j = 0; j < 30000; j++) {
+ a.push(j);
+ }
+}
+
+a.sort(function(a, b) { return a - b; } );
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2186.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2186.js
new file mode 100644
index 0000000..0921dce
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2186.js
@@ -0,0 +1,49 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-collections
+
+function heapify(i) {
+ return 2.0 * (i / 2);
+}
+heapify(1);
+
+var ONE = 1;
+var ANOTHER_ONE = heapify(ONE);
+assertSame(ONE, ANOTHER_ONE);
+assertEquals("number", typeof ONE);
+assertEquals("number", typeof ANOTHER_ONE);
+
+var set = new Set;
+set.add(ONE);
+assertTrue(set.has(ONE));
+assertTrue(set.has(ANOTHER_ONE));
+
+var map = new Map;
+map.set(ONE, 23);
+assertSame(23, map.get(ONE));
+assertSame(23, map.get(ANOTHER_ONE));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2193.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2193.js
new file mode 100644
index 0000000..50509bf
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2193.js
@@ -0,0 +1,58 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --cache-optimized-code
+
+function bozo() {};
+function MakeClosure() {
+ return function f(use_literals) {
+ if (use_literals) {
+ return [1,2,3,3,4,5,6,7,8,9,bozo];
+ } else {
+ return 0;
+ }
+ }
+}
+
+// Create two closures that share the same literal boilerplates.
+var closure1 = MakeClosure();
+var closure2 = MakeClosure();
+var expected = [1,2,3,3,4,5,6,7,8,9,bozo];
+
+// Make sure we generate optimized code for the first closure after
+// warming it up properly so that the literals boilerplate is generated
+// and the optimized code uses CreateArrayLiteralShallow runtime call.
+assertEquals(0, closure1(false));
+assertEquals(expected, closure1(true));
+%OptimizeFunctionOnNextCall(closure1);
+assertEquals(expected, closure1(true));
+
+// Optimize the second closure, which should reuse the optimized code
+// from the first closure with the same literal boilerplates.
+assertEquals(0, closure2(false));
+%OptimizeFunctionOnNextCall(closure2);
+assertEquals(expected, closure2(true));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2219.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2219.js
new file mode 100644
index 0000000..946c75b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2219.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-proxies --expose-gc
+
+var p = Proxy.create({getPropertyDescriptor: function() { gc() }});
+var o = Object.create(p);
+assertSame(23, o.x = 23);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2225.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2225.js
new file mode 100644
index 0000000..9957d8d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2225.js
@@ -0,0 +1,65 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-proxies
+
+var proxy_has_x = false;
+var proxy = Proxy.create({ getPropertyDescriptor:function(key) {
+ assertSame('x', key);
+ if (proxy_has_x) {
+ return { configurable:true, writable:false, value:19 };
+ }
+}});
+
+// Test __lookupGetter__/__lookupSetter__ with proxy.
+assertSame(undefined, Object.prototype.__lookupGetter__.call(proxy, 'foo'));
+assertSame(undefined, Object.prototype.__lookupSetter__.call(proxy, 'bar'));
+assertSame(undefined, Object.prototype.__lookupGetter__.call(proxy, '123'));
+assertSame(undefined, Object.prototype.__lookupSetter__.call(proxy, '456'));
+
+// Test __lookupGetter__/__lookupSetter__ with proxy in prototype chain.
+var object = Object.create(proxy);
+assertSame(undefined, Object.prototype.__lookupGetter__.call(object, 'foo'));
+assertSame(undefined, Object.prototype.__lookupSetter__.call(object, 'bar'));
+assertSame(undefined, Object.prototype.__lookupGetter__.call(object, '123'));
+assertSame(undefined, Object.prototype.__lookupSetter__.call(object, '456'));
+
+// Test inline constructors with proxy as prototype.
+function f() { this.x = 23; }
+f.prototype = proxy;
+proxy_has_x = false;
+assertSame(23, new f().x);
+proxy_has_x = true;
+assertSame(19, new f().x);
+
+// Test inline constructors with proxy in prototype chain.
+function g() { this.x = 42; }
+g.prototype.__proto__ = proxy;
+proxy_has_x = false;
+assertSame(42, new g().x);
+proxy_has_x = true;
+assertSame(19, new g().x);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2226.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2226.js
new file mode 100644
index 0000000..1ac3d30
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2226.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var foo = function() { 0; /* foo function */ };
+var bar = function() { 1; /* bar function */ };
+var baz = function() { 2; /* baz function */ };
+
+var test = foo.test = bar.test = baz;
+
+assertEquals(baz, test);
+assertEquals(baz, foo.test);
+assertEquals(baz, bar.test);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2234.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2234.js
new file mode 100644
index 0000000..8da513e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2234.js
@@ -0,0 +1,41 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function test(i) {
+ // Overwrite random parts of the transcendental cache.
+ Math.sin(i / 1779 * Math.PI);
+ // Check whether the first cache line has been accidentally overwritten
+ // with incorrect key.
+ assertEquals(0, Math.sin(0));
+}
+
+for (i = 0; i < 10000; ++i) {
+ test(i);
+ if (i == 0) %OptimizeFunctionOnNextCall(test);
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2249.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2249.js
new file mode 100644
index 0000000..07d687d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2249.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --gc-interval=10 --stress-compaction
+
+var o = {};
+o[Math.pow(2,30)-1] = 0;
+o[Math.pow(2,31)-1] = 0;
+o[1] = 0;
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2250.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2250.js
new file mode 100644
index 0000000..b3b0db3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2250.js
@@ -0,0 +1,68 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// The original problem from the bug: In the example below SMI check for b
+// generated for inlining of equals invocation (marked with (*)) will be hoisted
+// out of the loop across the typeof b === "object" condition and cause an
+// immediate deopt. Another problem here is that no matter how many time we
+// deopt and reopt we will continue to produce the wrong code.
+//
+// The fix is to notice when a deopt and subsequent reopt doesn't find
+// additional type information, indicating that optimistic LICM should be
+// disabled during compilation.
+
+function eq(a, b) {
+ if (typeof b === "object") {
+ return b.equals(a); // (*)
+ }
+ return a === b;
+}
+
+Object.prototype.equals = function (other) {
+ return (this === other);
+};
+
+function test() {
+ for (var i = 0; !eq(i, 10); i++)
+ ;
+}
+
+eq({}, {});
+eq({}, {});
+eq(1, 1);
+eq(1, 1);
+test();
+%OptimizeFunctionOnNextCall(test);
+test();
+%OptimizeFunctionOnNextCall(test);
+// Second compilation should have noticed that LICM wasn't a good idea, and now
+// function should no longer deopt when called.
+test();
+assertTrue(2 != %GetOptimizationStatus(test));
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2261.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2261.js
new file mode 100644
index 0000000..000e07d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2261.js
@@ -0,0 +1,113 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test materialization of the arguments object when deoptimizing a
+// strict mode closure after modifying an argument.
+
+(function () {
+ var forceDeopt = 0;
+ function inner(x) {
+ "use strict";
+ x = 2;
+ // Do not remove this %DebugPrint as it makes sure the deopt happens
+ // after the assignment and is not hoisted above the assignment.
+ %DebugPrint(arguments[0]);
+ forceDeopt + 1;
+ return arguments[0];
+ }
+
+ assertEquals(1, inner(1));
+ assertEquals(1, inner(1));
+ %OptimizeFunctionOnNextCall(inner);
+ assertEquals(1, inner(1));
+ forceDeopt = "not a number";
+ assertEquals(1, inner(1));
+})();
+
+
+// Test materialization of the arguments object when deoptimizing an
+// inlined strict mode closure after modifying an argument.
+
+(function () {
+ var forceDeopt = 0;
+ function inner(x) {
+ "use strict";
+ x = 2;
+ // Do not remove this %DebugPrint as it makes sure the deopt happens
+ // after the assignment and is not hoisted above the assignment.
+ %DebugPrint(arguments[0]);
+ forceDeopt + 1;
+ return arguments[0];
+ }
+
+ function outer(x) {
+ return inner(x);
+ }
+
+ assertEquals(1, outer(1));
+ assertEquals(1, outer(1));
+ %OptimizeFunctionOnNextCall(outer);
+ assertEquals(1, outer(1));
+ forceDeopt = "not a number";
+ assertEquals(1, outer(1));
+})();
+
+
+// Test materialization of the multiple arguments objects when
+// deoptimizing several inlined closure after modifying an argument.
+
+(function () {
+ var forceDeopt = 0;
+ function inner(x,y,z) {
+ "use strict";
+ x = 3;
+ // Do not remove this %DebugPrint as it makes sure the deopt happens
+ // after the assignment and is not hoisted above the assignment.
+ %DebugPrint(arguments[0]);
+ forceDeopt + 1;
+ return arguments[0];
+ }
+
+ function middle(x) {
+ "use strict";
+ x = 2;
+ return inner(10*x, 20*x, 30*x) + arguments[0];
+ }
+
+ function outer(x) {
+ return middle(x);
+ }
+
+ assertEquals(21, outer(1));
+ assertEquals(21, outer(1));
+ %OptimizeFunctionOnNextCall(outer);
+ assertEquals(21, outer(1));
+ forceDeopt = "not a number";
+ assertEquals(21, outer(1));
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2284.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2284.js
new file mode 100644
index 0000000..5614019
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2284.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+assertThrows("%foobar();", TypeError);
+assertThrows("%constructor();", TypeError);
+assertThrows("%constructor(23);", TypeError);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2285.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2285.js
new file mode 100644
index 0000000..efda4cd
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2285.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+assertThrows(function() { %_CallFunction(null, 0, ""); });
+assertThrows(function() { %_CallFunction(null, 0, 1); });
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2286.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2286.js
new file mode 100644
index 0000000..372451e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2286.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+assertThrows("f()", ReferenceError);
+assertThrows("%f()", TypeError);
+assertThrows("%_f()", SyntaxError);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2289.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2289.js
new file mode 100644
index 0000000..e89ec6e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2289.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var foo = "a";
+for (var i = 0; i < 12; i++) foo += foo;
+foo = foo + 'b' + foo;
+
+foo.replace(/b/, "a");
+
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2291.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2291.js
new file mode 100644
index 0000000..96627fc
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2291.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function StrictCompare(x) { return x === Object(x); }
+
+var obj = new Object();
+var obj2 = new Object();
+obj == obj; // Populate IC cache with non-strict comparison.
+
+StrictCompare(obj); // Set IC in StrictCompare from IC cache.
+
+assertFalse(StrictCompare('foo')); // Use == stub for === operation.
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2294.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2294.js
new file mode 100644
index 0000000..43ba10d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2294.js
@@ -0,0 +1,70 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var clampedArray = new Uint8ClampedArray(10);
+
+function test() {
+ clampedArray[0] = 0.499;
+ assertEquals(0, clampedArray[0]);
+ clampedArray[0] = 0.5;
+ assertEquals(0, clampedArray[0]);
+ clampedArray[0] = 0.501;
+ assertEquals(1, clampedArray[0]);
+ clampedArray[0] = 1.499;
+ assertEquals(1, clampedArray[0]);
+ clampedArray[0] = 1.5;
+ assertEquals(2, clampedArray[0]);
+ clampedArray[0] = 1.501;
+ assertEquals(2, clampedArray[0]);
+ clampedArray[0] = 2.5;
+ assertEquals(2, clampedArray[0]);
+ clampedArray[0] = 3.5;
+ assertEquals(4, clampedArray[0]);
+ clampedArray[0] = 252.5;
+ assertEquals(252, clampedArray[0]);
+ clampedArray[0] = 253.5;
+ assertEquals(254, clampedArray[0]);
+ clampedArray[0] = 254.5;
+ assertEquals(254, clampedArray[0]);
+ clampedArray[0] = 256.5;
+ assertEquals(255, clampedArray[0]);
+ clampedArray[0] = -0.5;
+ assertEquals(0, clampedArray[0]);
+ clampedArray[0] = -1.5;
+ assertEquals(0, clampedArray[0]);
+ clampedArray[0] = 1000000000000;
+ assertEquals(255, clampedArray[0]);
+ clampedArray[0] = -1000000000000;
+ assertEquals(0, clampedArray[0]);
+}
+
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+test();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2296.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2296.js
new file mode 100644
index 0000000..c00f14f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2296.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+Debug = debug.Debug
+
+function listener(event, exec_state, event_data, data) {
+ event_data.script().setSource(1);
+};
+
+Debug.setListener(listener);
+
+eval('0');
+
+Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2318.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2318.js
new file mode 100644
index 0000000..ca67ab2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2318.js
@@ -0,0 +1,66 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --nostack-trace-on-abort
+
+function f() {
+ var i = 0;
+
+ // Stack-allocate to reach the end of stack quickly.
+ var _A0 = 00; var _A1 = 01; var _A2 = 02; var _A3 = 03; var _A4 = 04;
+ var _B0 = 05; var _B1 = 06; var _B2 = 07; var _B3 = 08; var _B4 = 09;
+ var _C0 = 10; var _C1 = 11; var _C2 = 12; var _C3 = 13; var _C4 = 14;
+ var _D0 = 15; var _D1 = 16; var _D2 = 17; var _D3 = 18; var _D4 = 19;
+ var _E0 = 20; var _E1 = 21; var _E2 = 22; var _E3 = 23; var _E4 = 24;
+ var _F0 = 25; var _F1 = 26; var _F2 = 27; var _F3 = 28; var _F4 = 29;
+ var _G0 = 30; var _G1 = 31; var _G2 = 32; var _G3 = 33; var _G4 = 34;
+ var _H0 = 35; var _H1 = 36; var _H2 = 37; var _H3 = 38; var _H4 = 39;
+ var _I0 = 40; var _I1 = 41; var _I2 = 42; var _I3 = 43; var _I4 = 44;
+ var _J0 = 45; var _J1 = 46; var _J2 = 47; var _J3 = 48; var _J4 = 49;
+ var _K0 = 50; var _K1 = 51; var _K2 = 52; var _K3 = 53; var _K4 = 54;
+ var _L0 = 55; var _L1 = 56; var _L2 = 57; var _L3 = 58; var _L4 = 59;
+ var _M0 = 60; var _M1 = 61; var _M2 = 62; var _M3 = 63; var _M4 = 64;
+ var _N0 = 65; var _N1 = 66; var _N2 = 67; var _N3 = 68; var _N4 = 69;
+ var _O0 = 70; var _O1 = 71; var _O2 = 72; var _O3 = 73; var _O4 = 74;
+ var _P0 = 75; var _P1 = 76; var _P2 = 77; var _P3 = 78; var _P4 = 79;
+ var _Q0 = 80; var _Q1 = 81; var _Q2 = 82; var _Q3 = 83; var _Q4 = 84;
+ var _R0 = 85; var _R1 = 86; var _R2 = 87; var _R3 = 88; var _R4 = 89;
+ var _S0 = 90; var _S1 = 91; var _S2 = 92; var _S3 = 93; var _S4 = 94;
+ var _T0 = 95; var _T1 = 96; var _T2 = 97; var _T3 = 98; var _T4 = 99;
+
+ f();
+};
+
+Debug = debug.Debug;
+var bp = Debug.setBreakPoint(f, 0);
+
+function listener(event, exec_state, event_data, data) {
+ result = exec_state.frame().evaluate("i").value();
+};
+
+Debug.setListener(listener);
+assertThrows(function() { f(); }, RangeError);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2322.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2322.js
new file mode 100644
index 0000000..1195bab
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2322.js
@@ -0,0 +1,36 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-scoping
+
+"use strict";
+
+assertThrows("'use strict'; for (let x in x);", ReferenceError);
+
+let s;
+for (let pppp in {}) {};
+assertThrows(function() { pppp = true }, ReferenceError);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2326.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2326.js
new file mode 100644
index 0000000..d2edf2b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2326.js
@@ -0,0 +1,54 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This tests that we do not share optimized code across closures that
+// were optimized using OSR (for a particular OSR entry AST id) even if
+// caching of optimized code kicks in.
+
+function makeClosure() {
+ function f(mode, iterations) {
+ var accumulator = 0;
+ if (mode == 1) {
+ while (--iterations > 0) accumulator = Math.ceil(accumulator);
+ return 1;
+ } else {
+ while (--iterations > 0) accumulator = Math.floor(accumulator);
+ return 2;
+ }
+ }
+ return f;
+}
+
+// Generate two closures sharing the same underlying function literal.
+var f1 = makeClosure();
+var f2 = makeClosure();
+
+// This function should be optimized via OSR in the first tight loop.
+assertSame(1, f1(1, 100000));
+
+// This function should be optimized via OSR in the second tight loop.
+assertSame(2, f2(2, 100000));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2336.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2336.js
new file mode 100644
index 0000000..edfff60
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2336.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-gc
+
+// Check that we can cope with a debug listener that runs in the
+// GC epilogue and causes enough allocation to trigger a new GC during
+// the epilogue.
+
+var f = eval("(function f() { return 42; })");
+
+Debug = debug.Debug;
+
+var called = false;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.ScriptCollected) {
+ if (!called) {
+ called = true;
+ gc();
+ }
+ }
+};
+
+Debug.scripts();
+Debug.setListener(listener);
+f = void 0;
+gc();
+assertTrue(called);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2339.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2339.js
new file mode 100644
index 0000000..b16821d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2339.js
@@ -0,0 +1,59 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+/**
+ * The possible optimization states of a function. Must be in sync with the
+ * return values of Runtime_GetOptimizationStatus() in runtime.cc!
+ */
+
+var OptimizationState = {
+ YES: 1,
+ NO: 2,
+ ALWAYS: 3,
+ NEVER: 4
+};
+
+function simple() {
+ return simple_two_args(0, undefined);
+}
+
+function simple_two_args(always_zero, always_undefined) {
+ var always_five = always_undefined || 5;
+ return always_zero * always_five * .5;
+}
+
+
+simple();
+simple();
+%OptimizeFunctionOnNextCall(simple);
+simple();
+var raw_optimized = %GetOptimizationStatus(simple);
+assertFalse(raw_optimized == OptimizationState.NO);
+gc();
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2346.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2346.js
new file mode 100644
index 0000000..4c88b3e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2346.js
@@ -0,0 +1,123 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file only tests very simple descriptors that always have
+// configurable, enumerable, and writable set to true.
+// A range of more elaborate tests are performed in
+// object-define-property.js
+
+// Flags: --stress-runs=5
+
+function get() { return x; }
+function set(x) { this.x = x; }
+
+var obj = {x: 1};
+obj.__defineGetter__("accessor", get);
+obj.__defineSetter__("accessor", set);
+var a = new Array();
+a[1] = 42;
+obj[1] = 42;
+
+var descIsData = Object.getOwnPropertyDescriptor(obj, 'x');
+assertTrue(descIsData.enumerable);
+assertTrue(descIsData.writable);
+assertTrue(descIsData.configurable);
+
+var descIsAccessor = Object.getOwnPropertyDescriptor(obj, 'accessor');
+assertTrue(descIsAccessor.enumerable);
+assertTrue(descIsAccessor.configurable);
+assertTrue(descIsAccessor.get == get);
+assertTrue(descIsAccessor.set == set);
+
+var descIsNotData = Object.getOwnPropertyDescriptor(obj, 'not-x');
+assertTrue(descIsNotData == undefined);
+
+var descIsNotAccessor = Object.getOwnPropertyDescriptor(obj, 'not-accessor');
+assertTrue(descIsNotAccessor == undefined);
+
+var descArray = Object.getOwnPropertyDescriptor(a, '1');
+assertTrue(descArray.enumerable);
+assertTrue(descArray.configurable);
+assertTrue(descArray.writable);
+assertEquals(descArray.value, 42);
+
+var descObjectElement = Object.getOwnPropertyDescriptor(obj, '1');
+assertTrue(descObjectElement.enumerable);
+assertTrue(descObjectElement.configurable);
+assertTrue(descObjectElement.writable);
+assertEquals(descObjectElement.value, 42);
+
+// String objects.
+var a = new String('foobar');
+for (var i = 0; i < a.length; i++) {
+ var descStringObject = Object.getOwnPropertyDescriptor(a, i);
+ assertTrue(descStringObject.enumerable);
+ assertFalse(descStringObject.configurable);
+ assertFalse(descStringObject.writable);
+ assertEquals(descStringObject.value, a.substring(i, i+1));
+}
+
+// Support for additional attributes on string objects.
+a.x = 42;
+a[10] = 'foo';
+var descStringProperty = Object.getOwnPropertyDescriptor(a, 'x');
+assertTrue(descStringProperty.enumerable);
+assertTrue(descStringProperty.configurable);
+assertTrue(descStringProperty.writable);
+assertEquals(descStringProperty.value, 42);
+
+var descStringElement = Object.getOwnPropertyDescriptor(a, '10');
+assertTrue(descStringElement.enumerable);
+assertTrue(descStringElement.configurable);
+assertTrue(descStringElement.writable);
+assertEquals(descStringElement.value, 'foo');
+
+// Test that elements in the prototype chain is not returned.
+var proto = {};
+proto[10] = 42;
+
+var objWithProto = new Array();
+objWithProto.prototype = proto;
+objWithProto[0] = 'bar';
+var descWithProto = Object.getOwnPropertyDescriptor(objWithProto, '10');
+assertEquals(undefined, descWithProto);
+
+// Test elements on global proxy object.
+var global = (function() { return this; })();
+
+global[42] = 42;
+
+function el_getter() { return 239; };
+function el_setter() {};
+Object.defineProperty(global, '239', {get: el_getter, set: el_setter});
+
+var descRegularElement = Object.getOwnPropertyDescriptor(global, '42');
+assertEquals(42, descRegularElement.value);
+
+var descAccessorElement = Object.getOwnPropertyDescriptor(global, '239');
+assertEquals(el_getter, descAccessorElement.get);
+assertEquals(el_setter, descAccessorElement.set);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2373.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2373.js
new file mode 100644
index 0000000..16a87ec
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2373.js
@@ -0,0 +1,29 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var o = JSON.parse('{"a":2600753951}');
+assertEquals(2600753951, o.a);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2374.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2374.js
new file mode 100644
index 0000000..b12e5f2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2374.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var msg = '{"result":{"profile":{"head":{"functionName":"(root)","url":"","lineNumber":0,"totalTime":495.7243772462511,"selfTime":0,"numberOfCalls":0,"visible":true,"callUID":2771605942,"children":[{"functionName":"(program)","url":"","lineNumber":0,"totalTime":495.7243772462511,"selfTime":495.7243772462511,"numberOfCalls":0,"visible":true,"callUID":1902715303,"children":[]}]},"bottomUpHead":{"functionName":"(root)","url":"","lineNumber":0,"totalTime":495.7243772462511,"selfTime":0,"numberOfCalls":0,"visible":true,"callUID":2771605942,"children":[{"functionName":"(program)","url":"","lineNumber":0,"totalTime":495.7243772462511,"selfTime":495.7243772462511,"numberOfCalls":0,"visible":true,"callUID":1902715303,"children":[]}]}}},"id":41}';
+
+var obj = JSON.parse(msg);
+var obj2 = JSON.parse(msg);
+
+assertEquals(JSON.stringify(obj), JSON.stringify(obj2));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2398.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2398.js
new file mode 100644
index 0000000..1c66e7f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2398.js
@@ -0,0 +1,41 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"use strict";
+
+var observed = false;
+
+var object = { get toString() { observed = true; } };
+Object.defineProperty(object, "ro", { value: 1 });
+
+try {
+ object.ro = 2; // TypeError caused by trying to write to read-only.
+} catch (e) {
+ e.message; // Forces formatting of the message object.
+}
+
+assertFalse(observed);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-334.js b/src/3rdparty/v8/test/mjsunit/regress/regress-334.js
index 024fc9e..37dd299 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-334.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-334.js
@@ -40,7 +40,7 @@ var object = {__proto__:{}};
%SetProperty(object, "foo", func1, DONT_ENUM | DONT_DELETE);
%SetProperty(object, "bar", func1, DONT_ENUM | READ_ONLY);
%SetProperty(object, "baz", func1, DONT_DELETE | READ_ONLY);
-%SetProperty(object.__proto__, "bif", func1, DONT_ENUM | DONT_DELETE | READ_ONLY);
+%SetProperty(object.__proto__, "bif", func1, DONT_ENUM | DONT_DELETE);
object.bif = func2;
function enumerable(obj) {
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-builtin-array-op.js b/src/3rdparty/v8/test/mjsunit/regress/regress-builtin-array-op.js
new file mode 100644
index 0000000..1e37af3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-builtin-array-op.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we invoke the correct sort function in
+// array operations.
+
+var foo = "hest";
+Array.prototype.sort = function(fn) { foo = "fisk"; };
+Function.prototype.call = function() { foo = "caramel"; };
+var a = [2,3,1];
+a[100000] = 0;
+a.join();
+assertEquals("hest", foo);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-elements.js b/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-elements.js
new file mode 100644
index 0000000..634534c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-elements.js
@@ -0,0 +1,43 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+var a = JSON.parse('{"b":1,"c":2,"d":3,"e":4}');
+var b = JSON.parse('{"12040200":1, "a":2, "b":2}');
+var c = JSON.parse('{"24050300":1}');
+b = null;
+gc();
+gc();
+c.a1 = 2;
+c.a2 = 2;
+c.a3 = 2;
+c.a4 = 2;
+c.a5 = 2;
+c.a6 = 2;
+c.a7 = 2;
+c.a8 = 2;
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js b/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js
new file mode 100644
index 0000000..03582bb
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-cnlt-enum-indices.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+var o = {};
+var o2 = {};
+
+o.a = 1;
+o2.a = 1;
+function f() { return 10; }
+// Adds a non-field enumerable property.
+Object.defineProperty(o, "b", { get: f, enumerable: true });
+Object.defineProperty(o2, "b", { get: f, enumerable: true });
+assertTrue(%HaveSameMap(o, o2));
+o.c = 2;
+
+for (var x in o) { }
+o = null;
+
+gc();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js b/src/3rdparty/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js
new file mode 100644
index 0000000..ee72faf
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-cntl-descriptors-enum.js
@@ -0,0 +1,46 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+DontEnum = 2;
+
+var o = {};
+%SetProperty(o, "a", 0, DontEnum);
+
+var o2 = {};
+%SetProperty(o2, "a", 0, DontEnum);
+
+assertTrue(%HaveSameMap(o, o2));
+
+o.y = 2;
+
+for (var v in o) { print(v); }
+o = {};
+gc();
+
+for (var v in o2) { print(v); }
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum.js b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum.js
new file mode 100644
index 0000000..c624cad
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum.js
@@ -0,0 +1,60 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Create a transition tree A (no descriptors) -> B (descriptor for a) -> C
+// (descriptor for a and c), that all share the descriptor array [a,c]. C is the
+// owner of the descriptor array.
+var o = {};
+o.a = 1;
+o.c = 2;
+
+// Add a transition B -> D where D has its own descriptor array [a,b] where b is
+// a constant function.
+var o1 = {};
+o1.a = 1;
+
+// Install an enumeration cache in the descriptor array [a,c] at map B.
+for (var x in o1) { }
+o1.b = function() { return 1; };
+
+// Return ownership of the descriptor array [a,c] to B and trim it to [a].
+o = null;
+gc();
+
+// Convert the transition B -> D into a transition to B -> E so that E uses the
+// instance descriptors [a,b] with b being a field.
+var o2 = {};
+o2.a = 1;
+o2.b = 10;
+
+// Create an object with map B and iterate over it.
+var o3 = {};
+o3.a = 1;
+
+for (var y in o3) { }
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum2.js b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum2.js
new file mode 100644
index 0000000..cdc7fbe
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-enum2.js
@@ -0,0 +1,46 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var o = {};
+o.a = 1;
+o.b = function() { return 1; };
+o.d = 2;
+
+for (var x in o) { }
+
+var o1 = {};
+o1.a = 1;
+o1.b = 10;
+o1.c = 20;
+
+var keys = ["a", "b", "c"];
+
+var i = 0;
+for (var y in o1) {
+ assertEquals(keys[i], y);
+ i += 1;
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-convert-transition.js b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-transition.js
new file mode 100644
index 0000000..057dc80
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-convert-transition.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var input = '{ "a1":1, "a2":1, "a3":1, "a4":1, "a5":1, "a6":1, "a7":1,\
+ "a8":1, "a9":1, "a10":1, "a11":1, "a12":1, "a13":1}';
+var a = JSON.parse(input);
+a.a = function() { return 10; };
+
+// Force conversion of field to slow mode.
+var b = JSON.parse(input);
+b.a = 10;
+
+// Add another property to the object that would transition to a.
+var c = JSON.parse(input);
+c.x = 10;
+assertEquals(undefined, c.a);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js
index 26b84fa..1ad250a 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js
@@ -25,9 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --gc-global
+
// Test that array elements don't break upon garbage collection.
var a = new Array(500);
-for (var i = 0; i < 500000; i++) {
+for (var i = 0; i < 100000; i++) {
a[i] = new Object();
}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-122271.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-122271.js
index 3a99a7f..8ae91e8 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-122271.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-122271.js
@@ -39,11 +39,11 @@ function foo(array) {
array.foo = "bar";
}
-assertTrue(%HasFastSmiOnlyElements(a));
-assertTrue(%HasFastElements(b));
+assertTrue(%HasFastSmiElements(a));
+assertTrue(%HasFastObjectElements(b));
foo(a);
foo(b);
-assertTrue(%HasFastSmiOnlyElements(a));
-assertTrue(%HasFastElements(b));
+assertTrue(%HasFastSmiElements(a));
+assertTrue(%HasFastObjectElements(b));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-125148.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-125148.js
new file mode 100644
index 0000000..0f7bcd8
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-125148.js
@@ -0,0 +1,90 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function ToDictionaryMode(x) {
+ %OptimizeObjectForAddingMultipleProperties(x, 100);
+}
+
+var A, B, C;
+
+// The initial bug report was about calling a know function...
+A = {};
+Object.defineProperty(A, "foo", { value: function() { assertUnreachable(); }});
+
+B = Object.create(A);
+Object.defineProperty(B, "foo", { value: function() { return 111; }});
+
+C = Object.create(B);
+
+function bar(x) { return x.foo(); }
+
+assertEquals(111, bar(C));
+assertEquals(111, bar(C));
+ToDictionaryMode(B);
+%OptimizeFunctionOnNextCall(bar);
+assertEquals(111, bar(C));
+
+// Although this was not in the initial bug report: The same for getters...
+A = {};
+Object.defineProperty(A, "baz", { get: function() { assertUnreachable(); }});
+
+B = Object.create(A);
+Object.defineProperty(B, "baz", { get: function() { return 111; }});
+
+C = Object.create(B);
+
+function boo(x) { return x.baz; }
+
+assertEquals(111, boo(C));
+assertEquals(111, boo(C));
+ToDictionaryMode(B);
+%OptimizeFunctionOnNextCall(boo);
+assertEquals(111, boo(C));
+
+// And once more for setters...
+A = {};
+Object.defineProperty(A, "huh", { set: function(x) { assertUnreachable(); }});
+
+B = Object.create(A);
+var setterValue;
+Object.defineProperty(B, "huh", { set: function(x) { setterValue = x; }});
+
+C = Object.create(B);
+
+function fuu(x) {
+ setterValue = 222;
+ x.huh = 111;
+ return setterValue;
+}
+
+assertEquals(111, fuu(C));
+assertEquals(111, fuu(C));
+ToDictionaryMode(B);
+%OptimizeFunctionOnNextCall(fuu);
+assertEquals(111, fuu(C));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134055.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134055.js
new file mode 100644
index 0000000..9b658fb
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134055.js
@@ -0,0 +1,63 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function crash(obj) {
+ return obj.foo;
+}
+
+function base(number_of_properties) {
+ var result = new Array();
+ for (var i = 0; i < number_of_properties; i++) {
+ result["property" + i] = "value" + i;
+ }
+ result.foo = number_of_properties;
+ return result;
+}
+
+var a = base(12);
+var b = base(13);
+var c = base(14);
+var d = base(15);
+
+crash(a); // Premonomorphic.
+crash(a);
+crash(b);
+crash(c);
+crash(d); // Polymorphic, degree 4.
+
+//Prepare ElementsKind transition map chain.
+var x = base(13);
+x[0] = "object";
+x = base(14);
+x[0] = "object";
+x = base(15);
+x[0] = "object";
+
+%OptimizeFunctionOnNextCall(crash);
+crash(a);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134609.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134609.js
new file mode 100644
index 0000000..da7d85d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-134609.js
@@ -0,0 +1,59 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --inline-accessors
+
+var forceDeopt = {x:0};
+
+var objectWithGetterProperty = (function (value) {
+ var obj = {};
+ Object.defineProperty(obj, "getterProperty", {
+ get: function foo() {
+ forceDeopt.x;
+ return value;
+ },
+ });
+ return obj;
+})("bad");
+
+function test() {
+ var iAmContextAllocated = "good";
+ objectWithGetterProperty.getterProperty;
+ return iAmContextAllocated;
+
+ // Make sure that the local variable is context allocated.
+ function unused() { iAmContextAllocated; }
+}
+
+assertEquals("good", test());
+assertEquals("good", test());
+%OptimizeFunctionOnNextCall(test);
+assertEquals("good", test());
+
+// At this point, foo should have been inlined into test. Let's deopt...
+delete forceDeopt.x;
+assertEquals("good", test());
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135008.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135008.js
new file mode 100644
index 0000000..2be396e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135008.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Filler long enough to trigger lazy parsing.
+var filler = "//" + new Array(1024).join('x');
+
+var scope = { x:23 };
+
+with(scope) {
+ eval(
+ "scope.f = (function outer() {" +
+ " function inner() {" +
+ " return x;" +
+ " }" +
+ " return inner;" +
+ "})();" +
+ filler
+ );
+};
+
+assertSame(23, scope.f());
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135066.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135066.js
new file mode 100644
index 0000000..1aeca8b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-135066.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Filler long enough to trigger lazy parsing.
+var filler = "//" + new Array(1024).join('x');
+
+// Test strict eval in global context.
+eval(
+ "'use strict';" +
+ "var x = 23;" +
+ "var f = function bozo1() {" +
+ " return x;" +
+ "};" +
+ "assertSame(23, f());" +
+ filler
+);
+
+// Test default eval in strict context.
+(function() {
+ "use strict";
+ eval(
+ "var y = 42;" +
+ "var g = function bozo2() {" +
+ " return y;" +
+ "};" +
+ "assertSame(42, g());" +
+ filler
+ );
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-137689.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-137689.js
new file mode 100644
index 0000000..ef79d24
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-137689.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function getter() { return 10; }
+function setter(v) { }
+function getter2() { return 20; }
+
+var o = {};
+var o2 = {};
+
+Object.defineProperty(o, "foo", { get: getter, configurable: true });
+Object.defineProperty(o2, "foo", { get: getter, configurable: true });
+assertTrue(%HaveSameMap(o, o2));
+
+Object.defineProperty(o, "bar", { get: getter2 });
+Object.defineProperty(o2, "bar", { get: getter2 });
+assertTrue(%HaveSameMap(o, o2));
+
+Object.defineProperty(o, "foo", { set: setter, configurable: true });
+Object.defineProperty(o2, "foo", { set: setter, configurable: true });
+assertTrue(%HaveSameMap(o, o2));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-138887.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-138887.js
new file mode 100644
index 0000000..8d8e169
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-138887.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function worker1(ignored) {
+ return 100;
+}
+
+function factory(worker) {
+ return function(call_depth) {
+ if (call_depth == 0) return 10;
+ return 1 + worker(call_depth - 1);
+ }
+}
+
+var f1 = factory(worker1);
+var f2 = factory(f1);
+assertEquals(11, f2(1)); // Result: 1 + f1(0) == 1 + 10.
+assertEquals(11, f2(1));
+%OptimizeFunctionOnNextCall(f1);
+assertEquals(10, f1(0)); // Terminates immediately -> returns 10.
+%OptimizeFunctionOnNextCall(f2);
+assertEquals(102, f2(1000)); // 1 + f1(999) == 1 + 1 + worker1(998) == 102
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-140083.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-140083.js
new file mode 100644
index 0000000..e38192c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-140083.js
@@ -0,0 +1,44 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test that the absence of a setter in a compound/count operation works.
+
+Object.defineProperty(Object.prototype, "foo",
+ { get: function() { return 123; } });
+
+function bar(o) {
+ o.foo += 42;
+ o.foo++;
+}
+
+var baz = {};
+bar(baz);
+bar(baz);
+%OptimizeFunctionOnNextCall(bar)
+bar(baz);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142087.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142087.js
new file mode 100644
index 0000000..881ca60
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142087.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var string = "What are you looking for?";
+
+var expected_match = [""];
+for (var i = 0; i < string.length; i++) {
+ expected_match.push("");
+}
+
+string.replace(/(_)|(_|)/g, "");
+assertArrayEquals(expected_match, string.match(/(_)|(_|)/g, ""));
+
+'***************************************'.match(/((\\)|(\*)|(\$))/g, ".");
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142218.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142218.js
new file mode 100644
index 0000000..373f83b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-142218.js
@@ -0,0 +1,44 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+length = 1 << 16;
+a = new Array(length);
+
+function insert_element(key) {
+ a[key] = 42;
+}
+
+insert_element(1);
+%OptimizeFunctionOnNextCall(insert_element);
+insert_element(new Object());
+count = 0;
+for (var i = 0; i < length; i++) {
+ if (a[i] != undefined) count++;
+}
+assertEquals(1, count);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-145961.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-145961.js
new file mode 100644
index 0000000..eb88945
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-145961.js
@@ -0,0 +1,39 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This test causes the operands to be passed in as Integer32 registers.
+// Flags: --allow-natives-syntax
+function test() {
+ var a = new Int32Array(2);
+ var x = a[0];
+ return Math.min(x, x);
+}
+
+assertEquals(0, test());
+assertEquals(0, test());
+%OptimizeFunctionOnNextCall(test);
+assertEquals(0, test());
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-146910.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-146910.js
new file mode 100644
index 0000000..120f809
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-146910.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x = [];
+assertSame(0, x.length);
+assertSame(undefined, x[0]);
+
+Object.defineProperty(x, '0', { value: 7, configurable: false });
+assertSame(1, x.length);
+assertSame(7, x[0]);
+
+x.length = 0;
+assertSame(1, x.length);
+assertSame(7, x[0]);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-147475.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-147475.js
new file mode 100644
index 0000000..180744c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-147475.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function worker1(ignored) {
+ return 100;
+}
+
+function factory(worker) {
+ return function(call_depth) {
+ if (call_depth == 0) return 10;
+ return 1 + worker(call_depth - 1);
+ }
+}
+
+var f1 = factory(worker1);
+var f2 = factory(f1);
+assertEquals(11, f2(1));
+%OptimizeFunctionOnNextCall(f1);
+assertEquals(10, f1(0));
+%OptimizeFunctionOnNextCall(f2);
+assertEquals(102, f2(2));
+assertEquals(102, f2(2));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-148376.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-148376.js
new file mode 100644
index 0000000..55bb5f1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-148376.js
@@ -0,0 +1,35 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function defineSetter(o) {
+ o.__defineSetter__('property', function() {});
+}
+
+defineSetter(Object.prototype);
+property = 0;
+defineSetter(this);
+var keys = Object.keys(this);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150545.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150545.js
new file mode 100644
index 0000000..68efdbf
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150545.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test that we do not generate OSR entry points that have an arguments
+// stack height different from zero. The OSR machinery cannot generate
+// frames for that.
+
+(function() {
+ "use strict";
+
+ var instantReturn = false;
+ function inner() {
+ if (instantReturn) return;
+ assertSame(3, arguments.length);
+ assertSame(1, arguments[0]);
+ assertSame(2, arguments[1]);
+ assertSame(3, arguments[2]);
+ }
+
+ function outer() {
+ inner(1,2,3);
+ // Trigger OSR.
+ while (%GetOptimizationStatus(outer) == 2) {}
+ }
+
+ outer();
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150729.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150729.js
new file mode 100644
index 0000000..15aa587
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-150729.js
@@ -0,0 +1,39 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var t = 0;
+function burn() {
+ i = [t, 1];
+ var M = [i[0], Math.cos(t) + i[7074959]];
+ t += .05;
+}
+for (var j = 0; j < 5; j++) {
+ if (j == 2) %OptimizeFunctionOnNextCall(burn);
+ burn();
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157019.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157019.js
new file mode 100644
index 0000000..1c54089
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157019.js
@@ -0,0 +1,54 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --nocrankshaft
+
+function makeConstructor() {
+ return function() {
+ this.a = 1;
+ this.b = 2;
+ };
+}
+
+var c1 = makeConstructor();
+var o1 = new c1();
+
+c1.prototype = {};
+
+for (var i = 0; i < 10; i++) {
+ var o = new c1();
+ for (var j = 0; j < 8; j++) {
+ o["x" + j] = 0;
+ }
+}
+
+var c2 = makeConstructor();
+var o2 = new c2();
+
+for (var i = 0; i < 50000; i++) {
+ new c2();
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157520.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157520.js
new file mode 100644
index 0000000..17081df
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-157520.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --nocrankshaft
+
+(function(){
+ var f = function(arg) {
+ arg = 2;
+ return arguments[0];
+ };
+ for (var i = 0; i < 50000; i++) {
+ assertSame(2, f(1));
+ }
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-158185.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-158185.js
new file mode 100644
index 0000000..99f19c7
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-158185.js
@@ -0,0 +1,39 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+assertEquals("0023456",
+ Object.keys(JSON.parse('{"0023456": 1}'))[0]);
+assertEquals("1234567890123",
+ Object.keys(JSON.parse('{"1234567890123": 1}'))[0]);
+assertEquals("123456789ABCD",
+ Object.keys(JSON.parse('{"123456789ABCD": 1}'))[0]);
+assertEquals("12A",
+ Object.keys(JSON.parse('{"12A": 1}'))[0]);
+
+assertEquals(1, JSON.parse('{"0":1}')[0]);
+assertEquals(undefined, JSON.parse('{"00":1}')[0]);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-160010.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-160010.js
new file mode 100644
index 0000000..266e545
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-160010.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var str = "a";
+for (var i = 0; i < 28; i++) {
+ str += str;
+}
+JSON.stringify(str);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js b/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js
index 1a608b1..4723ec1 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js
@@ -25,7 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --hydrogen-filter=Debug.setBreakPoint --expose-debug-as debug
+// Flags: --allow-natives-syntax --hydrogen-filter=Debug.setBreakPoint
+// Flags: --expose-debug-as debug
Debug = debug.Debug
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-deep-proto.js b/src/3rdparty/v8/test/mjsunit/regress/regress-deep-proto.js
new file mode 100644
index 0000000..5d2758c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-deep-proto.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function poly(x) {
+ return x.foo;
+}
+
+var one = {foo: 0};
+var two = {foo: 0, bar: 1};
+var three = {bar: 0};
+three.__proto__ = {};
+three.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__.__proto__.__proto__ = {};
+
+for (var i = 0; i < 1e6; i++) {
+ poly(one);
+ poly(two);
+ poly(three);
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-delete-empty-double.js b/src/3rdparty/v8/test/mjsunit/regress/regress-delete-empty-double.js
new file mode 100644
index 0000000..f7af2b1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-delete-empty-double.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+a = [1.1,2.2,3.3];
+a.length = 1;
+delete a[1];
+
+assertTrue(%HasFastDoubleElements(a));
+assertFalse(%HasFastHoleyElements(a));
+
+delete a[0];
+
+assertTrue(%HasFastDoubleElements(a));
+assertTrue(%HasFastHoleyElements(a));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-iteration-order.js b/src/3rdparty/v8/test/mjsunit/regress/regress-iteration-order.js
new file mode 100644
index 0000000..76f5c3f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-iteration-order.js
@@ -0,0 +1,42 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x = {a: 1, b: 2, c: 3};
+
+x.__proto__ = {};
+
+delete x.b;
+
+x.d = 4;
+
+s = "";
+
+for (key in x) {
+ s += x[key];
+}
+
+assertEquals("134", s);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-json-stringify-gc.js b/src/3rdparty/v8/test/mjsunit/regress/regress-json-stringify-gc.js
new file mode 100644
index 0000000..d732ebc
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-json-stringify-gc.js
@@ -0,0 +1,41 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var a = [];
+var new_space_string = "";
+for (var i = 0; i < 128; i++) {
+ new_space_string += String.fromCharCode((Math.random() * 26 + 65) | 0);
+}
+for (var i = 0; i < 10000; i++) a.push(new_space_string);
+
+// At some point during the first stringify, allocation causes a GC and
+// new_space_string is moved to old space. Make sure that this does not
+// screw up reading from the correct location.
+json1 = JSON.stringify(a);
+json2 = JSON.stringify(a);
+assertEquals(json1, json2, "GC caused JSON.stringify to fail.");
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-load-elements.js b/src/3rdparty/v8/test/mjsunit/regress/regress-load-elements.js
new file mode 100644
index 0000000..68cdc8e
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-load-elements.js
@@ -0,0 +1,49 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function bad_func(o,a) {
+ for (var i = 0; i < 1; ++i) {
+ o.prop = 0;
+ var x = a[0];
+ }
+}
+
+o = new Object();
+a = {};
+a[0] = 1;
+bad_func(o, a);
+
+o = new Object();
+bad_func(o, a);
+
+// Optimize. Before the fix, the elements-load and subsequent fixed-array-length
+// were hoisted above the map check. This is invalid since not all types
+// necessarily have elements.
+%OptimizeFunctionOnNextCall(bad_func);
+bad_func(o, "");
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js b/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js
index a9a6d89..55ca299 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js
@@ -33,5 +33,5 @@
var fast_array = ['a', 'b'];
var array = fast_array.concat(fast_array);
-assertTrue(%HasFastElements(fast_array));
-assertTrue(%HasFastElements(array)); \ No newline at end of file
+assertTrue(%HasFastObjectElements(fast_array));
+assertTrue(%HasFastObjectElements(array));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js b/src/3rdparty/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js
new file mode 100644
index 0000000..9e6ec9d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-undefined-store-keyed-fast-element.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function f(v) {
+ return [0.0, 0.1, 0.2, v];
+}
+
+assertEquals([0.0, 0.1, 0.2, NaN], f(NaN));
+assertEquals([0.0, 0.1, 0.2, NaN], f(NaN));
+%OptimizeFunctionOnNextCall(f);
+assertEquals([0.0, 0.1, 0.2, undefined], f(undefined));
diff --git a/src/3rdparty/v8/test/mjsunit/stack-traces.js b/src/3rdparty/v8/test/mjsunit/stack-traces.js
index 536e71b..438eec9 100644
--- a/src/3rdparty/v8/test/mjsunit/stack-traces.js
+++ b/src/3rdparty/v8/test/mjsunit/stack-traces.js
@@ -111,6 +111,18 @@ function testStrippedCustomError() {
throw new CustomError("hep-hey", CustomError);
}
+MyObj = function() { FAIL; }
+
+MyObjCreator = function() {}
+
+MyObjCreator.prototype.Create = function() {
+ return new MyObj();
+}
+
+function testClassNames() {
+ (new MyObjCreator).Create();
+}
+
// Utility function for testing that the expected strings occur
// in the stack trace produced when running the given function.
function testTrace(name, fun, expected, unexpected) {
@@ -254,6 +266,8 @@ testTrace("testDefaultCustomError", testDefaultCustomError,
["collectStackTrace"]);
testTrace("testStrippedCustomError", testStrippedCustomError, ["hep-hey"],
["new CustomError", "collectStackTrace"]);
+testTrace("testClassNames", testClassNames,
+ ["new MyObj", "MyObjCreator.Create"], ["as Create"]);
testCallerCensorship();
testUnintendedCallerCensorship();
testErrorsDuringFormatting();
diff --git a/src/3rdparty/v8/test/mjsunit/str-to-num.js b/src/3rdparty/v8/test/mjsunit/str-to-num.js
index bbfa7d3..cbec87f 100644
--- a/src/3rdparty/v8/test/mjsunit/str-to-num.js
+++ b/src/3rdparty/v8/test/mjsunit/str-to-num.js
@@ -147,7 +147,6 @@ assertEquals(15, toNumber("0Xf"));
assertEquals(15, toNumber("0XF"));
assertEquals(0, toNumber("0x000"));
-assertEquals(-Infinity, 1 / toNumber("-0x000"));
assertEquals(0, toNumber("0x000" + repeat('0', 1000)));
assertEquals(9, toNumber("0x009"));
assertEquals(10, toNumber("0x00a"));
@@ -157,7 +156,6 @@ assertEquals(15, toNumber("0x00F"));
assertEquals(15, toNumber("0x00F "));
assertEquals(Infinity, toNumber("0x" + repeat('0', 1000) + '1'
+ repeat('0', 1000)));
-assertEquals(-Infinity, toNumber("-0x1" + repeat('0', 1000)));
assertEquals(0x1000000 * 0x10000000, toNumber("0x10000000000000"));
assertEquals(0x1000000 * 0x10000000 + 1, toNumber("0x10000000000001"));
@@ -207,3 +205,10 @@ assertTrue(isNaN(toNumber("1" + repeat('0', 1000) + 'junk')), "1e1000 junk");
for (var i = 1; i < 12; i++) {
assertEquals(toNumber('1' + repeat('0', i)), Math.pow(10.0, i));
}
+
+assertTrue(isNaN(toNumber("+0x0")));
+assertTrue(isNaN(toNumber("+0xFF")));
+assertTrue(isNaN(toNumber("+0x012")));
+assertTrue(isNaN(toNumber("-0x0")));
+assertTrue(isNaN(toNumber("-0xFF")));
+assertTrue(isNaN(toNumber("-0x012"))); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/string-charcodeat.js b/src/3rdparty/v8/test/mjsunit/string-charcodeat.js
index 8be6a09..72dc819 100644
--- a/src/3rdparty/v8/test/mjsunit/string-charcodeat.js
+++ b/src/3rdparty/v8/test/mjsunit/string-charcodeat.js
@@ -231,3 +231,6 @@ for (var i = 0; i < 5; i++) {
}
%OptimizeFunctionOnNextCall(directlyOnPrototype);
directlyOnPrototype();
+
+assertTrue(isNaN(%_StringCharCodeAt("ABC", -1)));
+assertTrue(isNaN(%_StringCharCodeAt("ABC", 4)));
diff --git a/src/3rdparty/v8/test/mjsunit/testcfg.py b/src/3rdparty/v8/test/mjsunit/testcfg.py
index 87ed4fa..c8b972c 100644
--- a/src/3rdparty/v8/test/mjsunit/testcfg.py
+++ b/src/3rdparty/v8/test/mjsunit/testcfg.py
@@ -25,17 +25,87 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
import os
-from os.path import join, dirname, exists
import re
-import tempfile
+
+from testrunner.local import testsuite
+from testrunner.objects import testcase
FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)")
FILES_PATTERN = re.compile(r"//\s+Files:(.*)")
SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: TEST_FILE_NAME")
+class MjsunitTestSuite(testsuite.TestSuite):
+
+ def __init__(self, name, root):
+ super(MjsunitTestSuite, self).__init__(name, root)
+
+ def ListTests(self, context):
+ tests = []
+ for dirname, dirs, files in os.walk(self.root):
+ for dotted in [x for x in dirs if x.startswith('.')]:
+ dirs.remove(dotted)
+ dirs.sort()
+ files.sort()
+ for filename in files:
+ if filename.endswith(".js") and filename != "mjsunit.js":
+ testname = join(dirname[len(self.root) + 1:], filename[:-3])
+ test = testcase.TestCase(self, testname)
+ tests.append(test)
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ source = self.GetSourceForTest(testcase)
+ flags = [] + context.mode_flags
+ flags_match = re.findall(FLAGS_PATTERN, source)
+ for match in flags_match:
+ flags += match.strip().split()
+
+ files_list = [] # List of file names to append to command arguments.
+ files_match = FILES_PATTERN.search(source);
+ # Accept several lines of 'Files:'.
+ while True:
+ if files_match:
+ files_list += files_match.group(1).strip().split()
+ files_match = FILES_PATTERN.search(source, files_match.end())
+ else:
+ break
+ files = [ os.path.normpath(os.path.join(self.root, '..', '..', f))
+ for f in files_list ]
+ testfilename = os.path.join(self.root, testcase.path + self.suffix())
+ if SELF_SCRIPT_PATTERN.search(source):
+ env = ["-e", "TEST_FILE_NAME=\"%s\"" % testfilename]
+ files = env + files
+ files.append(os.path.join(self.root, "mjsunit.js"))
+ files.append(testfilename)
+
+ flags += files
+ if context.isolates:
+ flags.append("--isolate")
+ flags += files
+
+ return testcase.flags + flags
+
+ def GetSourceForTest(self, testcase):
+ filename = os.path.join(self.root, testcase.path + self.suffix())
+ with open(filename) as f:
+ return f.read()
+
+
+def GetSuite(name, root):
+ return MjsunitTestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+from os.path import dirname, exists, join, normpath
+import tempfile
+import test
+
+
class MjsunitTestCase(test.TestCase):
def __init__(self, path, file, mode, context, config, isolates):
@@ -56,9 +126,9 @@ class MjsunitTestCase(test.TestCase):
def GetVmCommand(self, source):
result = self.config.context.GetVmCommand(self, self.mode)
- flags_match = FLAGS_PATTERN.search(source)
- if flags_match:
- result += flags_match.group(1).strip().split()
+ flags_match = re.findall(FLAGS_PATTERN, source);
+ for match in flags_match:
+ result += match.strip().split()
return result
def GetVmArguments(self, source):
diff --git a/src/3rdparty/v8/test/mjsunit/try-finally-continue.js b/src/3rdparty/v8/test/mjsunit/try-finally-continue.js
new file mode 100644
index 0000000..b55e7ac
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/try-finally-continue.js
@@ -0,0 +1,72 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we correctly restore the stack when continuing from a
+// finally block inside a for-in.
+
+var f = 0;
+var a = [1, 2, 3];
+
+for (x in a) {
+ try{
+ throw 'error';
+ } finally {
+ f++;
+ continue;
+ }
+}
+assertEquals(3, f);
+
+f = 0;
+for (x in a) {
+ try {
+ f++;
+ } finally {
+ f++;
+ continue;
+ }
+}
+assertEquals(6, f);
+
+f = 0;
+for (x in a) {
+ try {
+ f++;
+ } finally {
+ try {
+ throw 'error'
+ } finally {
+ try {
+ f++;
+ } finally {
+ f++;
+ continue;
+ }
+ }
+ }
+}
+assertEquals(9, f); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/typed-array-slice.js b/src/3rdparty/v8/test/mjsunit/typed-array-slice.js
new file mode 100644
index 0000000..c6e7e94
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/typed-array-slice.js
@@ -0,0 +1,61 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// This is a regression test for overlapping key and value registers.
+
+var types = [Array, Int8Array, Uint8Array, Int16Array, Uint16Array,
+ Int32Array, Uint32Array, Uint8ClampedArray, Float32Array,
+ Float64Array];
+
+var results1 = [-2, -2, 254, -2, 65534, -2, 4294967294, 0, -2, -2];
+var results2 = [undefined, -1, 255, -1, 65535, -1, 4294967295, 0, -1, -1];
+var results3 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+var results4 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1];
+
+const kElementCount = 40;
+
+function do_slice(a) {
+ return Array.prototype.slice.call(a, 4, 8);
+}
+
+for (var t = 0; t < types.length; t++) {
+ var type = types[t];
+ var a = new type(kElementCount);
+ for (var i = 0; i < kElementCount; ++i ) {
+ a[i] = i-6;
+ }
+ delete a[5];
+ var sliced = do_slice(a);
+
+ %ClearFunctionTypeFeedback(do_slice);
+ assertEquals(results1[t], sliced[0]);
+ assertEquals(results2[t], sliced[1]);
+ assertEquals(results3[t], sliced[2]);
+ assertEquals(results4[t], sliced[3]);
+}
diff --git a/src/3rdparty/v8/test/mjsunit/unbox-double-arrays.js b/src/3rdparty/v8/test/mjsunit/unbox-double-arrays.js
index fd7db28..5d061ae 100644
--- a/src/3rdparty/v8/test/mjsunit/unbox-double-arrays.js
+++ b/src/3rdparty/v8/test/mjsunit/unbox-double-arrays.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,6 +28,8 @@
// Test dictionary -> double elements -> dictionary elements round trip
// Flags: --allow-natives-syntax --unbox-double-arrays --expose-gc
+// Flags: --noparallel-recompilation
+
var large_array_size = 100000;
var approx_dict_to_elements_threshold = 70000;
@@ -278,7 +280,8 @@ function testOneArrayType(allocator) {
expected_array_value(7));
%DeoptimizeFunction(test_various_loads6);
- gc();
+ %ClearFunctionTypeFeedback(test_various_stores);
+ %ClearFunctionTypeFeedback(test_various_loads7);
// Test stores for non-NaN.
var large_array = new allocator(large_array_size);
@@ -376,7 +379,7 @@ delete large_array2[5];
// Convert back to fast elements and make sure the contents of the array are
// unchanged.
large_array2[25] = new Object();
-assertTrue(%HasFastElements(large_array2));
+assertTrue(%HasFastObjectElements(large_array2));
for (var i= 0; i < approx_dict_to_elements_threshold; i += 500 ) {
if (i != 25 && i != 5) {
assertEquals(expected_array_value(i), large_array2[i]);
diff --git a/src/3rdparty/v8/test/mjsunit/with-readonly.js b/src/3rdparty/v8/test/mjsunit/with-readonly.js
index e29520a..29982b3 100644
--- a/src/3rdparty/v8/test/mjsunit/with-readonly.js
+++ b/src/3rdparty/v8/test/mjsunit/with-readonly.js
@@ -27,6 +27,8 @@
// Test that readonly variables are treated correctly.
+// Flags: --es5_readonly
+
// Create an object with a read-only length property in the prototype
// chain by putting the string split function in the prototype chain.
var o = {};
@@ -36,8 +38,8 @@ function f() {
with (o) {
length = 23;
length = 24;
- assertEquals(24, length);
+ assertEquals(2, length);
}
+ assertEquals(2, o.length);
}
f();
-
diff --git a/src/3rdparty/v8/test/mozilla/mozilla.status b/src/3rdparty/v8/test/mozilla/mozilla.status
index c30be5e..4f2fbde 100644
--- a/src/3rdparty/v8/test/mozilla/mozilla.status
+++ b/src/3rdparty/v8/test/mozilla/mozilla.status
@@ -126,13 +126,13 @@ ecma/Date/15.9.2.2-5: PASS || FAIL
ecma/Date/15.9.2.2-6: PASS || FAIL
# 1026139: These date tests fail on arm and mips
-ecma/Date/15.9.5.29-1: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.34-1: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.28-1: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Date/15.9.5.29-1: PASS || FAIL if ($arch == arm || $arch == mipsel)
+ecma/Date/15.9.5.34-1: PASS || FAIL if ($arch == arm || $arch == mipsel)
+ecma/Date/15.9.5.28-1: PASS || FAIL if ($arch == arm || $arch == mipsel)
# 1050186: Arm/MIPS vm is broken; probably unrelated to dates
-ecma/Array/15.4.4.5-3: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.22-2: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Array/15.4.4.5-3: PASS || FAIL if ($arch == arm || $arch == mipsel)
+ecma/Date/15.9.5.22-2: PASS || FAIL if ($arch == arm || $arch == mipsel)
# Flaky test that fails due to what appears to be a bug in the test.
# Occurs depending on current time
@@ -245,9 +245,6 @@ js1_5/Function/regress-338121-03: FAIL_OK
# Expectes 'prototype' property of functions to be enumerable.
js1_5/Function/10.1.6-01: FAIL_OK
-# Length of objects whose prototype chain includes a function
-ecma_3/Function/regress-313570: FAIL_OK
-
# toPrecision argument restricted to range 1..21 in JSC/V8
js1_5/Regress/regress-452346: FAIL_OK
ecma_3/Number/15.7.4.7-1: FAIL_OK
@@ -371,6 +368,10 @@ ecma/GlobalObject/15.1.2.6: FAIL_OK
ecma/GlobalObject/15.1.2.7: FAIL_OK
+# Leading zero no longer signal octal numbers (ECMA-262 Annex E 15.1.2.2).
+ecma/GlobalObject/15.1.2.2-2: FAIL_OK
+
+
# Tests that rely on specific details of function decompilation or
# print strings for errors. Non-ECMA behavior.
js1_2/function/tostring-2: FAIL_OK
@@ -606,6 +607,10 @@ ecma_2/RegExp/function-001: FAIL_OK
ecma_2/RegExp/properties-001: FAIL_OK
+# Negative hexadecimal literals are parsed as NaN. This test is outdated.
+ecma/TypeConversion/9.3.1-3: FAIL_OK
+
+
##################### FAILING TESTS #####################
# This section is for tests that fail in V8 and pass in JSC.
@@ -636,7 +641,7 @@ js1_5/Expressions/regress-394673: FAIL
# Bug 762: http://code.google.com/p/v8/issues/detail?id=762
# We do not correctly handle assignments within "with"
-/ecma_3/Statements/12.10-01: FAIL
+ecma_3/Statements/12.10-01: FAIL
# We do not throw an exception when a const is redeclared.
# (We only fail section 1 of the test.)
@@ -751,7 +756,6 @@ js1_5/extensions/regress-90596-001: FAIL_OK
js1_5/extensions/regress-90596-002: FAIL_OK
js1_5/extensions/regress-96284-001: FAIL_OK
js1_5/extensions/regress-96284-002: FAIL_OK
-js1_5/extensions/scope-001: FAIL_OK
js1_5/extensions/toLocaleFormat-01: FAIL_OK
js1_5/extensions/toLocaleFormat-02: FAIL_OK
@@ -822,12 +826,6 @@ js1_5/decompilation/regress-383721: PASS || FAIL
js1_5/decompilation/regress-406555: PASS || FAIL
js1_5/decompilation/regress-460870: PASS || FAIL
-# These tests take an unreasonable amount of time so we skip them
-# in fast mode.
-
-js1_5/Regress/regress-312588: TIMEOUT || SKIP if $FAST == yes
-js1_5/Regress/regress-271716-n: PASS || SKIP if $FAST == yes
-
[ $arch == arm ]
@@ -852,40 +850,7 @@ js1_5/Regress/regress-451322: SKIP
js1_5/GC/regress-203278-2: PASS || TIMEOUT
-[ $fast == yes && $arch == arm ]
-
-# In fast mode on arm we try to skip all tests that would time out,
-# since running the tests takes so long in the first place.
-
-js1_5/Regress/regress-280769-2: SKIP
-js1_5/Regress/regress-280769-3: SKIP
-js1_5/Regress/regress-244470: SKIP
-js1_5/Regress/regress-203278-1: SKIP
-js1_5/Regress/regress-290575: SKIP
-js1_5/Regress/regress-159334: SKIP
-js1_5/Regress/regress-321971: SKIP
-js1_5/Regress/regress-347306-01: SKIP
-js1_5/Regress/regress-280769-1: SKIP
-js1_5/Regress/regress-280769-5: SKIP
-js1_5/GC/regress-306788: SKIP
-js1_5/GC/regress-278725: SKIP
-js1_5/GC/regress-203278-3: SKIP
-js1_5/GC/regress-311497: SKIP
-js1_5/Array/regress-99120-02: SKIP
-ecma/Date/15.9.5.22-1: SKIP
-ecma/Date/15.9.5.20: SKIP
-ecma/Date/15.9.5.12-2: SKIP
-ecma/Date/15.9.5.8: SKIP
-ecma/Date/15.9.5.9: SKIP
-ecma/Date/15.9.5.11-2: SKIP
-ecma/Expressions/11.7.2: SKIP
-ecma/Expressions/11.10-2: SKIP
-ecma/Expressions/11.7.3: SKIP
-ecma/Expressions/11.10-3: SKIP
-ecma/Expressions/11.7.1: SKIP
-ecma_3/RegExp/regress-209067: SKIP
-
-[ $arch == mips ]
+[ $arch == mipsel ]
# Times out and print so much output that we need to skip it to not
# hang the builder.
@@ -906,37 +871,3 @@ js1_5/Regress/regress-451322: SKIP
# BUG(1040): Allow this test to timeout.
js1_5/GC/regress-203278-2: PASS || TIMEOUT
-
-
-[ $fast == yes && $arch == mips ]
-
-# In fast mode on mips we try to skip all tests that would time out,
-# since running the tests takes so long in the first place.
-
-js1_5/Regress/regress-280769-2: SKIP
-js1_5/Regress/regress-280769-3: SKIP
-js1_5/Regress/regress-244470: SKIP
-js1_5/Regress/regress-203278-1: SKIP
-js1_5/Regress/regress-290575: SKIP
-js1_5/Regress/regress-159334: SKIP
-js1_5/Regress/regress-321971: SKIP
-js1_5/Regress/regress-347306-01: SKIP
-js1_5/Regress/regress-280769-1: SKIP
-js1_5/Regress/regress-280769-5: SKIP
-js1_5/GC/regress-306788: SKIP
-js1_5/GC/regress-278725: SKIP
-js1_5/GC/regress-203278-3: SKIP
-js1_5/GC/regress-311497: SKIP
-js1_5/Array/regress-99120-02: SKIP
-ecma/Date/15.9.5.22-1: SKIP
-ecma/Date/15.9.5.20: SKIP
-ecma/Date/15.9.5.12-2: SKIP
-ecma/Date/15.9.5.8: SKIP
-ecma/Date/15.9.5.9: SKIP
-ecma/Date/15.9.5.11-2: SKIP
-ecma/Expressions/11.7.2: SKIP
-ecma/Expressions/11.10-2: SKIP
-ecma/Expressions/11.7.3: SKIP
-ecma/Expressions/11.10-3: SKIP
-ecma/Expressions/11.7.1: SKIP
-ecma_3/RegExp/regress-209067: SKIP
diff --git a/src/3rdparty/v8/test/mozilla/testcfg.py b/src/3rdparty/v8/test/mozilla/testcfg.py
index 587781d..5aeac4c 100644
--- a/src/3rdparty/v8/test/mozilla/testcfg.py
+++ b/src/3rdparty/v8/test/mozilla/testcfg.py
@@ -26,12 +26,19 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
import os
-from os.path import join, exists
+import shutil
+import subprocess
+import tarfile
+
+from testrunner.local import testsuite
+from testrunner.objects import testcase
+
+MOZILLA_VERSION = "2010-06-29"
-EXCLUDED = ['CVS']
+
+EXCLUDED = ["CVS"]
FRAMEWORK = """
@@ -54,6 +61,117 @@ TEST_DIRS = """
""".split()
+class MozillaTestSuite(testsuite.TestSuite):
+
+ def __init__(self, name, root):
+ super(MozillaTestSuite, self).__init__(name, root)
+ self.testroot = os.path.join(root, "data")
+
+ def ListTests(self, context):
+ tests = []
+ for testdir in TEST_DIRS:
+ current_root = os.path.join(self.testroot, testdir)
+ for dirname, dirs, files in os.walk(current_root):
+ for dotted in [x for x in dirs if x.startswith(".")]:
+ dirs.remove(dotted)
+ for excluded in EXCLUDED:
+ if excluded in dirs:
+ dirs.remove(excluded)
+ dirs.sort()
+ files.sort()
+ for filename in files:
+ if filename.endswith(".js") and not filename in FRAMEWORK:
+ testname = os.path.join(dirname[len(self.testroot) + 1:],
+ filename[:-3])
+ case = testcase.TestCase(self, testname)
+ tests.append(case)
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ result = []
+ result += context.mode_flags
+ result += ["--expose-gc"]
+ result += [os.path.join(self.root, "mozilla-shell-emulation.js")]
+ testfilename = testcase.path + ".js"
+ testfilepath = testfilename.split(os.path.sep)
+ for i in xrange(len(testfilepath)):
+ script = os.path.join(self.testroot,
+ reduce(os.path.join, testfilepath[:i], ""),
+ "shell.js")
+ if os.path.exists(script):
+ result.append(script)
+ result.append(os.path.join(self.testroot, testfilename))
+ return testcase.flags + result
+
+ def GetSourceForTest(self, testcase):
+ filename = join(self.testroot, testcase.path + ".js")
+ with open(filename) as f:
+ return f.read()
+
+ def IsNegativeTest(self, testcase):
+ return testcase.path.endswith("-n")
+
+ def IsFailureOutput(self, output, testpath):
+ if output.exit_code != 0:
+ return True
+ return "FAILED!" in output.stdout
+
+ def DownloadData(self):
+ old_cwd = os.getcwd()
+ os.chdir(os.path.abspath(self.root))
+
+ # Maybe we're still up to date?
+ versionfile = "CHECKED_OUT_VERSION"
+ checked_out_version = None
+ if os.path.exists(versionfile):
+ with open(versionfile) as f:
+ checked_out_version = f.read()
+ if checked_out_version == MOZILLA_VERSION:
+ os.chdir(old_cwd)
+ return
+
+ # If we have a local archive file with the test data, extract it.
+ directory_name = "data"
+ if os.path.exists(directory_name):
+ os.rename(directory_name, "data.old")
+ archive_file = "downloaded_%s.tar.gz" % MOZILLA_VERSION
+ if os.path.exists(archive_file):
+ with tarfile.open(archive_file, "r:gz") as tar:
+ tar.extractall()
+ with open(versionfile, "w") as f:
+ f.write(MOZILLA_VERSION)
+ os.chdir(old_cwd)
+ return
+
+ # No cached copy. Check out via CVS, and pack as .tar.gz for later use.
+ command = ("cvs -d :pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot"
+ " co -D %s mozilla/js/tests" % MOZILLA_VERSION)
+ code = subprocess.call(command, shell=True)
+ if code != 0:
+ os.chdir(old_cwd)
+ raise Exception("Error checking out Mozilla test suite!")
+ os.rename(join("mozilla", "js", "tests"), directory_name)
+ shutil.rmtree("mozilla")
+ with tarfile.open(archive_file, "w:gz") as tar:
+ tar.add("data")
+ with open(versionfile, "w") as f:
+ f.write(MOZILLA_VERSION)
+ os.chdir(old_cwd)
+
+
+def GetSuite(name, root):
+ return MozillaTestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+from os.path import exists
+from os.path import join
+import test
+
+
class MozillaTestCase(test.TestCase):
def __init__(self, filename, path, context, root, mode, framework):
@@ -76,6 +194,7 @@ class MozillaTestCase(test.TestCase):
def GetCommand(self):
result = self.context.GetVmCommand(self, self.mode) + \
[ '--expose-gc', join(self.root, 'mozilla-shell-emulation.js') ]
+ result += [ '--es5_readonly' ] # Temporary hack until we can remove flag
result += self.framework
result.append(self.filename)
return result
diff --git a/src/3rdparty/v8/test/preparser/preparser.status b/src/3rdparty/v8/test/preparser/preparser.status
index 6f15fed..40c5caf 100644
--- a/src/3rdparty/v8/test/preparser/preparser.status
+++ b/src/3rdparty/v8/test/preparser/preparser.status
@@ -31,3 +31,8 @@ prefix preparser
# escapes (we need to parse to distinguish octal escapes from valid
# back-references).
strict-octal-regexp: FAIL
+
+[ $arch == android_arm || $arch == android_ia32 ]
+# Remove this once the issue above is fixed. Android test runner does not
+# handle "FAIL" test expectation correctly.
+strict-octal-regexp: SKIP
diff --git a/src/3rdparty/v8/test/preparser/strict-identifiers.pyt b/src/3rdparty/v8/test/preparser/strict-identifiers.pyt
index aa3d521..f979088 100644
--- a/src/3rdparty/v8/test/preparser/strict-identifiers.pyt
+++ b/src/3rdparty/v8/test/preparser/strict-identifiers.pyt
@@ -285,4 +285,4 @@ for reserved_word in reserved_words + strict_reserved_words:
# Future reserved words in strict mode behave like normal identifiers
# in a non strict context.
for reserved_word in strict_reserved_words:
- non_strict_use({"id": id}, None)
+ non_strict_use({"id": reserved_word}, None)
diff --git a/src/3rdparty/v8/test/preparser/testcfg.py b/src/3rdparty/v8/test/preparser/testcfg.py
index 88c06a3..61c14c9 100644
--- a/src/3rdparty/v8/test/preparser/testcfg.py
+++ b/src/3rdparty/v8/test/preparser/testcfg.py
@@ -25,13 +25,109 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
+
import os
-from os.path import join, dirname, exists, isfile
-import platform
-import utils
import re
+from testrunner.local import testsuite
+from testrunner.local import utils
+from testrunner.objects import testcase
+
+
+class PreparserTestSuite(testsuite.TestSuite):
+ def __init__(self, name, root):
+ super(PreparserTestSuite, self).__init__(name, root)
+
+ def shell(self):
+ return "preparser"
+
+ def _GetExpectations(self):
+ expects_file = join(self.root, "preparser.expectation")
+ expectations_map = {}
+ if not os.path.exists(expects_file): return expectations_map
+ rule_regex = re.compile("^([\w\-]+)(?::([\w\-]+))?(?::(\d+),(\d+))?$")
+ for line in utils.ReadLinesFrom(expects_file):
+ rule_match = rule_regex.match(line)
+ if not rule_match: continue
+ expects = []
+ if (rule_match.group(2)):
+ expects += [rule_match.group(2)]
+ if (rule_match.group(3)):
+ expects += [rule_match.group(3), rule_match.group(4)]
+ expectations_map[rule_match.group(1)] = " ".join(expects)
+ return expectations_map
+
+ def _ParsePythonTestTemplates(self, result, filename):
+ pathname = join(self.root, filename + ".pyt")
+ def Test(name, source, expectation):
+ source = source.replace("\n", " ")
+ testname = os.path.join(filename, name)
+ flags = ["-e", source]
+ if expectation:
+ flags += ["throws", expectation]
+ test = testcase.TestCase(self, testname, flags=flags)
+ result.append(test)
+ def Template(name, source):
+ def MkTest(replacement, expectation):
+ testname = name
+ testsource = source
+ for key in replacement.keys():
+ testname = testname.replace("$" + key, replacement[key]);
+ testsource = testsource.replace("$" + key, replacement[key]);
+ Test(testname, testsource, expectation)
+ return MkTest
+ execfile(pathname, {"Test": Test, "Template": Template})
+
+ def ListTests(self, context):
+ expectations = self._GetExpectations()
+ result = []
+
+ # Find all .js files in this directory.
+ filenames = [f[:-3] for f in os.listdir(self.root) if f.endswith(".js")]
+ filenames.sort()
+ for f in filenames:
+ throws = expectations.get(f, None)
+ flags = [f + ".js"]
+ if throws:
+ flags += ["throws", throws]
+ test = testcase.TestCase(self, f, flags=flags)
+ result.append(test)
+
+ # Find all .pyt files in this directory.
+ filenames = [f[:-4] for f in os.listdir(self.root) if f.endswith(".pyt")]
+ filenames.sort()
+ for f in filenames:
+ self._ParsePythonTestTemplates(result, f)
+ return result
+
+ def GetFlagsForTestCase(self, testcase, context):
+ first = testcase.flags[0]
+ if first != "-e":
+ testcase.flags[0] = os.path.join(self.root, first)
+ return testcase.flags
+
+ def GetSourceForTest(self, testcase):
+ if testcase.flags[0] == "-e":
+ return testcase.flags[1]
+ with open(testcase.flags[0]) as f:
+ return f.read()
+
+ def VariantFlags(self):
+ return [[]];
+
+
+def GetSuite(name, root):
+ return PreparserTestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+from os.path import join, exists, isfile
+import test
+
+
class PreparserTestCase(test.TestCase):
def __init__(self, root, path, executable, mode, throws, context, source):
@@ -50,7 +146,7 @@ class PreparserTestCase(test.TestCase):
def HasSource(self):
return self.source is not None
- def GetSource():
+ def GetSource(self):
return self.source
def BuildCommand(self, path):
diff --git a/src/3rdparty/v8/test/sputnik/sputnik.status b/src/3rdparty/v8/test/sputnik/sputnik.status
index 52d126e..67d1c75 100644
--- a/src/3rdparty/v8/test/sputnik/sputnik.status
+++ b/src/3rdparty/v8/test/sputnik/sputnik.status
@@ -216,7 +216,7 @@ S15.1.3.4_A2.3_T1: SKIP
S15.1.3.1_A2.5_T1: SKIP
S15.1.3.2_A2.5_T1: SKIP
-[ $arch == mips ]
+[ $arch == mipsel ]
# BUG(3251225): Tests that timeout with --nocrankshaft.
S15.1.3.1_A2.5_T1: SKIP
diff --git a/src/3rdparty/v8/test/sputnik/testcfg.py b/src/3rdparty/v8/test/sputnik/testcfg.py
index 1032c13..b6f3746 100644
--- a/src/3rdparty/v8/test/sputnik/testcfg.py
+++ b/src/3rdparty/v8/test/sputnik/testcfg.py
@@ -33,6 +33,11 @@ import test
import time
+def GetSuite(name, root):
+ # Not implemented.
+ return None
+
+
class SputnikTestCase(test.TestCase):
def __init__(self, case, path, context, mode):
diff --git a/src/3rdparty/v8/test/test262/test262.status b/src/3rdparty/v8/test/test262/test262.status
index c755289..06b43c7 100644
--- a/src/3rdparty/v8/test/test262/test262.status
+++ b/src/3rdparty/v8/test/test262/test262.status
@@ -39,23 +39,8 @@ S15.12.2_A1: FAIL
# V8 Bug: http://code.google.com/p/v8/issues/detail?id=691
11.2.3-3_3: FAIL
-# Prototypal inheritance of properties does not maintain accessibility.
-# The [[CanPut]] operation should traverse the prototype chain to
-# determine whether given property is writable or not.
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1475
-8.14.4-8-b_1: FAIL
-8.14.4-8-b_2: FAIL
-15.2.3.6-4-405: FAIL
-15.2.3.6-4-410: FAIL
-15.2.3.6-4-415: FAIL
-15.2.3.6-4-420: FAIL
-
##################### DELIBERATE INCOMPATIBILITIES #####################
-# We deliberately treat arguments to parseInt() with a leading zero as
-# octal numbers in order to not break the web.
-S15.1.2.2_A5.1_T1: FAIL_OK
-
# This tests precision of Math.tan and Math.sin. The implementation for those
# trigonometric functions are platform/compiler dependent. Furthermore, the
# expectation values by far deviates from the actual result given by an
@@ -86,33 +71,19 @@ S15.9.3.1_A5_T6: PASS || FAIL_OK
############################ SKIPPED TESTS #############################
# These tests take a looong time to run in debug mode.
-S15.1.3.2_A2.5_T1: PASS, SKIP if $mode == debug
S15.1.3.1_A2.5_T1: PASS, SKIP if $mode == debug
+S15.1.3.2_A2.5_T1: PASS, SKIP if $mode == debug
-[ $arch == arm ]
-
-# BUG(3251225): Tests that timeout with --nocrankshaft.
-S15.1.3.1_A2.5_T1: SKIP
-S15.1.3.2_A2.5_T1: SKIP
-S15.1.3.1_A2.4_T1: SKIP
-S15.1.3.1_A2.5_T1: SKIP
-S15.1.3.2_A2.4_T1: SKIP
-S15.1.3.2_A2.5_T1: SKIP
-S15.1.3.3_A2.3_T1: SKIP
-S15.1.3.4_A2.3_T1: SKIP
-S15.1.3.1_A2.5_T1: SKIP
-S15.1.3.2_A2.5_T1: SKIP
+[ $arch == arm || $arch == mipsel ]
-[ $arch == mips ]
+# TODO(mstarzinger): Causes stack overflow on simulators due to eager
+# compilation of parenthesized function literals. Needs investigation.
+S13.2.1_A1_T1: SKIP
# BUG(3251225): Tests that timeout with --nocrankshaft.
-S15.1.3.1_A2.5_T1: SKIP
-S15.1.3.2_A2.5_T1: SKIP
S15.1.3.1_A2.4_T1: SKIP
S15.1.3.1_A2.5_T1: SKIP
S15.1.3.2_A2.4_T1: SKIP
S15.1.3.2_A2.5_T1: SKIP
S15.1.3.3_A2.3_T1: SKIP
S15.1.3.4_A2.3_T1: SKIP
-S15.1.3.1_A2.5_T1: SKIP
-S15.1.3.2_A2.5_T1: SKIP
diff --git a/src/3rdparty/v8/test/test262/testcfg.py b/src/3rdparty/v8/test/test262/testcfg.py
index 07f760c..875a4e5 100644
--- a/src/3rdparty/v8/test/test262/testcfg.py
+++ b/src/3rdparty/v8/test/test262/testcfg.py
@@ -26,19 +26,107 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-import test
-import os
-from os.path import join, exists
-import urllib
import hashlib
+import os
import sys
import tarfile
+import urllib
+
+from testrunner.local import testsuite
+from testrunner.objects import testcase
+
+
+TEST_262_ARCHIVE_REVISION = "fb327c439e20" # This is the r334 revision.
+TEST_262_ARCHIVE_MD5 = "307acd166ec34629592f240dc12d57ed"
+TEST_262_URL = "http://hg.ecmascript.org/tests/test262/archive/%s.tar.bz2"
+TEST_262_HARNESS = ["sta.js"]
+
+
+class Test262TestSuite(testsuite.TestSuite):
+
+ def __init__(self, name, root):
+ super(Test262TestSuite, self).__init__(name, root)
+ self.testroot = os.path.join(root, "data", "test", "suite")
+ self.harness = [os.path.join(self.root, "data", "test", "harness", f)
+ for f in TEST_262_HARNESS]
+ self.harness += [os.path.join(self.root, "harness-adapt.js")]
+
+ def CommonTestName(self, testcase):
+ return testcase.path.split(os.path.sep)[-1]
+ def ListTests(self, context):
+ tests = []
+ for dirname, dirs, files in os.walk(self.testroot):
+ for dotted in [x for x in dirs if x.startswith(".")]:
+ dirs.remove(dotted)
+ dirs.sort()
+ files.sort()
+ for filename in files:
+ if filename.endswith(".js"):
+ testname = os.path.join(dirname[len(self.testroot) + 1:],
+ filename[:-3])
+ case = testcase.TestCase(self, testname)
+ tests.append(case)
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ return (testcase.flags + context.mode_flags + self.harness +
+ [os.path.join(self.testroot, testcase.path + ".js")])
+
+ def GetSourceForTest(self, testcase):
+ filename = os.path.join(self.testroot, testcase.path + ".js")
+ with open(filename) as f:
+ return f.read()
+
+ def IsNegativeTest(self, testcase):
+ return "@negative" in self.GetSourceForTest(testcase)
+
+ def IsFailureOutput(self, output, testpath):
+ if output.exit_code != 0:
+ return True
+ return "FAILED!" in output.stdout
-TEST_262_ARCHIVE_REVISION = 'fb327c439e20' # This is the r334 revision.
-TEST_262_ARCHIVE_MD5 = '307acd166ec34629592f240dc12d57ed'
-TEST_262_URL = 'http://hg.ecmascript.org/tests/test262/archive/%s.tar.bz2'
-TEST_262_HARNESS = ['sta.js']
+ def DownloadData(self):
+ revision = TEST_262_ARCHIVE_REVISION
+ archive_url = TEST_262_URL % revision
+ archive_name = os.path.join(self.root, "test262-%s.tar.bz2" % revision)
+ directory_name = os.path.join(self.root, "data")
+ directory_old_name = os.path.join(self.root, "data.old")
+ if not os.path.exists(archive_name):
+ print "Downloading test data from %s ..." % archive_url
+ urllib.urlretrieve(archive_url, archive_name)
+ if os.path.exists(directory_name):
+ os.rename(directory_name, directory_old_name)
+ if not os.path.exists(directory_name):
+ print "Extracting test262-%s.tar.bz2 ..." % revision
+ md5 = hashlib.md5()
+ with open(archive_name, "rb") as f:
+ for chunk in iter(lambda: f.read(8192), ""):
+ md5.update(chunk)
+ if md5.hexdigest() != TEST_262_ARCHIVE_MD5:
+ os.remove(archive_name)
+ raise Exception("Hash mismatch of test data file")
+ archive = tarfile.open(archive_name, "r:bz2")
+ if sys.platform in ("win32", "cygwin"):
+ # Magic incantation to allow longer path names on Windows.
+ archive.extractall(u"\\\\?\\%s" % self.root)
+ else:
+ archive.extractall(self.root)
+ os.rename(os.path.join(self.root, "test262-%s" % revision),
+ directory_name)
+
+
+def GetSuite(name, root):
+ return Test262TestSuite(name, root)
+
+
+# Deprecated definitions below.
+# TODO(jkummerow): Remove when SCons is no longer supported.
+
+
+from os.path import exists
+from os.path import join
+import test
class Test262TestCase(test.TestCase):
@@ -62,6 +150,7 @@ class Test262TestCase(test.TestCase):
def GetCommand(self):
result = self.context.GetVmCommand(self, self.mode)
+ result += [ '--es5_readonly' ] # Temporary hack until we can remove flag
result += self.framework
result.append(self.filename)
return result
diff --git a/src/3rdparty/v8/tools/android-build.sh b/src/3rdparty/v8/tools/android-build.sh
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/src/3rdparty/v8/tools/android-build.sh
diff --git a/src/3rdparty/v8/tools/android-ll-prof.sh b/src/3rdparty/v8/tools/android-ll-prof.sh
new file mode 100755
index 0000000..436f262
--- /dev/null
+++ b/src/3rdparty/v8/tools/android-ll-prof.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Runs d8 with the given arguments on the device under 'perf' and
+# processes the profiler trace and v8 logs using ll_prof.py.
+#
+# Usage:
+# > ./tools/android-ll-prof.sh (debug|release) "args to d8" "args to ll_prof.py"
+#
+# The script creates deploy directory deploy/data/local/tmp/v8, copies there
+# the d8 binary either from out/android_arm.release or out/android_arm.debug,
+# and then sync the deploy directory with /data/local/tmp/v8 on the device.
+# You can put JS files in the deploy directory before running the script.
+# Note: $ANDROID_NDK_ROOT must be set.
+
+MODE=$1
+RUN_ARGS=$2
+LL_PROF_ARGS=$3
+
+BASE=`cd $(dirname "$0")/..; pwd`
+DEPLOY="$BASE/deploy"
+
+set +e
+mkdir -p "$DEPLOY/data/local/tmp/v8"
+
+cp "$BASE/out/android_arm.$MODE/d8" "$DEPLOY/data/local/tmp/v8/d8"
+
+adb -p "$DEPLOY" sync data
+
+adb shell "cd /data/local/tmp/v8;\
+ perf record -R -e cycles -c 10000 -f -i \
+ ./d8 --ll_prof --gc-fake-mmap=/data/local/tmp/__v8_gc__ $RUN_ARGS"
+
+adb pull /data/local/tmp/v8/v8.log .
+adb pull /data/local/tmp/v8/v8.log.ll .
+adb pull /data/perf.data .
+
+ARCH=arm-linux-androideabi-4.6
+TOOLCHAIN="${ANDROID_NDK_ROOT}/toolchains/$ARCH/prebuilt/linux-x86/bin"
+
+$BASE/tools/ll_prof.py --host-root="$BASE/deploy" \
+ --gc-fake-mmap=/data/local/tmp/__v8_gc__ \
+ --objdump="$TOOLCHAIN/arm-linux-androideabi-objdump" \
+ $LL_PROF_ARGS
diff --git a/src/3rdparty/v8/tools/android-run.py b/src/3rdparty/v8/tools/android-run.py
new file mode 100755
index 0000000..1693c5b
--- /dev/null
+++ b/src/3rdparty/v8/tools/android-run.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This script executes the passed command line on Android device
+# using 'adb shell' command. Unfortunately, 'adb shell' always
+# returns exit code 0, ignoring the exit code of executed command.
+# Since we need to return non-zero exit code if the command failed,
+# we augment the passed command line with exit code checking statement
+# and output special error string in case of non-zero exit code.
+# Then we parse the output of 'adb shell' and look for that error string.
+
+import os
+from os.path import join, dirname, abspath
+import subprocess
+import sys
+import tempfile
+
+def Check(output, errors):
+ failed = any([s.startswith('/system/bin/sh:') or s.startswith('ANDROID')
+ for s in output.split('\n')])
+ return 1 if failed else 0
+
+def Execute(cmdline):
+ (fd_out, outname) = tempfile.mkstemp()
+ (fd_err, errname) = tempfile.mkstemp()
+ process = subprocess.Popen(
+ args=cmdline,
+ shell=True,
+ stdout=fd_out,
+ stderr=fd_err,
+ )
+ exit_code = process.wait()
+ os.close(fd_out)
+ os.close(fd_err)
+ output = file(outname).read()
+ errors = file(errname).read()
+ os.unlink(outname)
+ os.unlink(errname)
+ sys.stdout.write(output)
+ sys.stderr.write(errors)
+ return exit_code or Check(output, errors)
+
+def Escape(arg):
+ def ShouldEscape():
+ for x in arg:
+ if not x.isalnum() and x != '-' and x != '_':
+ return True
+ return False
+
+ return arg if not ShouldEscape() else '"%s"' % (arg.replace('"', '\\"'))
+
+def WriteToTemporaryFile(data):
+ (fd, fname) = tempfile.mkstemp()
+ os.close(fd)
+ tmp_file = open(fname, "w")
+ tmp_file.write(data)
+ tmp_file.close()
+ return fname
+
+def Main():
+ if (len(sys.argv) == 1):
+ print("Usage: %s <command-to-run-on-device>" % sys.argv[0])
+ return 1
+ workspace = abspath(join(dirname(sys.argv[0]), '..'))
+ android_workspace = os.getenv("ANDROID_V8", "/data/local/v8")
+ args = [Escape(arg) for arg in sys.argv[1:]]
+ script = (" ".join(args) + "\n"
+ "case $? in\n"
+ " 0) ;;\n"
+ " *) echo \"ANDROID: Error returned by test\";;\n"
+ "esac\n")
+ script = script.replace(workspace, android_workspace)
+ script_file = WriteToTemporaryFile(script)
+ android_script_file = android_workspace + "/" + script_file
+ command = ("adb push '%s' %s;" % (script_file, android_script_file) +
+ "adb shell 'sh %s';" % android_script_file +
+ "adb shell 'rm %s'" % android_script_file)
+ error_code = Execute(command)
+ os.unlink(script_file)
+ return error_code
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/src/3rdparty/v8/tools/android-sync.sh b/src/3rdparty/v8/tools/android-sync.sh
new file mode 100755
index 0000000..5d4ef2e
--- /dev/null
+++ b/src/3rdparty/v8/tools/android-sync.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This script pushes android binaries and test data to the device.
+# The first argument can be either "android.release" or "android.debug".
+# The second argument is a relative path to the output directory with binaries.
+# The third argument is the absolute path to the V8 directory on the host.
+# The fourth argument is the absolute path to the V8 directory on the device.
+
+if [ ${#@} -lt 4 ] ; then
+ echo "$0: Error: need 4 arguments"
+ exit 1
+fi
+
+ARCH_MODE=$1
+OUTDIR=$2
+HOST_V8=$3
+ANDROID_V8=$4
+
+function LINUX_MD5 {
+ local HASH=$(md5sum $1)
+ echo ${HASH%% *}
+}
+
+function DARWIN_MD5 {
+ local HASH=$(md5 $1)
+ echo ${HASH} | cut -f2 -d "=" | cut -f2 -d " "
+}
+
+host_os=$(uname -s)
+case "${host_os}" in
+ "Linux")
+ MD5=LINUX_MD5
+ ;;
+ "Darwin")
+ MD5=DARWIN_MD5
+ ;;
+ *)
+ echo "$0: Host platform ${host_os} is not supported" >& 2
+ exit 1
+esac
+
+function sync_file {
+ local FILE=$1
+ local ANDROID_HASH=$(adb shell "md5 \"$ANDROID_V8/$FILE\"")
+ local HOST_HASH=$($MD5 "$HOST_V8/$FILE")
+ if [ "${ANDROID_HASH%% *}" != "${HOST_HASH}" ]; then
+ adb push "$HOST_V8/$FILE" "$ANDROID_V8/$FILE" &> /dev/null
+ fi
+ echo -n "."
+}
+
+function sync_dir {
+ local DIR=$1
+ echo -n "sync to $ANDROID_V8/$DIR"
+ for FILE in $(find "$HOST_V8/$DIR" -not -path "*.svn*" -type f); do
+ local RELATIVE_FILE=${FILE:${#HOST_V8}}
+ sync_file "$RELATIVE_FILE"
+ done
+ echo ""
+}
+
+echo -n "sync to $ANDROID_V8/$OUTDIR/$ARCH_MODE"
+sync_file "$OUTDIR/$ARCH_MODE/cctest"
+sync_file "$OUTDIR/$ARCH_MODE/d8"
+sync_file "$OUTDIR/$ARCH_MODE/preparser"
+echo ""
+echo -n "sync to $ANDROID_V8/tools"
+sync_file tools/consarray.js
+sync_file tools/codemap.js
+sync_file tools/csvparser.js
+sync_file tools/profile.js
+sync_file tools/splaytree.js
+sync_file tools/profile_view.js
+sync_file tools/logreader.js
+sync_file tools/tickprocessor.js
+echo ""
+sync_dir test/message
+sync_dir test/mjsunit
+sync_dir test/preparser
diff --git a/src/3rdparty/v8/tools/bash-completion.sh b/src/3rdparty/v8/tools/bash-completion.sh
index 9f65c67..9f65c67 100644..100755
--- a/src/3rdparty/v8/tools/bash-completion.sh
+++ b/src/3rdparty/v8/tools/bash-completion.sh
diff --git a/src/3rdparty/v8/tools/check-static-initializers.sh b/src/3rdparty/v8/tools/check-static-initializers.sh
index 1103a97..1103a97 100644..100755
--- a/src/3rdparty/v8/tools/check-static-initializers.sh
+++ b/src/3rdparty/v8/tools/check-static-initializers.sh
diff --git a/src/3rdparty/v8/tools/common-includes.sh b/src/3rdparty/v8/tools/common-includes.sh
index 2b806ca..7785e9f 100644
--- a/src/3rdparty/v8/tools/common-includes.sh
+++ b/src/3rdparty/v8/tools/common-includes.sh
@@ -36,9 +36,7 @@ TEMP_BRANCH=$BRANCHNAME-temporary-branch-created-by-script
VERSION_FILE="src/version.cc"
CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
PATCH_FILE="$PERSISTFILE_BASENAME-patch"
-PATCH_OUTPUT_FILE="$PERSISTFILE_BASENAME-patch-output"
COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
-TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
START_STEP=0
CURRENT_STEP=0
@@ -180,26 +178,21 @@ the uploaded CL."
done
}
-# Takes a file containing the patch to apply as first argument.
-apply_patch() {
- patch $REVERSE_PATCH -p1 < "$1" > "$PATCH_OUTPUT_FILE" || \
- { cat "$PATCH_OUTPUT_FILE" && die "Applying the patch failed."; }
- tee < "$PATCH_OUTPUT_FILE" >(grep "patching file" \
- | awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
- rm "$PATCH_OUTPUT_FILE"
+wait_for_resolving_conflicts() {
+ echo "Applying the patch \"$1\" failed. Either type \"ABORT<Return>\", or \
+resolve the conflicts, stage *all* touched files with 'git add', and \
+type \"RESOLVED<Return>\""
+ unset ANSWER
+ while [ "$ANSWER" != "RESOLVED" ] ; do
+ [[ "$ANSWER" == "ABORT" ]] && die "Applying the patch failed."
+ [[ -n "$ANSWER" ]] && echo "That was not 'RESOLVED' or 'ABORT'."
+ echo -n "> "
+ read ANSWER
+ done
}
-stage_files() {
- # Stage added and modified files.
- TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
- for FILE in $TOUCHED_FILES ; do
- git add "$FILE"
- done
- # Stage deleted files.
- DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
- | awk '{print $NF}')
- for FILE in $DELETED_FILES ; do
- git rm "$FILE"
- done
- rm -f "$TOUCHED_FILES_FILE"
+# Takes a file containing the patch to apply as first argument.
+apply_patch() {
+ git apply --index --reject $REVERSE_PATCH "$1" || \
+ wait_for_resolving_conflicts "$1";
}
diff --git a/src/3rdparty/v8/tools/fuzz-harness.sh b/src/3rdparty/v8/tools/fuzz-harness.sh
new file mode 100755
index 0000000..efbf864
--- /dev/null
+++ b/src/3rdparty/v8/tools/fuzz-harness.sh
@@ -0,0 +1,92 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# A simple harness that downloads and runs 'jsfunfuzz' against d8. This
+# takes a long time because it runs many iterations and is intended for
+# automated usage. The package containing 'jsfunfuzz' can be found as an
+# attachment to this bug:
+# https://bugzilla.mozilla.org/show_bug.cgi?id=jsfunfuzz
+
+JSFUNFUZZ_URL="https://bugzilla.mozilla.org/attachment.cgi?id=310631"
+JSFUNFUZZ_MD5="d0e497201c5cd7bffbb1cdc1574f4e32"
+
+v8_root=$(readlink -f $(dirname $BASH_SOURCE)/../)
+
+if [ -n "$1" ]; then
+ d8="${v8_root}/$1"
+else
+ d8="${v8_root}/d8"
+fi
+
+if [ ! -f "$d8" ]; then
+ echo "Failed to find d8 binary: $d8"
+ exit 1
+fi
+
+jsfunfuzz_file="$v8_root/tools/jsfunfuzz.zip"
+if [ ! -f "$jsfunfuzz_file" ]; then
+ echo "Downloading $jsfunfuzz_file ..."
+ wget -q -O "$jsfunfuzz_file" $JSFUNFUZZ_URL || exit 1
+fi
+
+jsfunfuzz_sum=$(md5sum "$jsfunfuzz_file" | awk '{ print $1 }')
+if [ $jsfunfuzz_sum != $JSFUNFUZZ_MD5 ]; then
+ echo "Failed to verify checksum!"
+ exit 1
+fi
+
+jsfunfuzz_dir="$v8_root/tools/jsfunfuzz"
+if [ ! -d "$jsfunfuzz_dir" ]; then
+ echo "Unpacking into $jsfunfuzz_dir ..."
+ unzip "$jsfunfuzz_file" -d "$jsfunfuzz_dir" || exit 1
+ echo "Patching runner ..."
+ cat << EOF | patch -s -p0 -d "$v8_root"
+--- tools/jsfunfuzz/jsfunfuzz/multi_timed_run.py~
++++ tools/jsfunfuzz/jsfunfuzz/multi_timed_run.py
+@@ -125,7 +125,7 @@
+
+ def many_timed_runs():
+ iteration = 0
+- while True:
++ while iteration < 100:
+ iteration += 1
+ logfilename = "w%d" % iteration
+ one_timed_run(logfilename)
+EOF
+fi
+
+flags='--debug-code --expose-gc --verify-gc'
+python -u "$jsfunfuzz_dir/jsfunfuzz/multi_timed_run.py" 300 \
+ "$d8" $flags "$jsfunfuzz_dir/jsfunfuzz/jsfunfuzz.js"
+exit_code=$(cat w* | grep " looking good" -c)
+exit_code=$((100-exit_code))
+tar -cjf fuzz-results-$(date +%y%m%d).tar.bz2 err-* w*
+rm -f err-* w*
+
+echo "Total failures: $exit_code"
+exit $exit_code
diff --git a/src/3rdparty/v8/tools/gen-postmortem-metadata.py b/src/3rdparty/v8/tools/gen-postmortem-metadata.py
index b9b1625..71f58bf 100644
--- a/src/3rdparty/v8/tools/gen-postmortem-metadata.py
+++ b/src/3rdparty/v8/tools/gen-postmortem-metadata.py
@@ -61,7 +61,7 @@ consts_misc = [
{ 'name': 'StringEncodingMask', 'value': 'kStringEncodingMask' },
{ 'name': 'TwoByteStringTag', 'value': 'kTwoByteStringTag' },
- { 'name': 'AsciiStringTag', 'value': 'kAsciiStringTag' },
+ { 'name': 'AsciiStringTag', 'value': 'kOneByteStringTag' },
{ 'name': 'StringRepresentationMask',
'value': 'kStringRepresentationMask' },
@@ -78,8 +78,8 @@ consts_misc = [
{ 'name': 'SmiValueShift', 'value': 'kSmiTagSize' },
{ 'name': 'PointerSizeLog2', 'value': 'kPointerSizeLog2' },
- { 'name': 'prop_idx_content',
- 'value': 'DescriptorArray::kContentArrayIndex' },
+ { 'name': 'prop_idx_transitions',
+ 'value': 'DescriptorArray::kTransitionsIndex' },
{ 'name': 'prop_idx_first',
'value': 'DescriptorArray::kFirstIndex' },
{ 'name': 'prop_type_field',
diff --git a/src/3rdparty/v8/tools/grokdump.py b/src/3rdparty/v8/tools/grokdump.py
index 29d4755..46ead5e 100755
--- a/src/3rdparty/v8/tools/grokdump.py
+++ b/src/3rdparty/v8/tools/grokdump.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -27,32 +27,31 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import bisect
import cmd
+import codecs
import ctypes
+import disasm
import mmap
import optparse
import os
-import disasm
-import sys
-import types
-import codecs
import re
import struct
+import sys
+import types
-USAGE="""usage: %prog [OPTION]...
+USAGE="""usage: %prog [OPTIONS] [DUMP-FILE]
Minidump analyzer.
Shows the processor state at the point of exception including the
stack of the active thread and the referenced objects in the V8
heap. Code objects are disassembled and the addresses linked from the
-stack (pushed return addresses) are marked with "=>".
-
+stack (e.g. pushed return addresses) are marked with "=>".
Examples:
- $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp
-"""
+ $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp"""
DEBUG=False
@@ -108,21 +107,59 @@ class Descriptor(object):
return Raw
-def do_dump(reader, heap):
+def FullDump(reader, heap):
"""Dump all available memory regions."""
def dump_region(reader, start, size, location):
- print "%s - %s" % (reader.FormatIntPtr(start),
- reader.FormatIntPtr(start + size))
- for slot in xrange(start,
- start + size,
- reader.PointerSize()):
- maybe_address = reader.ReadUIntPtr(slot)
- heap_object = heap.FindObject(maybe_address)
- print "%s: %s" % (reader.FormatIntPtr(slot),
- reader.FormatIntPtr(maybe_address))
- if heap_object:
- heap_object.Print(Printer())
- print
+ print
+ while start & 3 != 0:
+ start += 1
+ size -= 1
+ location += 1
+ is_executable = reader.IsProbableExecutableRegion(location, size)
+ is_ascii = reader.IsProbableASCIIRegion(location, size)
+
+ if is_executable is not False:
+ lines = reader.GetDisasmLines(start, size)
+ for line in lines:
+ print FormatDisasmLine(start, heap, line)
+ print
+
+ if is_ascii is not False:
+ # Output in the same format as the Unix hd command
+ addr = start
+ for slot in xrange(location, location + size, 16):
+ hex_line = ""
+ asc_line = ""
+ for i in xrange(0, 16):
+ if slot + i < location + size:
+ byte = ctypes.c_uint8.from_buffer(reader.minidump, slot + i).value
+ if byte >= 0x20 and byte < 0x7f:
+ asc_line += chr(byte)
+ else:
+ asc_line += "."
+ hex_line += " %02x" % (byte)
+ else:
+ hex_line += " "
+ if i == 7:
+ hex_line += " "
+ print "%s %s |%s|" % (reader.FormatIntPtr(addr),
+ hex_line,
+ asc_line)
+ addr += 16
+
+ if is_executable is not True and is_ascii is not True:
+ print "%s - %s" % (reader.FormatIntPtr(start),
+ reader.FormatIntPtr(start + size))
+ for slot in xrange(start,
+ start + size,
+ reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ heap_object = heap.FindObject(maybe_address)
+ print "%s: %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(maybe_address))
+ if heap_object:
+ heap_object.Print(Printer())
+ print
reader.ForEachMemoryRegion(dump_region)
@@ -144,6 +181,11 @@ MINIDUMP_LOCATION_DESCRIPTOR = Descriptor([
("rva", ctypes.c_uint32)
])
+MINIDUMP_STRING = Descriptor([
+ ("length", ctypes.c_uint32),
+ ("buffer", lambda t: ctypes.c_uint8 * (t.length + 2))
+])
+
MINIDUMP_DIRECTORY = Descriptor([
("stream_type", ctypes.c_uint32),
("location", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
@@ -364,6 +406,24 @@ MINIDUMP_THREAD_LIST = Descriptor([
("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count)
])
+MINIDUMP_RAW_MODULE = Descriptor([
+ ("base_of_image", ctypes.c_uint64),
+ ("size_of_image", ctypes.c_uint32),
+ ("checksum", ctypes.c_uint32),
+ ("time_date_stamp", ctypes.c_uint32),
+ ("module_name_rva", ctypes.c_uint32),
+ ("version_info", ctypes.c_uint32 * 13),
+ ("cv_record", MINIDUMP_LOCATION_DESCRIPTOR.ctype),
+ ("misc_record", MINIDUMP_LOCATION_DESCRIPTOR.ctype),
+ ("reserved0", ctypes.c_uint32 * 2),
+ ("reserved1", ctypes.c_uint32 * 2)
+])
+
+MINIDUMP_MODULE_LIST = Descriptor([
+ ("number_of_modules", ctypes.c_uint32),
+ ("modules", lambda t: MINIDUMP_RAW_MODULE.ctype * t.number_of_modules)
+])
+
MINIDUMP_RAW_SYSTEM_INFO = Descriptor([
("processor_architecture", ctypes.c_uint16)
])
@@ -371,6 +431,20 @@ MINIDUMP_RAW_SYSTEM_INFO = Descriptor([
MD_CPU_ARCHITECTURE_X86 = 0
MD_CPU_ARCHITECTURE_AMD64 = 9
+class FuncSymbol:
+ def __init__(self, start, size, name):
+ self.start = start
+ self.end = self.start + size
+ self.name = name
+
+ def __cmp__(self, other):
+ if isinstance(other, FuncSymbol):
+ return self.start - other.start
+ return self.start - other
+
+ def Covers(self, addr):
+ return (self.start <= addr) and (addr < self.end)
+
class MinidumpReader(object):
"""Minidump (.dmp) reader."""
@@ -382,7 +456,7 @@ class MinidumpReader(object):
self.minidump = mmap.mmap(self.minidump_file.fileno(), 0, mmap.MAP_PRIVATE)
self.header = MINIDUMP_HEADER.Read(self.minidump, 0)
if self.header.signature != MinidumpReader._HEADER_MAGIC:
- print >>sys.stderr, "Warning: unsupported minidump header magic"
+ print >>sys.stderr, "Warning: Unsupported minidump header magic!"
DebugPrint(self.header)
directories = []
offset = self.header.stream_directories_rva
@@ -394,8 +468,13 @@ class MinidumpReader(object):
self.exception_context = None
self.memory_list = None
self.memory_list64 = None
+ self.module_list = None
self.thread_map = {}
+ self.symdir = options.symdir
+ self.modules_with_symbols = []
+ self.symbols = []
+
# Find MDRawSystemInfo stream and determine arch.
for d in directories:
if d.stream_type == MD_SYSTEM_INFO_STREAM:
@@ -425,8 +504,13 @@ class MinidumpReader(object):
for thread in thread_list.threads:
DebugPrint(thread)
self.thread_map[thread.id] = thread
+ elif d.stream_type == MD_MODULE_LIST_STREAM:
+ assert self.module_list is None
+ self.module_list = MINIDUMP_MODULE_LIST.Read(
+ self.minidump, d.location.rva)
+ assert ctypes.sizeof(self.module_list) == d.location.data_size
elif d.stream_type == MD_MEMORY_LIST_STREAM:
- print >>sys.stderr, "Warning: not a full minidump"
+ print >>sys.stderr, "Warning: This is not a full minidump!"
assert self.memory_list is None
self.memory_list = MINIDUMP_MEMORY_LIST.Read(
self.minidump, d.location.rva)
@@ -470,6 +554,64 @@ class MinidumpReader(object):
elif self.arch == MD_CPU_ARCHITECTURE_X86:
return ctypes.c_uint32.from_buffer(self.minidump, location).value
+ def IsProbableASCIIRegion(self, location, length):
+ ascii_bytes = 0
+ non_ascii_bytes = 0
+ for loc in xrange(location, location + length):
+ byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
+ if byte >= 0x7f:
+ non_ascii_bytes += 1
+ if byte < 0x20 and byte != 0:
+ non_ascii_bytes += 1
+ if byte < 0x7f and byte >= 0x20:
+ ascii_bytes += 1
+ if byte == 0xa: # newline
+ ascii_bytes += 1
+ if ascii_bytes * 10 <= length:
+ return False
+ if length > 0 and ascii_bytes > non_ascii_bytes * 7:
+ return True
+ if ascii_bytes > non_ascii_bytes * 3:
+ return None # Maybe
+ return False
+
+ def IsProbableExecutableRegion(self, location, length):
+ opcode_bytes = 0
+ sixty_four = self.arch == MD_CPU_ARCHITECTURE_AMD64
+ for loc in xrange(location, location + length):
+ byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
+ if (byte == 0x8b or # mov
+ byte == 0x89 or # mov reg-reg
+ (byte & 0xf0) == 0x50 or # push/pop
+ (sixty_four and (byte & 0xf0) == 0x40) or # rex prefix
+ byte == 0xc3 or # return
+ byte == 0x74 or # jeq
+ byte == 0x84 or # jeq far
+ byte == 0x75 or # jne
+ byte == 0x85 or # jne far
+ byte == 0xe8 or # call
+ byte == 0xe9 or # jmp far
+ byte == 0xeb): # jmp near
+ opcode_bytes += 1
+ opcode_percent = (opcode_bytes * 100) / length
+ threshold = 20
+ if opcode_percent > threshold + 2:
+ return True
+ if opcode_percent > threshold - 2:
+ return None # Maybe
+ return False
+
+ def FindRegion(self, addr):
+ answer = [-1, -1]
+ def is_in(reader, start, size, location):
+ if addr >= start and addr < start + size:
+ answer[0] = start
+ answer[1] = size
+ self.ForEachMemoryRegion(is_in)
+ if answer[0] == -1:
+ return None
+ return answer
+
def ForEachMemoryRegion(self, cb):
if self.memory_list64 is not None:
for r in self.memory_list64.ranges:
@@ -481,14 +623,14 @@ class MinidumpReader(object):
for r in self.memory_list.ranges:
cb(self, r.start, r.memory.data_size, r.memory.rva)
- def FindWord(self, word):
+ def FindWord(self, word, alignment=0):
def search_inside_region(reader, start, size, location):
- for loc in xrange(location, location + size):
+ location = (location + alignment) & ~alignment
+ for loc in xrange(location, location + size - self.PointerSize()):
if reader._ReadWord(loc) == word:
slot = start + (loc - location)
print "%s: %s" % (reader.FormatIntPtr(slot),
reader.FormatIntPtr(word))
-
self.ForEachMemoryRegion(search_inside_region)
def FindLocation(self, address):
@@ -550,6 +692,66 @@ class MinidumpReader(object):
def Register(self, name):
return self.exception_context.__getattribute__(name)
+ def ReadMinidumpString(self, rva):
+ string = bytearray(MINIDUMP_STRING.Read(self.minidump, rva).buffer)
+ string = string.decode("utf16")
+ return string[0:len(string) - 1]
+
+ # Load FUNC records from a BreakPad symbol file
+ #
+ # http://code.google.com/p/google-breakpad/wiki/SymbolFiles
+ #
+ def _LoadSymbolsFrom(self, symfile, baseaddr):
+ print "Loading symbols from %s" % (symfile)
+ funcs = []
+ with open(symfile) as f:
+ for line in f:
+ result = re.match(
+ r"^FUNC ([a-f0-9]+) ([a-f0-9]+) ([a-f0-9]+) (.*)$", line)
+ if result is not None:
+ start = int(result.group(1), 16)
+ size = int(result.group(2), 16)
+ name = result.group(4).rstrip()
+ bisect.insort_left(self.symbols,
+ FuncSymbol(baseaddr + start, size, name))
+ print " ... done"
+
+ def TryLoadSymbolsFor(self, modulename, module):
+ try:
+ symfile = os.path.join(self.symdir,
+ modulename.replace('.', '_') + ".pdb.sym")
+ self._LoadSymbolsFrom(symfile, module.base_of_image)
+ self.modules_with_symbols.append(module)
+ except Exception as e:
+ print " ... failure (%s)" % (e)
+
+ # Returns true if address is covered by some module that has loaded symbols.
+ def _IsInModuleWithSymbols(self, addr):
+ for module in self.modules_with_symbols:
+ start = module.base_of_image
+ end = start + module.size_of_image
+ if (start <= addr) and (addr < end):
+ return True
+ return False
+
+ # Find symbol covering the given address and return its name in format
+ # <symbol name>+<offset from the start>
+ def FindSymbol(self, addr):
+ if not self._IsInModuleWithSymbols(addr):
+ return None
+
+ i = bisect.bisect_left(self.symbols, addr)
+ symbol = None
+ if (0 < i) and self.symbols[i - 1].Covers(addr):
+ symbol = self.symbols[i - 1]
+ elif (i < len(self.symbols)) and self.symbols[i].Covers(addr):
+ symbol = self.symbols[i]
+ else:
+ return None
+ diff = addr - symbol.start
+ return "%s+0x%x" % (symbol.name, diff)
+
+
# List of V8 instance types. Obtained by adding the code below to any .cc file.
#
@@ -614,24 +816,182 @@ INSTANCE_TYPES = {
156: "SCRIPT_TYPE",
157: "CODE_CACHE_TYPE",
158: "POLYMORPHIC_CODE_CACHE_TYPE",
- 161: "FIXED_ARRAY_TYPE",
+ 159: "TYPE_FEEDBACK_INFO_TYPE",
+ 160: "ALIASED_ARGUMENTS_ENTRY_TYPE",
+ 163: "FIXED_ARRAY_TYPE",
145: "FIXED_DOUBLE_ARRAY_TYPE",
- 162: "SHARED_FUNCTION_INFO_TYPE",
- 163: "JS_MESSAGE_OBJECT_TYPE",
- 166: "JS_VALUE_TYPE",
- 167: "JS_OBJECT_TYPE",
- 168: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
- 169: "JS_GLOBAL_OBJECT_TYPE",
- 170: "JS_BUILTINS_OBJECT_TYPE",
- 171: "JS_GLOBAL_PROXY_TYPE",
- 172: "JS_ARRAY_TYPE",
- 165: "JS_PROXY_TYPE",
- 175: "JS_WEAK_MAP_TYPE",
- 176: "JS_REGEXP_TYPE",
- 177: "JS_FUNCTION_TYPE",
- 164: "JS_FUNCTION_PROXY_TYPE",
- 159: "DEBUG_INFO_TYPE",
- 160: "BREAK_POINT_INFO_TYPE",
+ 164: "SHARED_FUNCTION_INFO_TYPE",
+ 165: "JS_MESSAGE_OBJECT_TYPE",
+ 168: "JS_VALUE_TYPE",
+ 169: "JS_DATE_TYPE",
+ 170: "JS_OBJECT_TYPE",
+ 171: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+ 172: "JS_MODULE_TYPE",
+ 173: "JS_GLOBAL_OBJECT_TYPE",
+ 174: "JS_BUILTINS_OBJECT_TYPE",
+ 175: "JS_GLOBAL_PROXY_TYPE",
+ 176: "JS_ARRAY_TYPE",
+ 167: "JS_PROXY_TYPE",
+ 179: "JS_WEAK_MAP_TYPE",
+ 180: "JS_REGEXP_TYPE",
+ 181: "JS_FUNCTION_TYPE",
+ 166: "JS_FUNCTION_PROXY_TYPE",
+ 161: "DEBUG_INFO_TYPE",
+ 162: "BREAK_POINT_INFO_TYPE",
+}
+
+
+# List of known V8 maps. Used to determine the instance type and name
+# for maps that are part of the root-set and hence on the first page of
+# the map-space. Obtained by adding the code below to an IA32 release
+# build with enabled snapshots to the end of the Isolate::Init method.
+#
+# #define ROOT_LIST_CASE(type, name, camel_name) \
+# if (o == heap_.name()) n = #camel_name;
+# #define STRUCT_LIST_CASE(upper_name, camel_name, name) \
+# if (o == heap_.name##_map()) n = #camel_name "Map";
+# HeapObjectIterator it(heap_.map_space());
+# printf("KNOWN_MAPS = {\n");
+# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
+# Map* m = Map::cast(o);
+# const char* n = "";
+# intptr_t p = reinterpret_cast<intptr_t>(m) & 0xfffff;
+# int t = m->instance_type();
+# ROOT_LIST(ROOT_LIST_CASE)
+# STRUCT_LIST(STRUCT_LIST_CASE)
+# printf(" 0x%05x: (%d, \"%s\"),\n", p, t, n);
+# }
+# printf("}\n");
+KNOWN_MAPS = {
+ 0x08081: (134, "ByteArrayMap"),
+ 0x080a1: (128, "MetaMap"),
+ 0x080c1: (130, "OddballMap"),
+ 0x080e1: (163, "FixedArrayMap"),
+ 0x08101: (68, "AsciiSymbolMap"),
+ 0x08121: (132, "HeapNumberMap"),
+ 0x08141: (135, "FreeSpaceMap"),
+ 0x08161: (146, "OnePointerFillerMap"),
+ 0x08181: (146, "TwoPointerFillerMap"),
+ 0x081a1: (131, "GlobalPropertyCellMap"),
+ 0x081c1: (164, "SharedFunctionInfoMap"),
+ 0x081e1: (4, "AsciiStringMap"),
+ 0x08201: (163, "GlobalContextMap"),
+ 0x08221: (129, "CodeMap"),
+ 0x08241: (163, "ScopeInfoMap"),
+ 0x08261: (163, "FixedCOWArrayMap"),
+ 0x08281: (145, "FixedDoubleArrayMap"),
+ 0x082a1: (163, "HashTableMap"),
+ 0x082c1: (0, "StringMap"),
+ 0x082e1: (64, "SymbolMap"),
+ 0x08301: (1, "ConsStringMap"),
+ 0x08321: (5, "ConsAsciiStringMap"),
+ 0x08341: (3, "SlicedStringMap"),
+ 0x08361: (7, "SlicedAsciiStringMap"),
+ 0x08381: (65, "ConsSymbolMap"),
+ 0x083a1: (69, "ConsAsciiSymbolMap"),
+ 0x083c1: (66, "ExternalSymbolMap"),
+ 0x083e1: (74, "ExternalSymbolWithAsciiDataMap"),
+ 0x08401: (70, "ExternalAsciiSymbolMap"),
+ 0x08421: (2, "ExternalStringMap"),
+ 0x08441: (10, "ExternalStringWithAsciiDataMap"),
+ 0x08461: (6, "ExternalAsciiStringMap"),
+ 0x08481: (82, "ShortExternalSymbolMap"),
+ 0x084a1: (90, "ShortExternalSymbolWithAsciiDataMap"),
+ 0x084c1: (86, "ShortExternalAsciiSymbolMap"),
+ 0x084e1: (18, "ShortExternalStringMap"),
+ 0x08501: (26, "ShortExternalStringWithAsciiDataMap"),
+ 0x08521: (22, "ShortExternalAsciiStringMap"),
+ 0x08541: (0, "UndetectableStringMap"),
+ 0x08561: (4, "UndetectableAsciiStringMap"),
+ 0x08581: (144, "ExternalPixelArrayMap"),
+ 0x085a1: (136, "ExternalByteArrayMap"),
+ 0x085c1: (137, "ExternalUnsignedByteArrayMap"),
+ 0x085e1: (138, "ExternalShortArrayMap"),
+ 0x08601: (139, "ExternalUnsignedShortArrayMap"),
+ 0x08621: (140, "ExternalIntArrayMap"),
+ 0x08641: (141, "ExternalUnsignedIntArrayMap"),
+ 0x08661: (142, "ExternalFloatArrayMap"),
+ 0x08681: (143, "ExternalDoubleArrayMap"),
+ 0x086a1: (163, "NonStrictArgumentsElementsMap"),
+ 0x086c1: (163, "FunctionContextMap"),
+ 0x086e1: (163, "CatchContextMap"),
+ 0x08701: (163, "WithContextMap"),
+ 0x08721: (163, "BlockContextMap"),
+ 0x08741: (163, "ModuleContextMap"),
+ 0x08761: (165, "JSMessageObjectMap"),
+ 0x08781: (133, "ForeignMap"),
+ 0x087a1: (170, "NeanderMap"),
+ 0x087c1: (158, "PolymorphicCodeCacheMap"),
+ 0x087e1: (156, "ScriptMap"),
+ 0x08801: (147, "AccessorInfoMap"),
+ 0x08821: (148, "AccessorPairMap"),
+ 0x08841: (149, "AccessCheckInfoMap"),
+ 0x08861: (150, "InterceptorInfoMap"),
+ 0x08881: (151, "CallHandlerInfoMap"),
+ 0x088a1: (152, "FunctionTemplateInfoMap"),
+ 0x088c1: (153, "ObjectTemplateInfoMap"),
+ 0x088e1: (154, "SignatureInfoMap"),
+ 0x08901: (155, "TypeSwitchInfoMap"),
+ 0x08921: (157, "CodeCacheMap"),
+ 0x08941: (159, "TypeFeedbackInfoMap"),
+ 0x08961: (160, "AliasedArgumentsEntryMap"),
+ 0x08981: (161, "DebugInfoMap"),
+ 0x089a1: (162, "BreakPointInfoMap"),
+}
+
+
+# List of known V8 objects. Used to determine name for objects that are
+# part of the root-set and hence on the first page of various old-space
+# paged. Obtained by adding the code below to an IA32 release build with
+# enabled snapshots to the end of the Isolate::Init method.
+#
+# #define ROOT_LIST_CASE(type, name, camel_name) \
+# if (o == heap_.name()) n = #camel_name;
+# OldSpaces spit;
+# printf("KNOWN_OBJECTS = {\n");
+# for (PagedSpace* s = spit.next(); s != NULL; s = spit.next()) {
+# HeapObjectIterator it(s);
+# const char* sname = AllocationSpaceName(s->identity());
+# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
+# const char* n = NULL;
+# intptr_t p = reinterpret_cast<intptr_t>(o) & 0xfffff;
+# ROOT_LIST(ROOT_LIST_CASE)
+# if (n != NULL) {
+# printf(" (\"%s\", 0x%05x): \"%s\",\n", sname, p, n);
+# }
+# }
+# }
+# printf("}\n");
+KNOWN_OBJECTS = {
+ ("OLD_POINTER_SPACE", 0x08081): "NullValue",
+ ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
+ ("OLD_POINTER_SPACE", 0x080a1): "InstanceofCacheMap",
+ ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
+ ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
+ ("OLD_POINTER_SPACE", 0x080d1): "NoInterceptorResultSentinel",
+ ("OLD_POINTER_SPACE", 0x080e1): "ArgumentsMarker",
+ ("OLD_POINTER_SPACE", 0x080f1): "NumberStringCache",
+ ("OLD_POINTER_SPACE", 0x088f9): "SingleCharacterStringCache",
+ ("OLD_POINTER_SPACE", 0x08b01): "StringSplitCache",
+ ("OLD_POINTER_SPACE", 0x08f09): "TerminationException",
+ ("OLD_POINTER_SPACE", 0x08f19): "MessageListeners",
+ ("OLD_POINTER_SPACE", 0x08f35): "CodeStubs",
+ ("OLD_POINTER_SPACE", 0x09b61): "NonMonomorphicCache",
+ ("OLD_POINTER_SPACE", 0x0a175): "PolymorphicCodeCache",
+ ("OLD_POINTER_SPACE", 0x0a17d): "NativesSourceCache",
+ ("OLD_POINTER_SPACE", 0x0a1bd): "EmptyScript",
+ ("OLD_POINTER_SPACE", 0x0a1f9): "IntrinsicFunctionNames",
+ ("OLD_POINTER_SPACE", 0x24a49): "SymbolTable",
+ ("OLD_DATA_SPACE", 0x08081): "EmptyFixedArray",
+ ("OLD_DATA_SPACE", 0x080a1): "NanValue",
+ ("OLD_DATA_SPACE", 0x0811d): "EmptyByteArray",
+ ("OLD_DATA_SPACE", 0x08125): "EmptyString",
+ ("OLD_DATA_SPACE", 0x08131): "EmptyDescriptorArray",
+ ("OLD_DATA_SPACE", 0x08259): "InfinityValue",
+ ("OLD_DATA_SPACE", 0x08265): "MinusZeroValue",
+ ("OLD_DATA_SPACE", 0x08271): "PrototypeAccessors",
+ ("CODE_SPACE", 0x12b81): "JsEntryCode",
+ ("CODE_SPACE", 0x12c61): "JsConstructEntryCode",
}
@@ -799,18 +1159,42 @@ class ConsString(String):
class Oddball(HeapObject):
+ # Should match declarations in objects.h
+ KINDS = [
+ "False",
+ "True",
+ "TheHole",
+ "Null",
+ "ArgumentMarker",
+ "Undefined",
+ "Other"
+ ]
+
def ToStringOffset(self):
return self.heap.PointerSize()
+ def ToNumberOffset(self):
+ return self.ToStringOffset() + self.heap.PointerSize()
+
+ def KindOffset(self):
+ return self.ToNumberOffset() + self.heap.PointerSize()
+
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
self.to_string = self.ObjectField(self.ToStringOffset())
+ self.kind = self.SmiField(self.KindOffset())
def Print(self, p):
p.Print(str(self))
def __str__(self):
- return "<%s>" % self.to_string.GetChars()
+ if self.to_string:
+ return "Oddball(%08x, <%s>)" % (self.address, self.to_string.GetChars())
+ else:
+ kind = "???"
+ if 0 <= self.kind < len(Oddball.KINDS):
+ kind = Oddball.KINDS[self.kind]
+ return "Oddball(%08x, kind=%s)" % (self.address, kind)
class FixedArray(HeapObject):
@@ -831,7 +1215,13 @@ class FixedArray(HeapObject):
base_offset = self.ElementsOffset()
for i in xrange(self.length):
offset = base_offset + 4 * i
- p.Print("[%08d] = %s" % (i, self.ObjectField(offset)))
+ try:
+ p.Print("[%08d] = %s" % (i, self.ObjectField(offset)))
+ except TypeError:
+ p.Dedent()
+ p.Print("...")
+ p.Print("}")
+ return
p.Dedent()
p.Print("}")
@@ -936,6 +1326,27 @@ class Script(HeapObject):
self.name = self.ObjectField(self.NameOffset())
+class CodeCache(HeapObject):
+ def DefaultCacheOffset(self):
+ return self.heap.PointerSize()
+
+ def NormalTypeCacheOffset(self):
+ return self.DefaultCacheOffset() + self.heap.PointerSize()
+
+ def __init__(self, heap, map, address):
+ HeapObject.__init__(self, heap, map, address)
+ self.default_cache = self.ObjectField(self.DefaultCacheOffset())
+ self.normal_type_cache = self.ObjectField(self.NormalTypeCacheOffset())
+
+ def Print(self, p):
+ p.Print("CodeCache(%s) {" % self.heap.reader.FormatIntPtr(self.address))
+ p.Indent()
+ p.Print("default cache: %s" % self.default_cache)
+ p.Print("normal type cache: %s" % self.normal_type_cache)
+ p.Dedent()
+ p.Print("}")
+
+
class Code(HeapObject):
CODE_ALIGNMENT_MASK = (1 << 5) - 1
@@ -986,14 +1397,14 @@ class V8Heap(object):
"EXTERNAL_STRING_TYPE": ExternalString,
"EXTERNAL_STRING_WITH_ASCII_DATA_TYPE": ExternalString,
"EXTERNAL_ASCII_STRING_TYPE": ExternalString,
-
"MAP_TYPE": Map,
"ODDBALL_TYPE": Oddball,
"FIXED_ARRAY_TYPE": FixedArray,
"JS_FUNCTION_TYPE": JSFunction,
"SHARED_FUNCTION_INFO_TYPE": SharedFunctionInfo,
"SCRIPT_TYPE": Script,
- "CODE_TYPE": Code
+ "CODE_CACHE_TYPE": CodeCache,
+ "CODE_TYPE": Code,
}
def __init__(self, reader, stack_map):
@@ -1051,109 +1462,383 @@ class V8Heap(object):
elif self.reader.arch == MD_CPU_ARCHITECTURE_X86:
return (1 << 5) - 1
+ def PageAlignmentMask(self):
+ return (1 << 20) - 1
-EIP_PROXIMITY = 64
-CONTEXT_FOR_ARCH = {
- MD_CPU_ARCHITECTURE_AMD64:
- ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip'],
- MD_CPU_ARCHITECTURE_X86:
- ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
-}
+class KnownObject(HeapObject):
+ def __init__(self, heap, known_name):
+ HeapObject.__init__(self, heap, None, None)
+ self.known_name = known_name
+
+ def __str__(self):
+ return "<%s>" % self.known_name
+
+
+class KnownMap(HeapObject):
+ def __init__(self, heap, known_name, instance_type):
+ HeapObject.__init__(self, heap, None, None)
+ self.instance_type = instance_type
+ self.known_name = known_name
+
+ def __str__(self):
+ return "<%s>" % self.known_name
+
+
+class InspectionPadawan(object):
+ """The padawan can improve annotations by sensing well-known objects."""
+ def __init__(self, reader, heap):
+ self.reader = reader
+ self.heap = heap
+ self.known_first_map_page = 0
+ self.known_first_data_page = 0
+ self.known_first_pointer_page = 0
+
+ def __getattr__(self, name):
+ """An InspectionPadawan can be used instead of V8Heap, even though
+ it does not inherit from V8Heap (aka. mixin)."""
+ return getattr(self.heap, name)
+
+ def GetPageOffset(self, tagged_address):
+ return tagged_address & self.heap.PageAlignmentMask()
+
+ def IsInKnownMapSpace(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ return page_address == self.known_first_map_page
+
+ def IsInKnownOldSpace(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ return page_address in [self.known_first_data_page,
+ self.known_first_pointer_page]
+
+ def ContainingKnownOldSpaceName(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
+ if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
+ return None
+
+ def SenseObject(self, tagged_address):
+ if self.IsInKnownOldSpace(tagged_address):
+ offset = self.GetPageOffset(tagged_address)
+ lookup_key = (self.ContainingKnownOldSpaceName(tagged_address), offset)
+ known_obj_name = KNOWN_OBJECTS.get(lookup_key)
+ if known_obj_name:
+ return KnownObject(self, known_obj_name)
+ if self.IsInKnownMapSpace(tagged_address):
+ known_map = self.SenseMap(tagged_address)
+ if known_map:
+ return known_map
+ found_obj = self.heap.FindObject(tagged_address)
+ if found_obj: return found_obj
+ address = tagged_address - 1
+ if self.reader.IsValidAddress(address):
+ map_tagged_address = self.reader.ReadUIntPtr(address)
+ map = self.SenseMap(map_tagged_address)
+ if map is None: return None
+ instance_type_name = INSTANCE_TYPES.get(map.instance_type)
+ if instance_type_name is None: return None
+ cls = V8Heap.CLASS_MAP.get(instance_type_name, HeapObject)
+ return cls(self, map, address)
+ return None
+
+ def SenseMap(self, tagged_address):
+ if self.IsInKnownMapSpace(tagged_address):
+ offset = self.GetPageOffset(tagged_address)
+ known_map_info = KNOWN_MAPS.get(offset)
+ if known_map_info:
+ known_map_type, known_map_name = known_map_info
+ return KnownMap(self, known_map_name, known_map_type)
+ found_map = self.heap.FindMap(tagged_address)
+ if found_map: return found_map
+ return None
+
+ def FindObjectOrSmi(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ found_obj = self.SenseObject(tagged_address)
+ if found_obj: return found_obj
+ if (tagged_address & 1) == 0:
+ return "Smi(%d)" % (tagged_address / 2)
+ else:
+ return "Unknown(%s)" % self.reader.FormatIntPtr(tagged_address)
+
+ def FindObject(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ raise NotImplementedError
+
+ def FindMap(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ raise NotImplementedError
+
+ def PrintKnowledge(self):
+ print " known_first_map_page = %s\n"\
+ " known_first_data_page = %s\n"\
+ " known_first_pointer_page = %s" % (
+ self.reader.FormatIntPtr(self.known_first_map_page),
+ self.reader.FormatIntPtr(self.known_first_data_page),
+ self.reader.FormatIntPtr(self.known_first_pointer_page))
+
class InspectionShell(cmd.Cmd):
def __init__(self, reader, heap):
cmd.Cmd.__init__(self)
self.reader = reader
self.heap = heap
+ self.padawan = InspectionPadawan(reader, heap)
self.prompt = "(grok) "
+ def do_da(self, address):
+ """
+ Print ASCII string starting at specified address.
+ """
+ address = int(address, 16)
+ string = ""
+ while self.reader.IsValidAddress(address):
+ code = self.reader.ReadU8(address)
+ if code < 128:
+ string += chr(code)
+ else:
+ break
+ address += 1
+ if string == "":
+ print "Not an ASCII string at %s" % self.reader.FormatIntPtr(address)
+ else:
+ print "%s\n" % string
+
def do_dd(self, address):
- "Interpret memory at the given address (if available)"\
- " as a sequence of words."
+ """
+ Interpret memory at the given address (if available) as a sequence
+ of words. Automatic alignment is not performed.
+ """
start = int(address, 16)
+ if (start & self.heap.ObjectAlignmentMask()) != 0:
+ print "Warning: Dumping un-aligned memory, is this what you had in mind?"
for slot in xrange(start,
start + self.reader.PointerSize() * 10,
self.reader.PointerSize()):
+ if not self.reader.IsValidAddress(slot):
+ print "Address is not contained within the minidump!"
+ return
maybe_address = self.reader.ReadUIntPtr(slot)
- heap_object = self.heap.FindObject(maybe_address)
- print "%s: %s" % (self.reader.FormatIntPtr(slot),
- self.reader.FormatIntPtr(maybe_address))
- if heap_object:
- heap_object.Print(Printer())
- print
+ heap_object = self.padawan.SenseObject(maybe_address)
+ print "%s: %s %s" % (self.reader.FormatIntPtr(slot),
+ self.reader.FormatIntPtr(maybe_address),
+ heap_object or '')
+
+ def do_do(self, address):
+ """
+ Interpret memory at the given address as a V8 object. Automatic
+ alignment makes sure that you can pass tagged as well as un-tagged
+ addresses.
+ """
+ address = int(address, 16)
+ if (address & self.heap.ObjectAlignmentMask()) == 0:
+ address = address + 1
+ elif (address & self.heap.ObjectAlignmentMask()) != 1:
+ print "Address doesn't look like a valid pointer!"
+ return
+ heap_object = self.padawan.SenseObject(address)
+ if heap_object:
+ heap_object.Print(Printer())
+ else:
+ print "Address cannot be interpreted as object!"
+
+ def do_dp(self, address):
+ """
+ Interpret memory at the given address as being on a V8 heap page
+ and print information about the page header (if available).
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ if self.reader.IsValidAddress(page_address):
+ raise NotImplementedError
+ else:
+ print "Page header is not available!"
+
+ def do_k(self, arguments):
+ """
+ Teach V8 heap layout information to the inspector. This increases
+ the amount of annotations the inspector can produce while dumping
+ data. The first page of each heap space is of particular interest
+ because it contains known objects that do not move.
+ """
+ self.padawan.PrintKnowledge()
+
+ def do_kd(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ data-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_data_page = page_address
+
+ def do_km(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ map-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_map_page = page_address
+
+ def do_kp(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ pointer-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_pointer_page = page_address
+
+ def do_list(self, smth):
+ """
+ List all available memory regions.
+ """
+ def print_region(reader, start, size, location):
+ print " %s - %s (%d bytes)" % (reader.FormatIntPtr(start),
+ reader.FormatIntPtr(start + size),
+ size)
+ print "Available memory regions:"
+ self.reader.ForEachMemoryRegion(print_region)
def do_s(self, word):
- "Search for a given word in available memory regions"
- word = int(word, 0)
- print "searching for word", word
+ """
+ Search for a given word in available memory regions. The given word
+ is expanded to full pointer size and searched at aligned as well as
+ un-aligned memory locations. Use 'sa' to search aligned locations
+ only.
+ """
+ try:
+ word = int(word, 0)
+ except ValueError:
+ print "Malformed word, prefix with '0x' to use hexadecimal format."
+ return
+ print "Searching for word %d/0x%s:" % (word, self.reader.FormatIntPtr(word))
self.reader.FindWord(word)
- def do_list(self, smth):
- """List all available memory regions."""
- def print_region(reader, start, size, location):
- print "%s - %s" % (reader.FormatIntPtr(start),
- reader.FormatIntPtr(start + size))
+ def do_sh(self, none):
+ """
+ Search for the V8 Heap object in all available memory regions. You
+ might get lucky and find this rare treasure full of invaluable
+ information.
+ """
+ raise NotImplementedError
+
+ def do_u(self, args):
+ """
+ u 0x<address> 0x<size>
+ Unassemble memory in the region [address, address + size)
+ """
+ args = args.split(' ')
+ start = int(args[0], 16)
+ size = int(args[1], 16)
+ lines = self.reader.GetDisasmLines(start, size)
+ for line in lines:
+ print FormatDisasmLine(start, self.heap, line)
+ print
- self.reader.ForEachMemoryRegion(print_region)
+EIP_PROXIMITY = 64
+
+CONTEXT_FOR_ARCH = {
+ MD_CPU_ARCHITECTURE_AMD64:
+ ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip',
+ 'r8', 'r9', 'r10', 'r11', 'r12', 'r13', 'r14', 'r15'],
+ MD_CPU_ARCHITECTURE_X86:
+ ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
+}
+
+KNOWN_MODULES = {'chrome.exe', 'chrome.dll'}
+
+def GetModuleName(reader, module):
+ name = reader.ReadMinidumpString(module.module_name_rva)
+ return str(os.path.basename(str(name).replace("\\", "/")))
def AnalyzeMinidump(options, minidump_name):
reader = MinidumpReader(options, minidump_name)
+ heap = None
DebugPrint("========================================")
if reader.exception is None:
print "Minidump has no exception info"
- return
- print "Exception info:"
- exception_thread = reader.thread_map[reader.exception.thread_id]
- print " thread id: %d" % exception_thread.id
- print " code: %08X" % reader.exception.exception.code
- print " context:"
- for r in CONTEXT_FOR_ARCH[reader.arch]:
- print " %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
- # TODO(vitalyr): decode eflags.
- print " eflags: %s" % bin(reader.exception_context.eflags)[2:]
- print
-
- stack_top = reader.ExceptionSP()
- stack_bottom = exception_thread.stack.start + \
- exception_thread.stack.memory.data_size
- stack_map = {reader.ExceptionIP(): -1}
- for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
- maybe_address = reader.ReadUIntPtr(slot)
- if not maybe_address in stack_map:
- stack_map[maybe_address] = slot
- heap = V8Heap(reader, stack_map)
-
- print "Disassembly around exception.eip:"
- start = reader.ExceptionIP() - EIP_PROXIMITY
- lines = reader.GetDisasmLines(start, 2 * EIP_PROXIMITY)
- for line in lines:
- print FormatDisasmLine(start, heap, line)
- print
+ else:
+ print "Exception info:"
+ exception_thread = reader.thread_map[reader.exception.thread_id]
+ print " thread id: %d" % exception_thread.id
+ print " code: %08X" % reader.exception.exception.code
+ print " context:"
+ for r in CONTEXT_FOR_ARCH[reader.arch]:
+ print " %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
+ # TODO(vitalyr): decode eflags.
+ print " eflags: %s" % bin(reader.exception_context.eflags)[2:]
+ print
+ print " modules:"
+ for module in reader.module_list.modules:
+ name = GetModuleName(reader, module)
+ if name in KNOWN_MODULES:
+ print " %s at %08X" % (name, module.base_of_image)
+ reader.TryLoadSymbolsFor(name, module)
+ print
+
+ stack_top = reader.ExceptionSP()
+ stack_bottom = exception_thread.stack.start + \
+ exception_thread.stack.memory.data_size
+ stack_map = {reader.ExceptionIP(): -1}
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ if not maybe_address in stack_map:
+ stack_map[maybe_address] = slot
+ heap = V8Heap(reader, stack_map)
+
+ print "Disassembly around exception.eip:"
+ eip_symbol = reader.FindSymbol(reader.ExceptionIP())
+ if eip_symbol is not None:
+ print eip_symbol
+ disasm_start = reader.ExceptionIP() - EIP_PROXIMITY
+ disasm_bytes = 2 * EIP_PROXIMITY
+ if (options.full):
+ full_range = reader.FindRegion(reader.ExceptionIP())
+ if full_range is not None:
+ disasm_start = full_range[0]
+ disasm_bytes = full_range[1]
+
+ lines = reader.GetDisasmLines(disasm_start, disasm_bytes)
+
+ for line in lines:
+ print FormatDisasmLine(disasm_start, heap, line)
+ print
+
+ if heap is None:
+ heap = V8Heap(reader, None)
if options.full:
- do_dump(reader, heap)
+ FullDump(reader, heap)
if options.shell:
InspectionShell(reader, heap).cmdloop("type help to get help")
else:
- print "Annotated stack (from exception.esp to bottom):"
- for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
- maybe_address = reader.ReadUIntPtr(slot)
- heap_object = heap.FindObject(maybe_address)
- print "%s: %s" % (reader.FormatIntPtr(slot),
- reader.FormatIntPtr(maybe_address))
- if heap_object:
- heap_object.Print(Printer())
- print
+ if reader.exception is not None:
+ print "Annotated stack (from exception.esp to bottom):"
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ heap_object = heap.FindObject(maybe_address)
+ maybe_symbol = reader.FindSymbol(maybe_address)
+ print "%s: %s %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(maybe_address),
+ maybe_symbol or "")
+ if heap_object:
+ heap_object.Print(Printer())
+ print
reader.Dispose()
if __name__ == "__main__":
parser = optparse.OptionParser(USAGE)
- parser.add_option("-s", "--shell", dest="shell", action="store_true")
- parser.add_option("-f", "--full", dest="full", action="store_true")
+ parser.add_option("-s", "--shell", dest="shell", action="store_true",
+ help="start an interactive inspector shell")
+ parser.add_option("-f", "--full", dest="full", action="store_true",
+ help="dump all information contained in the minidump")
+ parser.add_option("--symdir", dest="symdir", default=".",
+ help="directory containing *.pdb.sym file with symbols")
options, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
diff --git a/src/3rdparty/v8/tools/gyp/v8.gyp b/src/3rdparty/v8/tools/gyp/v8.gyp
index aa91139..aad07c7 100644
--- a/src/3rdparty/v8/tools/gyp/v8.gyp
+++ b/src/3rdparty/v8/tools/gyp/v8.gyp
@@ -69,6 +69,14 @@
],
},
'conditions': [
+ ['OS=="android"', {
+ 'libraries': [
+ '-llog',
+ ],
+ 'include_dirs': [
+ 'src/common/android/include',
+ ],
+ }],
['OS=="mac"', {
'xcode_settings': {
'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']
@@ -234,10 +242,10 @@
'../../src/ast.h',
'../../src/atomicops.h',
'../../src/atomicops_internals_x86_gcc.cc',
- '../../src/bignum.cc',
- '../../src/bignum.h',
'../../src/bignum-dtoa.cc',
'../../src/bignum-dtoa.h',
+ '../../src/bignum.cc',
+ '../../src/bignum.h',
'../../src/bootstrapper.cc',
'../../src/bootstrapper.h',
'../../src/builtins.cc',
@@ -268,21 +276,21 @@
'../../src/conversions.h',
'../../src/counters.cc',
'../../src/counters.h',
- '../../src/cpu.h',
'../../src/cpu-profiler-inl.h',
'../../src/cpu-profiler.cc',
'../../src/cpu-profiler.h',
+ '../../src/cpu.h',
'../../src/data-flow.cc',
'../../src/data-flow.h',
'../../src/date.cc',
'../../src/date.h',
+ '../../src/dateparser-inl.h',
'../../src/dateparser.cc',
'../../src/dateparser.h',
- '../../src/dateparser-inl.h',
- '../../src/debug.cc',
- '../../src/debug.h',
'../../src/debug-agent.cc',
'../../src/debug-agent.h',
+ '../../src/debug.cc',
+ '../../src/debug.h',
'../../src/deoptimizer.cc',
'../../src/deoptimizer.h',
'../../src/disasm.h',
@@ -293,17 +301,25 @@
'../../src/double.h',
'../../src/dtoa.cc',
'../../src/dtoa.h',
+ '../../src/elements-kind.cc',
+ '../../src/elements-kind.h',
'../../src/elements.cc',
'../../src/elements.h',
'../../src/execution.cc',
'../../src/execution.h',
+ '../../src/extensions/externalize-string-extension.cc',
+ '../../src/extensions/externalize-string-extension.h',
+ '../../src/extensions/gc-extension.cc',
+ '../../src/extensions/gc-extension.h',
+ '../../src/extensions/statistics-extension.cc',
+ '../../src/extensions/statistics-extension.h',
'../../src/factory.cc',
'../../src/factory.h',
'../../src/fast-dtoa.cc',
'../../src/fast-dtoa.h',
- '../../src/flag-definitions.h',
'../../src/fixed-dtoa.cc',
'../../src/fixed-dtoa.h',
+ '../../src/flag-definitions.h',
'../../src/flags.cc',
'../../src/flags.h',
'../../src/frames-inl.h',
@@ -313,6 +329,8 @@
'../../src/full-codegen.h',
'../../src/func-name-inferrer.cc',
'../../src/func-name-inferrer.h',
+ '../../src/gdb-jit.cc',
+ '../../src/gdb-jit.h',
'../../src/global-handles.cc',
'../../src/global-handles.h',
'../../src/globals.h',
@@ -321,14 +339,14 @@
'../../src/handles.h',
'../../src/hashmap.h',
'../../src/heap-inl.h',
- '../../src/heap.cc',
- '../../src/heap.h',
'../../src/heap-profiler.cc',
'../../src/heap-profiler.h',
- '../../src/hydrogen.cc',
- '../../src/hydrogen.h',
+ '../../src/heap.cc',
+ '../../src/heap.h',
'../../src/hydrogen-instructions.cc',
'../../src/hydrogen-instructions.h',
+ '../../src/hydrogen.cc',
+ '../../src/hydrogen.h',
'../../src/ic-inl.h',
'../../src/ic.cc',
'../../src/ic.h',
@@ -340,19 +358,20 @@
'../../src/interface.h',
'../../src/interpreter-irregexp.cc',
'../../src/interpreter-irregexp.h',
+ '../../src/isolate.cc',
+ '../../src/isolate.h',
'../../src/json-parser.h',
+ '../../src/json-stringifier.h',
'../../src/jsregexp.cc',
'../../src/jsregexp.h',
- '../../src/isolate.cc',
- '../../src/isolate.h',
'../../src/lazy-instance.h',
'../../src/list-inl.h',
'../../src/list.h',
- '../../src/lithium.cc',
- '../../src/lithium.h',
+ '../../src/lithium-allocator-inl.h',
'../../src/lithium-allocator.cc',
'../../src/lithium-allocator.h',
- '../../src/lithium-allocator-inl.h',
+ '../../src/lithium.cc',
+ '../../src/lithium.h',
'../../src/liveedit.cc',
'../../src/liveedit.h',
'../../src/liveobjectlist-inl.h',
@@ -370,14 +389,16 @@
'../../src/messages.h',
'../../src/natives.h',
'../../src/objects-debug.cc',
- '../../src/objects-printer.cc',
'../../src/objects-inl.h',
+ '../../src/objects-printer.cc',
'../../src/objects-visiting.cc',
'../../src/objects-visiting.h',
'../../src/objects.cc',
'../../src/objects.h',
'../../src/once.cc',
'../../src/once.h',
+ '../../src/optimizing-compiler-thread.h',
+ '../../src/optimizing-compiler-thread.cc',
'../../src/parser.cc',
'../../src/parser.h',
'../../src/platform-posix.h',
@@ -392,12 +413,12 @@
'../../src/preparser.h',
'../../src/prettyprinter.cc',
'../../src/prettyprinter.h',
- '../../src/property.cc',
- '../../src/property.h',
- '../../src/property-details.h',
'../../src/profile-generator-inl.h',
'../../src/profile-generator.cc',
'../../src/profile-generator.h',
+ '../../src/property-details.h',
+ '../../src/property.cc',
+ '../../src/property.h',
'../../src/regexp-macro-assembler-irregexp-inl.h',
'../../src/regexp-macro-assembler-irregexp.cc',
'../../src/regexp-macro-assembler-irregexp.h',
@@ -409,16 +430,16 @@
'../../src/regexp-stack.h',
'../../src/rewriter.cc',
'../../src/rewriter.h',
- '../../src/runtime.cc',
- '../../src/runtime.h',
'../../src/runtime-profiler.cc',
'../../src/runtime-profiler.h',
+ '../../src/runtime.cc',
+ '../../src/runtime.h',
'../../src/safepoint-table.cc',
'../../src/safepoint-table.h',
- '../../src/scanner.cc',
- '../../src/scanner.h',
'../../src/scanner-character-streams.cc',
'../../src/scanner-character-streams.h',
+ '../../src/scanner.cc',
+ '../../src/scanner.h',
'../../src/scopeinfo.cc',
'../../src/scopeinfo.h',
'../../src/scopes.cc',
@@ -426,7 +447,7 @@
'../../src/serialize.cc',
'../../src/serialize.h',
'../../src/small-pointer-list.h',
- '../../src/smart-array-pointer.h',
+ '../../src/smart-pointers.h',
'../../src/snapshot-common.cc',
'../../src/snapshot.h',
'../../src/spaces-inl.h',
@@ -445,6 +466,9 @@
'../../src/stub-cache.h',
'../../src/token.cc',
'../../src/token.h',
+ '../../src/transitions-inl.h',
+ '../../src/transitions.cc',
+ '../../src/transitions.h',
'../../src/type-info.cc',
'../../src/type-info.h',
'../../src/unbound-queue-inl.h',
@@ -477,10 +501,6 @@
'../../src/zone-inl.h',
'../../src/zone.cc',
'../../src/zone.h',
- '../../src/extensions/externalize-string-extension.cc',
- '../../src/extensions/externalize-string-extension.h',
- '../../src/extensions/gc-extension.cc',
- '../../src/extensions/gc-extension.h',
],
'conditions': [
['want_separate_host_toolset==1', {
@@ -553,7 +573,7 @@
'../../src/ia32/stub-cache-ia32.cc',
],
}],
- ['v8_target_arch=="mips"', {
+ ['v8_target_arch=="mipsel"', {
'sources': [
'../../src/mips/assembler-mips.cc',
'../../src/mips/assembler-mips.h',
@@ -768,6 +788,7 @@
'../../src/macros.py',
'../../src/proxy.js',
'../../src/collection.js',
+ '../../src/object-observe.js'
],
},
'actions': [
diff --git a/src/3rdparty/v8/tools/linux-tick-processor b/src/3rdparty/v8/tools/linux-tick-processor
index 7070ce6..93f143f 100755
--- a/src/3rdparty/v8/tools/linux-tick-processor
+++ b/src/3rdparty/v8/tools/linux-tick-processor
@@ -12,21 +12,21 @@ done
tools_path=`cd $(dirname "$0");pwd`
if [ ! "$D8_PATH" ]; then
d8_public=`which d8`
- if [ -x $d8_public ]; then D8_PATH=$(dirname "$d8_public"); fi
+ if [ -x "$d8_public" ]; then D8_PATH=$(dirname "$d8_public"); fi
fi
-[ "$D8_PATH" ] || D8_PATH=$tools_path/..
+[ -n "$D8_PATH" ] || D8_PATH=$tools_path/..
d8_exec=$D8_PATH/d8
-if [ ! -x $d8_exec ]; then
+if [ ! -x "$d8_exec" ]; then
D8_PATH=`pwd`/out/native
d8_exec=$D8_PATH/d8
fi
-if [ ! -x $d8_exec ]; then
+if [ ! -x "$d8_exec" ]; then
d8_exec=`grep -m 1 -o '".*/d8"' $log_file | sed 's/"//g'`
fi
-if [ ! -x $d8_exec ]; then
+if [ ! -x "$d8_exec" ]; then
echo "d8 shell not found in $D8_PATH"
echo "To build, execute 'make native' from the V8 directory"
exit 1
diff --git a/src/3rdparty/v8/tools/ll_prof.py b/src/3rdparty/v8/tools/ll_prof.py
index 51ba672..3afe179 100755
--- a/src/3rdparty/v8/tools/ll_prof.py
+++ b/src/3rdparty/v8/tools/ll_prof.py
@@ -68,15 +68,9 @@ Examples:
"""
-# Must match kGcFakeMmap.
-V8_GC_FAKE_MMAP = "/tmp/__v8_gc__"
-
JS_ORIGIN = "js"
JS_SNAPSHOT_ORIGIN = "js-snapshot"
-OBJDUMP_BIN = disasm.OBJDUMP_BIN
-
-
class Code(object):
"""Code object."""
@@ -639,7 +633,7 @@ class TraceReader(object):
# Read null-terminated filename.
filename = self.trace[offset + self.header_size + ctypes.sizeof(mmap_info):
offset + header.size]
- mmap_info.filename = filename[:filename.find(chr(0))]
+ mmap_info.filename = HOST_ROOT + filename[:filename.find(chr(0))]
return mmap_info
def ReadSample(self, header, offset):
@@ -858,6 +852,15 @@ if __name__ == "__main__":
default=False,
action="store_true",
help="no auxiliary messages [default: %default]")
+ parser.add_option("--gc-fake-mmap",
+ default="/tmp/__v8_gc__",
+ help="gc fake mmap file [default: %default]")
+ parser.add_option("--objdump",
+ default="/usr/bin/objdump",
+ help="objdump tool to use [default: %default]")
+ parser.add_option("--host-root",
+ default="",
+ help="Path to the host root [default: %default]")
options, args = parser.parse_args()
if not options.quiet:
@@ -869,6 +872,14 @@ if __name__ == "__main__":
print "V8 log: %s, %s.ll (no snapshot)" % (options.log, options.log)
print "Perf trace file: %s" % options.trace
+ V8_GC_FAKE_MMAP = options.gc_fake_mmap
+ HOST_ROOT = options.host_root
+ if os.path.exists(options.objdump):
+ disasm.OBJDUMP_BIN = options.objdump
+ OBJDUMP_BIN = options.objdump
+ else:
+ print "Cannot find %s, falling back to default objdump" % options.objdump
+
# Stats.
events = 0
ticks = 0
@@ -905,7 +916,7 @@ if __name__ == "__main__":
if header.type == PERF_RECORD_MMAP:
start = time.time()
mmap_info = trace_reader.ReadMmap(header, offset)
- if mmap_info.filename == V8_GC_FAKE_MMAP:
+ if mmap_info.filename == HOST_ROOT + V8_GC_FAKE_MMAP:
log_reader.ReadUpToGC()
else:
library_repo.Load(mmap_info, code_map, options)
diff --git a/src/3rdparty/v8/tools/merge-to-branch.sh b/src/3rdparty/v8/tools/merge-to-branch.sh
index aa590a3..e0011ed 100644..100755
--- a/src/3rdparty/v8/tools/merge-to-branch.sh
+++ b/src/3rdparty/v8/tools/merge-to-branch.sh
@@ -186,7 +186,6 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
if [ -n "$EXTRA_PATCH" ] ; then
apply_patch "$EXTRA_PATCH"
fi
- stage_files
fi
let CURRENT_STEP+=1
@@ -205,8 +204,9 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
your EDITOR on $VERSION_FILE so you can make arbitrary changes. When \
you're done, save the file and exit your EDITOR.)"
if [ $? -eq 0 ] ; then
+ echo $NEWPATCH $VERSION_FILE
sed -e "/#define PATCH_LEVEL/s/[0-9]*$/$NEWPATCH/" \
- -i "$VERSION_FILE"
+ -i.bak "$VERSION_FILE" || die "Could not increment patch level"
else
$EDITOR "$VERSION_FILE"
fi
diff --git a/src/3rdparty/v8/tools/presubmit.py b/src/3rdparty/v8/tools/presubmit.py
index a0b81e8..efa8724 100755
--- a/src/3rdparty/v8/tools/presubmit.py
+++ b/src/3rdparty/v8/tools/presubmit.py
@@ -307,6 +307,7 @@ class SourceProcessor(SourceFileProcessor):
or (name == 'DerivedSources'))
IGNORE_COPYRIGHTS = ['cpplint.py',
+ 'daemon.py',
'earley-boyer.js',
'raytrace.js',
'crypto.js',
diff --git a/src/3rdparty/v8/tools/push-to-trunk.sh b/src/3rdparty/v8/tools/push-to-trunk.sh
index ff6dd1d..8512d12 100755
--- a/src/3rdparty/v8/tools/push-to-trunk.sh
+++ b/src/3rdparty/v8/tools/push-to-trunk.sh
@@ -268,7 +268,6 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
echo ">>> Step $CURRENT_STEP: Apply squashed changes."
rm -f "$TOUCHED_FILES_FILE"
apply_patch "$PATCH_FILE"
- stage_files
rm -f "$PATCH_FILE"
fi
@@ -304,11 +303,22 @@ fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
echo ">>> Step $CURRENT_STEP: Commit to SVN."
- git svn dcommit | tee >(grep -E "^Committed r[0-9]+" \
- | sed -e 's/^Committed r\([0-9]\+\)/\1/' \
- > "$TRUNK_REVISION_FILE") \
+ git svn dcommit 2>&1 | tee >(grep -E "^Committed r[0-9]+" \
+ | sed -e 's/^Committed r\([0-9]\+\)/\1/' \
+ > "$TRUNK_REVISION_FILE") \
|| die "'git svn dcommit' failed."
TRUNK_REVISION=$(cat "$TRUNK_REVISION_FILE")
+ # Sometimes grepping for the revision fails. No idea why. If you figure
+ # out why it is flaky, please do fix it properly.
+ if [ -z "$TRUNK_REVISION" ] ; then
+ echo "Sorry, grepping for the SVN revision failed. Please look for it in \
+the last command's output above and provide it manually (just the number, \
+without the leading \"r\")."
+ while [ -z "$TRUNK_REVISION" ] ; do
+ echo -n "> "
+ read TRUNK_REVISION
+ done
+ fi
persist "TRUNK_REVISION"
rm -f "$TRUNK_REVISION_FILE"
fi
diff --git a/src/3rdparty/v8/tools/run-tests.py b/src/3rdparty/v8/tools/run-tests.py
new file mode 100755
index 0000000..c09ea06
--- /dev/null
+++ b/src/3rdparty/v8/tools/run-tests.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python
+#
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import multiprocessing
+import optparse
+import os
+from os.path import join
+import subprocess
+import sys
+import time
+
+from testrunner.local import execution
+from testrunner.local import progress
+from testrunner.local import testsuite
+from testrunner.local import utils
+from testrunner.local import verbose
+from testrunner.network import network_execution
+from testrunner.objects import context
+
+
+ARCH_GUESS = utils.DefaultArch()
+DEFAULT_TESTS = ["mjsunit", "cctest", "message", "preparser"]
+TIMEOUT_DEFAULT = 60
+TIMEOUT_SCALEFACTOR = {"debug" : 4,
+ "release" : 1 }
+
+# Use this to run several variants of the tests.
+VARIANT_FLAGS = [[],
+ ["--stress-opt", "--always-opt"],
+ ["--nocrankshaft"]]
+MODE_FLAGS = {
+ "debug" : ["--nobreak-on-abort", "--nodead-code-elimination",
+ "--enable-slow-asserts", "--debug-code", "--verify-heap"],
+ "release" : ["--nobreak-on-abort", "--nodead-code-elimination"]}
+
+SUPPORTED_ARCHS = ["android_arm",
+ "android_ia32",
+ "arm",
+ "ia32",
+ "mipsel",
+ "x64"]
+# Double the timeout for these:
+SLOW_ARCHS = ["android_arm",
+ "android_ia32",
+ "arm",
+ "mipsel"]
+
+
+def BuildOptions():
+ result = optparse.OptionParser()
+ result.add_option("--arch",
+ help=("The architecture to run tests for, "
+ "'auto' or 'native' for auto-detect"),
+ default="ia32,x64,arm")
+ result.add_option("--arch-and-mode",
+ help="Architecture and mode in the format 'arch.mode'",
+ default=None)
+ result.add_option("--buildbot",
+ help="Adapt to path structure used on buildbots",
+ default=False, action="store_true")
+ result.add_option("--cat", help="Print the source of the tests",
+ default=False, action="store_true")
+ result.add_option("--command-prefix",
+ help="Prepended to each shell command used to run a test",
+ default="")
+ result.add_option("--download-data", help="Download missing test suite data",
+ default=False, action="store_true")
+ result.add_option("--extra-flags",
+ help="Additional flags to pass to each test command",
+ default="")
+ result.add_option("--isolates", help="Whether to test isolates",
+ default=False, action="store_true")
+ result.add_option("-j", help="The number of parallel tasks to run",
+ default=0, type="int")
+ result.add_option("-m", "--mode",
+ help="The test modes in which to run (comma-separated)",
+ default="release,debug")
+ result.add_option("--no-network", "--nonetwork",
+ help="Don't distribute tests on the network",
+ default=(utils.GuessOS() != "linux"),
+ dest="no_network", action="store_true")
+ result.add_option("--no-presubmit", "--nopresubmit",
+ help='Skip presubmit checks',
+ default=False, dest="no_presubmit", action="store_true")
+ result.add_option("--no-stress", "--nostress",
+ help="Don't run crankshaft --always-opt --stress-op test",
+ default=False, dest="no_stress", action="store_true")
+ result.add_option("--outdir", help="Base directory with compile output",
+ default="out")
+ result.add_option("-p", "--progress",
+ help=("The style of progress indicator"
+ " (verbose, dots, color, mono)"),
+ choices=progress.PROGRESS_INDICATORS.keys(), default="mono")
+ result.add_option("--report", help="Print a summary of the tests to be run",
+ default=False, action="store_true")
+ result.add_option("--shard-count",
+ help="Split testsuites into this number of shards",
+ default=1, type="int")
+ result.add_option("--shard-run",
+ help="Run this shard from the split up tests.",
+ default=1, type="int")
+ result.add_option("--shell", help="DEPRECATED! use --shell-dir", default="")
+ result.add_option("--shell-dir", help="Directory containing executables",
+ default="")
+ result.add_option("--stress-only",
+ help="Only run tests with --always-opt --stress-opt",
+ default=False, action="store_true")
+ result.add_option("--time", help="Print timing information after running",
+ default=False, action="store_true")
+ result.add_option("-t", "--timeout", help="Timeout in seconds",
+ default= -1, type="int")
+ result.add_option("-v", "--verbose", help="Verbose output",
+ default=False, action="store_true")
+ result.add_option("--valgrind", help="Run tests through valgrind",
+ default=False, action="store_true")
+ result.add_option("--warn-unused", help="Report unused rules",
+ default=False, action="store_true")
+ return result
+
+
+def ProcessOptions(options):
+ global VARIANT_FLAGS
+
+ # Architecture and mode related stuff.
+ if options.arch_and_mode:
+ tokens = options.arch_and_mode.split(".")
+ options.arch = tokens[0]
+ options.mode = tokens[1]
+ options.mode = options.mode.split(",")
+ for mode in options.mode:
+ if not mode in ["debug", "release"]:
+ print "Unknown mode %s" % mode
+ return False
+ if options.arch in ["auto", "native"]:
+ options.arch = ARCH_GUESS
+ options.arch = options.arch.split(",")
+ for arch in options.arch:
+ if not arch in SUPPORTED_ARCHS:
+ print "Unknown architecture %s" % arch
+ return False
+
+ # Special processing of other options, sorted alphabetically.
+
+ if options.buildbot:
+ # Buildbots run presubmit tests as a separate step.
+ options.no_presubmit = True
+ options.no_network = True
+ if options.command_prefix:
+ print("Specifying --command-prefix disables network distribution, "
+ "running tests locally.")
+ options.no_network = True
+ if options.j == 0:
+ options.j = multiprocessing.cpu_count()
+ if options.no_stress:
+ VARIANT_FLAGS = [[], ["--nocrankshaft"]]
+ if not options.shell_dir:
+ if options.shell:
+ print "Warning: --shell is deprecated, use --shell-dir instead."
+ options.shell_dir = os.path.dirname(options.shell)
+ if options.stress_only:
+ VARIANT_FLAGS = [["--stress-opt", "--always-opt"]]
+ if options.valgrind:
+ run_valgrind = os.path.join("tools", "run-valgrind.py")
+ # This is OK for distributed running, so we don't need to set no_network.
+ options.command_prefix = ("python -u " + run_valgrind +
+ options.command_prefix)
+ return True
+
+
+def ShardTests(tests, shard_count, shard_run):
+ if shard_count < 2:
+ return tests
+ if shard_run < 1 or shard_run > shard_count:
+ print "shard-run not a valid number, should be in [1:shard-count]"
+ print "defaulting back to running all tests"
+ return tests
+ count = 0
+ shard = []
+ for test in tests:
+ if count % shard_count == shard_run - 1:
+ shard.append(test)
+ count += 1
+ return shard
+
+
+def Main():
+ parser = BuildOptions()
+ (options, args) = parser.parse_args()
+ if not ProcessOptions(options):
+ parser.print_help()
+ return 1
+
+ exit_code = 0
+ workspace = os.path.abspath(join(os.path.dirname(sys.argv[0]), ".."))
+ if not options.no_presubmit:
+ print ">>> running presubmit tests"
+ code = subprocess.call(
+ [sys.executable, join(workspace, "tools", "presubmit.py")])
+ exit_code = code
+
+ suite_paths = utils.GetSuitePaths(join(workspace, "test"))
+
+ if len(args) == 0:
+ suite_paths = [ s for s in suite_paths if s in DEFAULT_TESTS ]
+ else:
+ args_suites = set()
+ for arg in args:
+ suite = arg.split(os.path.sep)[0]
+ if not suite in args_suites:
+ args_suites.add(suite)
+ suite_paths = [ s for s in suite_paths if s in args_suites ]
+
+ suites = []
+ for root in suite_paths:
+ suite = testsuite.TestSuite.LoadTestSuite(
+ os.path.join(workspace, "test", root))
+ if suite:
+ suites.append(suite)
+
+ if options.download_data:
+ for s in suites:
+ s.DownloadData()
+
+ for mode in options.mode:
+ for arch in options.arch:
+ code = Execute(arch, mode, args, options, suites, workspace)
+ exit_code = exit_code or code
+ return exit_code
+
+
+def Execute(arch, mode, args, options, suites, workspace):
+ print(">>> Running tests for %s.%s" % (arch, mode))
+
+ shell_dir = options.shell_dir
+ if not shell_dir:
+ if options.buildbot:
+ shell_dir = os.path.join(workspace, options.outdir, mode)
+ mode = mode.lower()
+ else:
+ shell_dir = os.path.join(workspace, options.outdir,
+ "%s.%s" % (arch, mode))
+ shell_dir = os.path.relpath(shell_dir)
+
+ # Populate context object.
+ mode_flags = MODE_FLAGS[mode]
+ timeout = options.timeout
+ if timeout == -1:
+ # Simulators are slow, therefore allow a longer default timeout.
+ if arch in SLOW_ARCHS:
+ timeout = 2 * TIMEOUT_DEFAULT;
+ else:
+ timeout = TIMEOUT_DEFAULT;
+
+ timeout *= TIMEOUT_SCALEFACTOR[mode]
+ ctx = context.Context(arch, mode, shell_dir,
+ mode_flags, options.verbose,
+ timeout, options.isolates,
+ options.command_prefix,
+ options.extra_flags)
+
+ # Find available test suites and read test cases from them.
+ variables = {
+ "mode": mode,
+ "arch": arch,
+ "system": utils.GuessOS(),
+ "isolates": options.isolates
+ }
+ all_tests = []
+ num_tests = 0
+ test_id = 0
+ for s in suites:
+ s.ReadStatusFile(variables)
+ s.ReadTestCases(ctx)
+ if len(args) > 0:
+ s.FilterTestCasesByArgs(args)
+ all_tests += s.tests
+ s.FilterTestCasesByStatus(options.warn_unused)
+ if options.cat:
+ verbose.PrintTestSource(s.tests)
+ continue
+ variant_flags = s.VariantFlags() or VARIANT_FLAGS
+ s.tests = [ t.CopyAddingFlags(v) for t in s.tests for v in variant_flags ]
+ s.tests = ShardTests(s.tests, options.shard_count, options.shard_run)
+ num_tests += len(s.tests)
+ for t in s.tests:
+ t.id = test_id
+ test_id += 1
+
+ if options.cat:
+ return 0 # We're done here.
+
+ if options.report:
+ verbose.PrintReport(all_tests)
+
+ if num_tests == 0:
+ print "No tests to run."
+ return 0
+
+ # Run the tests, either locally or distributed on the network.
+ try:
+ start_time = time.time()
+ progress_indicator = progress.PROGRESS_INDICATORS[options.progress]()
+
+ run_networked = not options.no_network
+ if not run_networked:
+ print("Network distribution disabled, running tests locally.")
+ elif utils.GuessOS() != "linux":
+ print("Network distribution is only supported on Linux, sorry!")
+ run_networked = False
+ peers = []
+ if run_networked:
+ peers = network_execution.GetPeers()
+ if not peers:
+ print("No connection to distribution server; running tests locally.")
+ run_networked = False
+ elif len(peers) == 1:
+ print("No other peers on the network; running tests locally.")
+ run_networked = False
+ elif num_tests <= 100:
+ print("Less than 100 tests, running them locally.")
+ run_networked = False
+
+ if run_networked:
+ runner = network_execution.NetworkedRunner(suites, progress_indicator,
+ ctx, peers, workspace)
+ else:
+ runner = execution.Runner(suites, progress_indicator, ctx)
+
+ exit_code = runner.Run(options.j)
+ if runner.terminate:
+ return exit_code
+ overall_duration = time.time() - start_time
+ except KeyboardInterrupt:
+ return 1
+
+ if options.time:
+ verbose.PrintTestDurations(suites, overall_duration)
+ return exit_code
+
+
+if __name__ == "__main__":
+ sys.exit(Main())
diff --git a/src/3rdparty/v8/tools/status-file-converter.py b/src/3rdparty/v8/tools/status-file-converter.py
new file mode 100755
index 0000000..ba063ee
--- /dev/null
+++ b/src/3rdparty/v8/tools/status-file-converter.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import sys
+from testrunner.local import old_statusfile
+
+if len(sys.argv) != 2:
+ print "Usage: %s foo.status" % sys.argv[0]
+ print "Will read foo.status and print the converted version to stdout."
+ sys.exit(1)
+
+print old_statusfile.ConvertNotation(sys.argv[1]).GetOutput()
diff --git a/src/3rdparty/v8/tools/test-server.py b/src/3rdparty/v8/tools/test-server.py
new file mode 100755
index 0000000..df547ed
--- /dev/null
+++ b/src/3rdparty/v8/tools/test-server.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+#
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import subprocess
+import sys
+
+
+PIDFILE = "/tmp/v8-distributed-testing-server.pid"
+ROOT = os.path.abspath(os.path.dirname(sys.argv[0]))
+
+
+def _PrintUsage():
+ print("""Usage: python %s COMMAND
+
+Where COMMAND can be any of:
+ start Starts the server. Forks to the background.
+ stop Stops the server.
+ restart Stops, then restarts the server.
+ setup Creates or updates the environment for the server to run.
+ update Alias for "setup".
+ trust <keyfile> Adds the given public key to the list of trusted keys.
+ help Displays this help text.
+ """ % sys.argv[0])
+
+
+def _IsDaemonRunning():
+ return os.path.exists(PIDFILE)
+
+
+def _Cmd(cmd):
+ code = subprocess.call(cmd, shell=True)
+ if code != 0:
+ print("Command '%s' returned error code %d" % (cmd, code))
+ sys.exit(code)
+
+
+def Update():
+ # Create directory for private data storage.
+ data_dir = os.path.join(ROOT, "data")
+ if not os.path.exists(data_dir):
+ os.makedirs(data_dir)
+
+ # Create directory for trusted public keys of peers (and self).
+ trusted_dir = os.path.join(ROOT, "trusted")
+ if not os.path.exists(trusted_dir):
+ os.makedirs(trusted_dir)
+
+ # Install UltraJSON. It is much faster than Python's builtin json.
+ try:
+ import ujson #@UnusedImport
+ except ImportError:
+ # Install pip if it doesn't exist.
+ code = subprocess.call("which pip > /dev/null", shell=True)
+ if code != 0:
+ apt_get_code = subprocess.call("which apt-get > /dev/null", shell=True)
+ if apt_get_code == 0:
+ print("Installing pip...")
+ _Cmd("sudo apt-get install python-pip")
+ else:
+ print("Please install pip on your machine. You can get it at: "
+ "http://www.pip-installer.org/en/latest/installing.html "
+ "or via your distro's package manager.")
+ sys.exit(1)
+ print("Using pip to install UltraJSON...")
+ _Cmd("sudo pip install ujson")
+
+ # Make sure we have a key pair for signing binaries.
+ privkeyfile = os.path.expanduser("~/.ssh/v8_dtest")
+ if not os.path.exists(privkeyfile):
+ _Cmd("ssh-keygen -t rsa -f %s -N '' -q" % privkeyfile)
+ fingerprint = subprocess.check_output("ssh-keygen -lf %s" % privkeyfile,
+ shell=True)
+ fingerprint = fingerprint.split(" ")[1].replace(":", "")[:16]
+ pubkeyfile = os.path.join(trusted_dir, "%s.pem" % fingerprint)
+ if (not os.path.exists(pubkeyfile) or
+ os.path.getmtime(pubkeyfile) < os.path.getmtime(privkeyfile)):
+ _Cmd("openssl rsa -in %s -out %s -pubout" % (privkeyfile, pubkeyfile))
+ with open(pubkeyfile, "a") as f:
+ f.write(fingerprint + "\n")
+ datafile = os.path.join(data_dir, "mypubkey")
+ with open(datafile, "w") as f:
+ f.write(fingerprint + "\n")
+
+ # Check out or update the server implementation in the current directory.
+ testrunner_dir = os.path.join(ROOT, "testrunner")
+ if os.path.exists(os.path.join(testrunner_dir, "server/daemon.py")):
+ _Cmd("cd %s; svn up" % testrunner_dir)
+ else:
+ path = ("http://v8.googlecode.com/svn/branches/bleeding_edge/"
+ "tools/testrunner")
+ _Cmd("svn checkout --force %s %s" % (path, testrunner_dir))
+
+ # Update this very script.
+ path = ("http://v8.googlecode.com/svn/branches/bleeding_edge/"
+ "tools/test-server.py")
+ scriptname = os.path.abspath(sys.argv[0])
+ _Cmd("svn cat %s > %s" % (path, scriptname))
+
+ # The testcfg.py files currently need to be able to import the old test.py
+ # script, so we temporarily need to make that available.
+ # TODO(jkummerow): Remove this when removing test.py.
+ for filename in ("test.py", "utils.py"):
+ url = ("http://v8.googlecode.com/svn/branches/bleeding_edge/"
+ "tools/%s" % filename)
+ filepath = os.path.join(os.path.dirname(scriptname), filename)
+ _Cmd("svn cat %s > %s" % (url, filepath))
+
+ # Check out or update V8.
+ v8_dir = os.path.join(ROOT, "v8")
+ if os.path.exists(v8_dir):
+ _Cmd("cd %s; git fetch" % v8_dir)
+ else:
+ _Cmd("git clone git://github.com/v8/v8.git %s" % v8_dir)
+
+ print("Finished.")
+
+
+# Handle "setup" here, because when executing that we can't import anything
+# else yet.
+if __name__ == "__main__" and len(sys.argv) == 2:
+ if sys.argv[1] in ("setup", "update"):
+ if _IsDaemonRunning():
+ print("Please stop the server before updating. Exiting.")
+ sys.exit(1)
+ Update()
+ sys.exit(0)
+ # Other parameters are handled below.
+
+
+#==========================================================
+# At this point we can assume that the implementation is available,
+# so we can import it.
+try:
+ from testrunner.server import constants
+ from testrunner.server import local_handler
+ from testrunner.server import main
+except Exception, e:
+ print(e)
+ print("Failed to import implementation. Have you run 'setup'?")
+ sys.exit(1)
+
+
+def _StartDaemon(daemon):
+ if not os.path.isdir(os.path.join(ROOT, "v8")):
+ print("No 'v8' working directory found. Have you run 'setup'?")
+ sys.exit(1)
+ daemon.start()
+
+
+if __name__ == "__main__":
+ if len(sys.argv) == 2:
+ arg = sys.argv[1]
+ if arg == "start":
+ daemon = main.Server(PIDFILE, ROOT)
+ _StartDaemon(daemon)
+ elif arg == "stop":
+ daemon = main.Server(PIDFILE, ROOT)
+ daemon.stop()
+ elif arg == "restart":
+ daemon = main.Server(PIDFILE, ROOT)
+ daemon.stop()
+ _StartDaemon(daemon)
+ elif arg in ("help", "-h", "--help"):
+ _PrintUsage()
+ elif arg == "status":
+ if not _IsDaemonRunning():
+ print("Server not running.")
+ else:
+ print(local_handler.LocalQuery([constants.REQUEST_STATUS]))
+ else:
+ print("Unknown command")
+ _PrintUsage()
+ sys.exit(2)
+ elif len(sys.argv) == 3:
+ arg = sys.argv[1]
+ if arg == "approve":
+ filename = sys.argv[2]
+ if not os.path.exists(filename):
+ print("%s does not exist.")
+ sys.exit(1)
+ filename = os.path.abspath(filename)
+ if _IsDaemonRunning():
+ response = local_handler.LocalQuery([constants.ADD_TRUSTED, filename])
+ else:
+ daemon = main.Server(PIDFILE, ROOT)
+ response = daemon.CopyToTrusted(filename)
+ print("Added certificate %s to trusted certificates." % response)
+ else:
+ print("Unknown command")
+ _PrintUsage()
+ sys.exit(2)
+ else:
+ print("Unknown command")
+ _PrintUsage()
+ sys.exit(2)
+ sys.exit(0)
diff --git a/src/3rdparty/v8/tools/test-wrapper-gypbuild.py b/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
index eda2459..4dd6338 100755
--- a/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
+++ b/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
@@ -95,11 +95,14 @@ def BuildOptions():
default=1, type="int")
result.add_option("--time", help="Print timing information after running",
default=False, action="store_true")
- result.add_option("--suppress-dialogs", help="Suppress Windows dialogs for crashing tests",
- dest="suppress_dialogs", default=True, action="store_true")
- result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
- dest="suppress_dialogs", action="store_false")
- result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
+ result.add_option("--suppress-dialogs",
+ help="Suppress Windows dialogs for crashing tests",
+ dest="suppress_dialogs", default=True, action="store_true")
+ result.add_option("--no-suppress-dialogs",
+ help="Display Windows dialogs for crashing tests",
+ dest="suppress_dialogs", action="store_false")
+ result.add_option("--isolates", help="Whether to test isolates",
+ default=False, action="store_true")
result.add_option("--store-unexpected-output",
help="Store the temporary JS files from tests that fails",
dest="store_unexpected_output", default=True, action="store_true")
@@ -112,9 +115,6 @@ def BuildOptions():
result.add_option("--nostress",
help="Don't run crankshaft --always-opt --stress-op test",
default=False, action="store_true")
- result.add_option("--crankshaft",
- help="Run with the --crankshaft flag",
- default=False, action="store_true")
result.add_option("--shard-count",
help="Split testsuites into this number of shards",
default=1, type="int")
@@ -151,7 +151,8 @@ def ProcessOptions(options):
print "Unknown mode %s" % mode
return False
for arch in options.arch:
- if not arch in ['ia32', 'x64', 'arm', 'mips']:
+ if not arch in ['ia32', 'x64', 'arm', 'mipsel', 'android_arm',
+ 'android_ia32']:
print "Unknown architecture %s" % arch
return False
if options.buildbot:
@@ -199,8 +200,6 @@ def PassOnOptions(options):
result += ['--stress-only']
if options.nostress:
result += ['--nostress']
- if options.crankshaft:
- result += ['--crankshaft']
if options.shard_count != 1:
result += ['--shard-count=%s' % options.shard_count]
if options.shard_run != 1:
@@ -222,9 +221,11 @@ def Main():
if not options.no_presubmit:
print ">>> running presubmit tests"
- returncodes += subprocess.call([workspace + '/tools/presubmit.py'])
+ returncodes += subprocess.call([sys.executable,
+ workspace + '/tools/presubmit.py'])
- args_for_children = [workspace + '/tools/test.py'] + PassOnOptions(options)
+ args_for_children = [sys.executable]
+ args_for_children += [workspace + '/tools/test.py'] + PassOnOptions(options)
args_for_children += ['--no-build', '--build-system=gyp']
for arg in args:
args_for_children += [arg]
@@ -240,10 +241,11 @@ def Main():
shellpath = workspace + '/' + options.outdir + '/' + arch + '.' + mode
env['LD_LIBRARY_PATH'] = shellpath + '/lib.target'
shell = shellpath + "/d8"
- child = subprocess.Popen(' '.join(args_for_children +
- ['--arch=' + arch] +
- ['--mode=' + mode] +
- ['--shell=' + shell]),
+ cmdline = ' '.join(args_for_children +
+ ['--arch=' + arch] +
+ ['--mode=' + mode] +
+ ['--shell=' + shell])
+ child = subprocess.Popen(cmdline,
shell=True,
cwd=workspace,
env=env)
diff --git a/src/3rdparty/v8/tools/test.py b/src/3rdparty/v8/tools/test.py
index 0aacd99..b3b62b3 100755
--- a/src/3rdparty/v8/tools/test.py
+++ b/src/3rdparty/v8/tools/test.py
@@ -140,9 +140,9 @@ def EscapeCommand(command):
parts = []
for part in command:
if ' ' in part:
- # Escape spaces. We may need to escape more characters for this
- # to work properly.
- parts.append('"%s"' % part)
+ # Escape spaces and double quotes. We may need to escape more characters
+ # for this to work properly.
+ parts.append('"%s"' % part.replace('"', '\\"'))
else:
parts.append(part)
return " ".join(parts)
@@ -299,8 +299,6 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
'stdout': '%s',
'stderr': '%s',
- 'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
- 'max_length': 78
}
super(MonochromeProgressIndicator, self).__init__(cases, templates)
@@ -686,8 +684,9 @@ SUFFIX = {
'debug' : '_g',
'release' : '' }
FLAGS = {
- 'debug' : ['--nobreak-on-abort', '--enable-slow-asserts', '--debug-code', '--verify-heap'],
- 'release' : ['--nobreak-on-abort']}
+ 'debug' : ['--nobreak-on-abort', '--nodead-code-elimination',
+ '--enable-slow-asserts', '--debug-code', '--verify-heap'],
+ 'release' : ['--nobreak-on-abort', '--nodead-code-elimination']}
TIMEOUT_SCALEFACTOR = {
'debug' : 4,
'release' : 1 }
@@ -1246,9 +1245,6 @@ def BuildOptions():
result.add_option("--nostress",
help="Don't run crankshaft --always-opt --stress-op test",
default=False, action="store_true")
- result.add_option("--crankshaft",
- help="Run with the --crankshaft flag",
- default=False, action="store_true")
result.add_option("--shard-count",
help="Split testsuites into this number of shards",
default=1, type="int")
@@ -1286,7 +1282,7 @@ def ProcessOptions(options):
options.scons_flags.append("arch=" + options.arch)
# Simulators are slow, therefore allow a longer default timeout.
if options.timeout == -1:
- if options.arch == 'arm' or options.arch == 'mips':
+ if options.arch in ['android', 'arm', 'mipsel']:
options.timeout = 2 * TIMEOUT_DEFAULT;
else:
options.timeout = TIMEOUT_DEFAULT;
@@ -1300,11 +1296,6 @@ def ProcessOptions(options):
VARIANT_FLAGS = [['--stress-opt', '--always-opt']]
if options.nostress:
VARIANT_FLAGS = [[],['--nocrankshaft']]
- if options.crankshaft:
- if options.special_command:
- options.special_command += " --crankshaft"
- else:
- options.special_command = "@ --crankshaft"
if options.shell.endswith("d8"):
if options.special_command:
options.special_command += " --test"
@@ -1380,8 +1371,9 @@ def GetSpecialCommandProcessor(value):
else:
pos = value.find('@')
import urllib
- prefix = urllib.unquote(value[:pos]).split()
- suffix = urllib.unquote(value[pos+1:]).split()
+ import shlex
+ prefix = shlex.split(urllib.unquote(value[:pos]))
+ suffix = shlex.split(urllib.unquote(value[pos+1:]))
def ExpandCommand(args):
return prefix + args + suffix
return ExpandCommand
@@ -1493,7 +1485,6 @@ def Main():
'system': utils.GuessOS(),
'arch': options.arch,
'simulator': options.simulator,
- 'crankshaft': options.crankshaft,
'isolates': options.isolates
}
test_list = root.ListTests([], path, context, mode, [])
diff --git a/src/3rdparty/v8/tools/testrunner/README b/src/3rdparty/v8/tools/testrunner/README
new file mode 100644
index 0000000..8f0c01f
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/README
@@ -0,0 +1,174 @@
+Test suite runner for V8, including support for distributed running.
+====================================================================
+
+
+Local usage instructions:
+=========================
+
+Run the main script with --help to get detailed usage instructions:
+
+$ tools/run-tests.py --help
+
+The interface is mostly the same as it was for the old test runner.
+You'll likely want something like this:
+
+$ tools/run-tests.py --nonetwork --arch ia32 --mode release
+
+--nonetwork is the default on Mac and Windows. If you don't specify --arch
+and/or --mode, all available values will be used and run in turn (e.g.,
+omitting --mode from the above example will run ia32 in both Release and Debug
+modes).
+
+
+Networked usage instructions:
+=============================
+
+Networked running is only supported on Linux currently. Make sure that all
+machines participating in the cluster are binary-compatible (e.g. mixing
+Ubuntu Lucid and Precise doesn't work).
+
+Setup:
+------
+
+1.) Copy tools/test-server.py to a new empty directory anywhere on your hard
+ drive (preferably not inside your V8 checkout just to keep things clean).
+ Please do create a copy, not just a symlink.
+
+2.) Navigate to the new directory and let the server set itself up:
+
+$ ./test-server.py setup
+
+ This will install PIP and UltraJSON, create a V8 working directory, and
+ generate a keypair.
+
+3.) Swap public keys with someone who's already part of the networked cluster.
+
+$ cp trusted/`cat data/mypubkey`.pem /where/peers/can/see/it/myname.pem
+$ ./test-server.py approve /wherever/they/put/it/yourname.pem
+
+
+Usage:
+------
+
+1.) Start your server:
+
+$ ./test-server.py start
+
+2.) (Optionally) inspect the server's status:
+
+$ ./test-server.py status
+
+3.) From your regular V8 working directory, run tests:
+
+$ tools/run-tests.py --arch ia32 --mode debug
+
+4.) (Optionally) enjoy the speeeeeeeeeeeeeeeed
+
+
+Architecture overview:
+======================
+
+Code organization:
+------------------
+
+This section is written from the point of view of the tools/ directory.
+
+./run-tests.py:
+ Main script. Parses command-line options and drives the test execution
+ procedure from a high level. Imports the actual implementation of all
+ steps from the testrunner/ directory.
+
+./test-server.py:
+ Interface to interact with the server. Contains code to setup the server's
+ working environment and can start and stop server daemon processes.
+ Imports some stuff from the testrunner/server/ directory.
+
+./testrunner/local/*:
+ Implementation needed to run tests locally. Used by run-tests.py. Inspired by
+ (and partly copied verbatim from) the original test.py script.
+
+./testrunner/local/old_statusfile.py:
+ Provides functionality to read an old-style <testsuite>.status file and
+ convert it to new-style syntax. This can be removed once the new-style
+ syntax becomes authoritative (and old-style syntax is no longer supported).
+ ./status-file-converter.py provides a stand-alone interface to this.
+
+./testrunner/objects/*:
+ A bunch of data container classes, used by the scripts in the various other
+ directories; serializable for transmission over the network.
+
+./testrunner/network/*:
+ Equivalents and extensions of some of the functionality in ./testrunner/local/
+ as required when dispatching tests to peers on the network.
+
+./testrunner/network/network_execution.py:
+ Drop-in replacement for ./testrunner/local/execution that distributes
+ test jobs to network peers instead of running them locally.
+
+./testrunner/network/endpoint.py:
+ Receiving end of a network distributed job, uses the implementation
+ in ./testrunner/local/execution.py for actually running the tests.
+
+./testrunner/server/*:
+ Implementation of the daemon that accepts and runs test execution jobs from
+ peers on the network. Should ideally have no dependencies on any of the other
+ directories, but that turned out to be impractical, so there are a few
+ exceptions.
+
+./testrunner/server/compression.py:
+ Defines a wrapper around Python TCP sockets that provides JSON based
+ serialization, gzip based compression, and ensures message completeness.
+
+
+Networking architecture:
+------------------------
+
+The distribution stuff is designed to be a layer between deciding which tests
+to run on the one side, and actually running them on the other. The frontend
+that the user interacts with is the same for local and networked execution,
+and the actual test execution and result gathering code is the same too.
+
+The server daemon starts four separate servers, each listening on another port:
+- "Local": Communication with a run-tests.py script running on the same host.
+ The test driving script e.g. needs to ask for available peers. It then talks
+ to those peers directly (one of them will be the locally running server).
+- "Work": Listens for test job requests from run-tests.py scripts on the network
+ (including localhost). Accepts an arbitrary number of connections at the
+ same time, but only works on them in a serialized fashion.
+- "Status": Used for communication with other servers on the network, e.g. for
+ exchanging trusted public keys to create the transitive trust closure.
+- "Discovery": Used to detect presence of other peers on the network.
+ In contrast to the other three, this uses UDP (as opposed to TCP).
+
+
+Give us a diagram! We love diagrams!
+------------------------------------
+ .
+ Machine A . Machine B
+ .
++------------------------------+ .
+| run-tests.py | .
+| with flag: | .
+|--nonetwork --network | .
+| | / | | .
+| | / | | .
+| v / v | .
+|BACKEND / distribution | .
++--------- / --------| \ ------+ .
+ / | \_____________________
+ / | . \
+ / | . \
++----- v ----------- v --------+ . +---- v -----------------------+
+| LocalHandler | WorkHandler | . | WorkHandler | LocalHandler |
+| | | | . | | | |
+| | v | . | v | |
+| | BACKEND | . | BACKEND | |
+|------------- +---------------| . |---------------+--------------|
+| Discovery | StatusHandler <----------> StatusHandler | Discovery |
++---- ^ -----------------------+ . +-------------------- ^ -------+
+ | . |
+ +---------------------------------------------------------+
+
+Note that the three occurrences of "BACKEND" are the same code
+(testrunner/local/execution.py and its imports), but running from three
+distinct directories (and on two different machines).
diff --git a/src/3rdparty/v8/tools/testrunner/__init__.py b/src/3rdparty/v8/tools/testrunner/__init__.py
new file mode 100644
index 0000000..202a262
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/tools/testrunner/local/__init__.py b/src/3rdparty/v8/tools/testrunner/local/__init__.py
new file mode 100644
index 0000000..202a262
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/tools/testrunner/local/commands.py b/src/3rdparty/v8/tools/testrunner/local/commands.py
new file mode 100644
index 0000000..01f170d
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/commands.py
@@ -0,0 +1,153 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+from ..local import utils
+from ..objects import output
+
+
+def KillProcessWithID(pid):
+ if utils.IsWindows():
+ os.popen('taskkill /T /F /PID %d' % pid)
+ else:
+ os.kill(pid, signal.SIGTERM)
+
+
+MAX_SLEEP_TIME = 0.1
+INITIAL_SLEEP_TIME = 0.0001
+SLEEP_TIME_FACTOR = 1.25
+
+SEM_INVALID_VALUE = -1
+SEM_NOGPFAULTERRORBOX = 0x0002 # Microsoft Platform SDK WinBase.h
+
+
+def Win32SetErrorMode(mode):
+ prev_error_mode = SEM_INVALID_VALUE
+ try:
+ import ctypes
+ prev_error_mode = \
+ ctypes.windll.kernel32.SetErrorMode(mode) #@UndefinedVariable
+ except ImportError:
+ pass
+ return prev_error_mode
+
+
+def RunProcess(verbose, timeout, args, **rest):
+ if verbose: print "#", " ".join(args)
+ popen_args = args
+ prev_error_mode = SEM_INVALID_VALUE
+ if utils.IsWindows():
+ popen_args = subprocess.list2cmdline(args)
+ # Try to change the error mode to avoid dialogs on fatal errors. Don't
+ # touch any existing error mode flags by merging the existing error mode.
+ # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx.
+ error_mode = SEM_NOGPFAULTERRORBOX
+ prev_error_mode = Win32SetErrorMode(error_mode)
+ Win32SetErrorMode(error_mode | prev_error_mode)
+ process = subprocess.Popen(
+ shell=utils.IsWindows(),
+ args=popen_args,
+ **rest
+ )
+ if (utils.IsWindows() and prev_error_mode != SEM_INVALID_VALUE):
+ Win32SetErrorMode(prev_error_mode)
+ # Compute the end time - if the process crosses this limit we
+ # consider it timed out.
+ if timeout is None: end_time = None
+ else: end_time = time.time() + timeout
+ timed_out = False
+ # Repeatedly check the exit code from the process in a
+ # loop and keep track of whether or not it times out.
+ exit_code = None
+ sleep_time = INITIAL_SLEEP_TIME
+ try:
+ while exit_code is None:
+ if (not end_time is None) and (time.time() >= end_time):
+ # Kill the process and wait for it to exit.
+ KillProcessWithID(process.pid)
+ exit_code = process.wait()
+ timed_out = True
+ else:
+ exit_code = process.poll()
+ time.sleep(sleep_time)
+ sleep_time = sleep_time * SLEEP_TIME_FACTOR
+ if sleep_time > MAX_SLEEP_TIME:
+ sleep_time = MAX_SLEEP_TIME
+ return (exit_code, timed_out)
+ except KeyboardInterrupt:
+ raise
+
+
+def PrintError(string):
+ sys.stderr.write(string)
+ sys.stderr.write("\n")
+
+
+def CheckedUnlink(name):
+ # On Windows, when run with -jN in parallel processes,
+ # OS often fails to unlink the temp file. Not sure why.
+ # Need to retry.
+ # Idea from https://bugs.webkit.org/attachment.cgi?id=75982&action=prettypatch
+ retry_count = 0
+ while retry_count < 30:
+ try:
+ os.unlink(name)
+ return
+ except OSError, e:
+ retry_count += 1
+ time.sleep(retry_count * 0.1)
+ PrintError("os.unlink() " + str(e))
+
+
+def Execute(args, verbose=False, timeout=None):
+ args = [ c for c in args if c != "" ]
+ (fd_out, outname) = tempfile.mkstemp()
+ (fd_err, errname) = tempfile.mkstemp()
+ try:
+ (exit_code, timed_out) = RunProcess(
+ verbose,
+ timeout,
+ args=args,
+ stdout=fd_out,
+ stderr=fd_err
+ )
+ except:
+ raise
+ os.close(fd_out)
+ os.close(fd_err)
+ out = file(outname).read()
+ errors = file(errname).read()
+ CheckedUnlink(outname)
+ CheckedUnlink(errname)
+ return output.Output(exit_code, timed_out, out, errors)
diff --git a/src/3rdparty/v8/tools/testrunner/local/execution.py b/src/3rdparty/v8/tools/testrunner/local/execution.py
new file mode 100644
index 0000000..6004367
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/execution.py
@@ -0,0 +1,182 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import multiprocessing
+import os
+import threading
+import time
+
+from . import commands
+from . import utils
+
+
+BREAK_NOW = -1
+EXCEPTION = -2
+
+
+class Job(object):
+ def __init__(self, command, dep_command, test_id, timeout, verbose):
+ self.command = command
+ self.dep_command = dep_command
+ self.id = test_id
+ self.timeout = timeout
+ self.verbose = verbose
+
+
+def RunTest(job):
+ try:
+ start_time = time.time()
+ if job.dep_command is not None:
+ dep_output = commands.Execute(job.dep_command, job.verbose, job.timeout)
+ # TODO(jkummerow): We approximate the test suite specific function
+ # IsFailureOutput() by just checking the exit code here. Currently
+ # only cctests define dependencies, for which this simplification is
+ # correct.
+ if dep_output.exit_code != 0:
+ return (job.id, dep_output, time.time() - start_time)
+ output = commands.Execute(job.command, job.verbose, job.timeout)
+ return (job.id, output, time.time() - start_time)
+ except KeyboardInterrupt:
+ return (-1, BREAK_NOW, 0)
+ except Exception, e:
+ print(">>> EXCEPTION: %s" % e)
+ return (-1, EXCEPTION, 0)
+
+
+class Runner(object):
+
+ def __init__(self, suites, progress_indicator, context):
+ self.tests = [ t for s in suites for t in s.tests ]
+ self._CommonInit(len(self.tests), progress_indicator, context)
+
+ def _CommonInit(self, num_tests, progress_indicator, context):
+ self.indicator = progress_indicator
+ progress_indicator.runner = self
+ self.context = context
+ self.succeeded = 0
+ self.total = num_tests
+ self.remaining = num_tests
+ self.failed = []
+ self.crashed = 0
+ self.terminate = False
+ self.lock = threading.Lock()
+
+ def Run(self, jobs):
+ self.indicator.Starting()
+ self._RunInternal(jobs)
+ self.indicator.Done()
+ if self.failed:
+ return 1
+ return 0
+
+ def _RunInternal(self, jobs):
+ pool = multiprocessing.Pool(processes=jobs)
+ test_map = {}
+ queue = []
+ queued_exception = None
+ for test in self.tests:
+ assert test.id >= 0
+ test_map[test.id] = test
+ try:
+ command = self.GetCommand(test)
+ except Exception, e:
+ # If this failed, save the exception and re-raise it later (after
+ # all other tests have had a chance to run).
+ queued_exception = e
+ continue
+ timeout = self.context.timeout
+ if ("--stress-opt" in test.flags or
+ "--stress-opt" in self.context.mode_flags or
+ "--stress-opt" in self.context.extra_flags):
+ timeout *= 4
+ if test.dependency is not None:
+ dep_command = [ c.replace(test.path, test.dependency) for c in command ]
+ else:
+ dep_command = None
+ job = Job(command, dep_command, test.id, timeout, self.context.verbose)
+ queue.append(job)
+ try:
+ kChunkSize = 1
+ it = pool.imap_unordered(RunTest, queue, kChunkSize)
+ for result in it:
+ test_id = result[0]
+ if test_id < 0:
+ if result[1] == BREAK_NOW:
+ self.terminate = True
+ else:
+ continue
+ if self.terminate:
+ pool.terminate()
+ pool.join()
+ raise BreakNowException("User pressed Ctrl+C or IO went wrong")
+ test = test_map[test_id]
+ self.indicator.AboutToRun(test)
+ test.output = result[1]
+ test.duration = result[2]
+ if test.suite.HasUnexpectedOutput(test):
+ self.failed.append(test)
+ if test.output.HasCrashed():
+ self.crashed += 1
+ else:
+ self.succeeded += 1
+ self.remaining -= 1
+ self.indicator.HasRun(test)
+ except KeyboardInterrupt:
+ pool.terminate()
+ pool.join()
+ raise
+ except Exception, e:
+ print("Exception: %s" % e)
+ pool.terminate()
+ pool.join()
+ raise
+ if queued_exception:
+ raise queued_exception
+ return
+
+
+ def GetCommand(self, test):
+ d8testflag = []
+ shell = test.suite.shell()
+ if shell == "d8":
+ d8testflag = ["--test"]
+ if utils.IsWindows():
+ shell += ".exe"
+ cmd = ([self.context.command_prefix] +
+ [os.path.abspath(os.path.join(self.context.shell_dir, shell))] +
+ d8testflag +
+ test.suite.GetFlagsForTestCase(test, self.context) +
+ [self.context.extra_flags])
+ return cmd
+
+
+class BreakNowException(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
diff --git a/src/3rdparty/v8/tools/testrunner/local/old_statusfile.py b/src/3rdparty/v8/tools/testrunner/local/old_statusfile.py
new file mode 100644
index 0000000..a16941b
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/old_statusfile.py
@@ -0,0 +1,460 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import cStringIO
+import re
+
+# These outcomes can occur in a TestCase's outcomes list:
+SKIP = 'SKIP'
+FAIL = 'FAIL'
+PASS = 'PASS'
+OKAY = 'OKAY'
+TIMEOUT = 'TIMEOUT'
+CRASH = 'CRASH'
+SLOW = 'SLOW'
+# These are just for the status files and are mapped below in DEFS:
+FAIL_OK = 'FAIL_OK'
+PASS_OR_FAIL = 'PASS_OR_FAIL'
+
+KEYWORDS = {SKIP: SKIP,
+ FAIL: FAIL,
+ PASS: PASS,
+ OKAY: OKAY,
+ TIMEOUT: TIMEOUT,
+ CRASH: CRASH,
+ SLOW: SLOW,
+ FAIL_OK: FAIL_OK,
+ PASS_OR_FAIL: PASS_OR_FAIL}
+
+class Expression(object):
+ pass
+
+
+class Constant(Expression):
+
+ def __init__(self, value):
+ self.value = value
+
+ def Evaluate(self, env, defs):
+ return self.value
+
+
+class Variable(Expression):
+
+ def __init__(self, name):
+ self.name = name
+
+ def GetOutcomes(self, env, defs):
+ if self.name in env: return set([env[self.name]])
+ else: return set([])
+
+ def Evaluate(self, env, defs):
+ return env[self.name]
+
+ def __str__(self):
+ return self.name
+
+ def string(self, logical):
+ return self.__str__()
+
+
+class Outcome(Expression):
+
+ def __init__(self, name):
+ self.name = name
+
+ def GetOutcomes(self, env, defs):
+ if self.name in defs:
+ return defs[self.name].GetOutcomes(env, defs)
+ else:
+ return set([self.name])
+
+ def __str__(self):
+ if self.name in KEYWORDS:
+ return "%s" % KEYWORDS[self.name]
+ return "'%s'" % self.name
+
+ def string(self, logical):
+ if logical:
+ return "%s" % self.name
+ return self.__str__()
+
+
+class Operation(Expression):
+
+ def __init__(self, left, op, right):
+ self.left = left
+ self.op = op
+ self.right = right
+
+ def Evaluate(self, env, defs):
+ if self.op == '||' or self.op == ',':
+ return self.left.Evaluate(env, defs) or self.right.Evaluate(env, defs)
+ elif self.op == 'if':
+ return False
+ elif self.op == '==':
+ return not self.left.GetOutcomes(env, defs).isdisjoint(self.right.GetOutcomes(env, defs))
+ elif self.op == '!=':
+ return self.left.GetOutcomes(env, defs).isdisjoint(self.right.GetOutcomes(env, defs))
+ else:
+ assert self.op == '&&'
+ return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)
+
+ def GetOutcomes(self, env, defs):
+ if self.op == '||' or self.op == ',':
+ return self.left.GetOutcomes(env, defs) | self.right.GetOutcomes(env, defs)
+ elif self.op == 'if':
+ if self.right.Evaluate(env, defs): return self.left.GetOutcomes(env, defs)
+ else: return set([])
+ else:
+ assert self.op == '&&'
+ return self.left.GetOutcomes(env, defs) & self.right.GetOutcomes(env, defs)
+
+ def __str__(self):
+ return self.string(False)
+
+ def string(self, logical=False):
+ if self.op == 'if':
+ return "['%s', %s]" % (self.right.string(True), self.left.string(logical))
+ elif self.op == "||" or self.op == ",":
+ if logical:
+ return "%s or %s" % (self.left.string(True), self.right.string(True))
+ else:
+ return "%s, %s" % (self.left, self.right)
+ elif self.op == "&&":
+ return "%s and %s" % (self.left.string(True), self.right.string(True))
+ return "%s %s %s" % (self.left.string(logical), self.op,
+ self.right.string(logical))
+
+
+def IsAlpha(string):
+ for char in string:
+ if not (char.isalpha() or char.isdigit() or char == '_'):
+ return False
+ return True
+
+
+class Tokenizer(object):
+ """A simple string tokenizer that chops expressions into variables,
+ parens and operators"""
+
+ def __init__(self, expr):
+ self.index = 0
+ self.expr = expr
+ self.length = len(expr)
+ self.tokens = None
+
+ def Current(self, length=1):
+ if not self.HasMore(length): return ""
+ return self.expr[self.index:self.index + length]
+
+ def HasMore(self, length=1):
+ return self.index < self.length + (length - 1)
+
+ def Advance(self, count=1):
+ self.index = self.index + count
+
+ def AddToken(self, token):
+ self.tokens.append(token)
+
+ def SkipSpaces(self):
+ while self.HasMore() and self.Current().isspace():
+ self.Advance()
+
+ def Tokenize(self):
+ self.tokens = [ ]
+ while self.HasMore():
+ self.SkipSpaces()
+ if not self.HasMore():
+ return None
+ if self.Current() == '(':
+ self.AddToken('(')
+ self.Advance()
+ elif self.Current() == ')':
+ self.AddToken(')')
+ self.Advance()
+ elif self.Current() == '$':
+ self.AddToken('$')
+ self.Advance()
+ elif self.Current() == ',':
+ self.AddToken(',')
+ self.Advance()
+ elif IsAlpha(self.Current()):
+ buf = ""
+ while self.HasMore() and IsAlpha(self.Current()):
+ buf += self.Current()
+ self.Advance()
+ self.AddToken(buf)
+ elif self.Current(2) == '&&':
+ self.AddToken('&&')
+ self.Advance(2)
+ elif self.Current(2) == '||':
+ self.AddToken('||')
+ self.Advance(2)
+ elif self.Current(2) == '==':
+ self.AddToken('==')
+ self.Advance(2)
+ elif self.Current(2) == '!=':
+ self.AddToken('!=')
+ self.Advance(2)
+ else:
+ return None
+ return self.tokens
+
+
+class Scanner(object):
+  """A simple scanner that can serve out tokens from a given list"""
+
+  def __init__(self, tokens):
+    self.tokens = tokens
+    self.length = len(tokens)
+    self.index = 0
+
+  def HasMore(self):
+    # True while at least one unconsumed token remains.
+    return self.index < self.length
+
+  def Current(self):
+    # Precondition: HasMore() — no bounds check is performed here.
+    return self.tokens[self.index]
+
+  def Advance(self):
+    # Consume the current token.
+    self.index = self.index + 1
+
+
+def ParseAtomicExpression(scan):
+  """Parses one atom: true/false, an outcome name, a $variable, or a
+  parenthesized subexpression.
+
+  Returns an AST node (Constant/Outcome/Variable are presumably the node
+  classes defined earlier in this file — outside this view) or None on a
+  parse error.
+  """
+  if scan.Current() == "true":
+    scan.Advance()
+    return Constant(True)
+  elif scan.Current() == "false":
+    scan.Advance()
+    return Constant(False)
+  elif IsAlpha(scan.Current()):
+    name = scan.Current()
+    scan.Advance()
+    return Outcome(name)
+  elif scan.Current() == '$':
+    # '$' must be followed by an identifier; variable names are
+    # case-insensitive (lower-cased here).
+    scan.Advance()
+    if not IsAlpha(scan.Current()):
+      return None
+    name = scan.Current()
+    scan.Advance()
+    return Variable(name.lower())
+  elif scan.Current() == '(':
+    scan.Advance()
+    result = ParseLogicalExpression(scan)
+    if (not result) or (scan.Current() != ')'):
+      return None
+    scan.Advance()
+    return result
+  else:
+    return None
+
+
+# Binary comparison operators recognized at this precedence level.
+BINARIES = ['==', '!=']
+def ParseOperatorExpression(scan):
+  """Parses chains of == / != comparisons over atoms.
+
+  Returns an Operation AST node (or a bare atom), or None on error.
+  Note the right operand is parsed by a recursive call, so chains
+  group to the right.
+  """
+  left = ParseAtomicExpression(scan)
+  if not left: return None
+  while scan.HasMore() and (scan.Current() in BINARIES):
+    op = scan.Current()
+    scan.Advance()
+    right = ParseOperatorExpression(scan)
+    if not right:
+      return None
+    left = Operation(left, op, right)
+  return left
+
+
+def ParseConditionalExpression(scan):
+  """Parses 'A if B' conditional expressions over operator expressions.
+
+  Returns an AST node or None on a parse error.
+  """
+  left = ParseOperatorExpression(scan)
+  if not left: return None
+  while scan.HasMore() and (scan.Current() == 'if'):
+    scan.Advance()
+    right = ParseOperatorExpression(scan)
+    if not right:
+      return None
+    left = Operation(left, 'if', right)
+  return left
+
+
+# Logical connectives at the lowest precedence level; ',' is treated as
+# an operator alongside && and ||.
+LOGICALS = ["&&", "||", ","]
+def ParseLogicalExpression(scan):
+  """Parses &&- / ||- / ','-joined conditional expressions (top level).
+
+  Returns an AST node or None on a parse error.
+  """
+  left = ParseConditionalExpression(scan)
+  if not left: return None
+  while scan.HasMore() and (scan.Current() in LOGICALS):
+    op = scan.Current()
+    scan.Advance()
+    right = ParseConditionalExpression(scan)
+    if not right:
+      return None
+    left = Operation(left, op, right)
+  return left
+
+
+def ParseCondition(expr):
+  """Parses a logical expression into an Expression object"""
+  # Tokenize, parse, and verify all tokens were consumed; each failure
+  # mode reports the same diagnostic and yields None.
+  tokens = Tokenizer(expr).Tokenize()
+  if not tokens:
+    print "Malformed expression: '%s'" % expr
+    return None
+  scan = Scanner(tokens)
+  ast = ParseLogicalExpression(scan)
+  if not ast:
+    print "Malformed expression: '%s'" % expr
+    return None
+  if scan.HasMore():
+    # Leftover tokens after a successful parse also count as malformed.
+    print "Malformed expression: '%s'" % expr
+    return None
+  return ast
+
+
+class Section(object):
+  """A section of the configuration file. Sections are enabled or
+  disabled prior to running the tests, based on their conditions"""
+
+  def __init__(self, condition):
+    # condition: the parsed Expression guarding this section.
+    # rules: the Rule objects declared inside the section.
+    self.condition = condition
+    self.rules = [ ]
+
+  def AddRule(self, rule):
+    self.rules.append(rule)
+
+
+class Rule(object):
+  """A single rule that specifies the expected outcome for a single
+  test."""
+
+  def __init__(self, raw_path, path, value):
+    # raw_path: the path string as written in the status file.
+    # path: list of pattern objects (each supports .match()) derived
+    #       from raw_path.
+    # value: expression yielding the expected outcomes.
+    self.raw_path = raw_path
+    self.path = path
+    self.value = value
+
+  def GetOutcomes(self, env, defs):
+    """Evaluates this rule's outcome expression in the given environment."""
+    return self.value.GetOutcomes(env, defs)
+
+  def Contains(self, path):
+    """Returns True if |path| (a list of components) matches this rule.
+
+    The rule may be a prefix of |path|: every rule component must match
+    the corresponding path component, extra path components are allowed.
+    """
+    if len(self.path) > len(path):
+      return False
+    for i in xrange(len(self.path)):
+      if not self.path[i].match(path[i]):
+        return False
+    return True
+
+
+# Line classifiers for the old status file format:
+# - HEADER_PATTERN: bracketed section header; group 1 is the condition text.
+HEADER_PATTERN = re.compile(r'\[([^]]+)\]')
+# - RULE_PATTERN: "path: outcomes"; group 1 is the path, group 2 the rest.
+RULE_PATTERN = re.compile(r'\s*([^: ]*)\s*:(.*)')
+# - DEF_PATTERN: deprecated "def NAME = ..." custom definition line.
+DEF_PATTERN = re.compile(r'^def\s*(\w+)\s*=(.*)$')
+# - PREFIX_PATTERN: "prefix some/path" line (skipped by the converter).
+PREFIX_PATTERN = re.compile(r'^\s*prefix\s+([\w\_\.\-\/]+)$')
+
+
+class ConvertNotation(object):
+  """Converts an old-format .status file into the new Python-literal
+  notation (a list of [condition, {rule: outcomes}] sections).
+
+  The converted text is accumulated in an in-memory buffer and returned
+  by GetOutput().
+  """
+
+  def __init__(self, path):
+    # path: old-format status file to read.
+    # indent: current output indentation (grows inside sections).
+    # comment: pending comment/blank lines to flush before the next item.
+    # init/section: whether the global list / current section is open;
+    #   |section| also remembers the section's condition for the
+    #   closing comment.
+    self.path = path
+    self.indent = ""
+    self.comment = []
+    self.init = False
+    self.section = False
+    self.out = cStringIO.StringIO()
+
+  def OpenGlobal(self):
+    # Emit the opening "[" of the global section list (idempotent).
+    if self.init: return
+    self.WriteComment()
+    print >> self.out, "["
+    self.init = True
+
+  def CloseGlobal(self):
+    # Emit the closing "]" (idempotent).
+    if not self.init: return
+    print >> self.out, "]"
+    self.init = False
+
+  def OpenSection(self, condition="ALWAYS"):
+    # Start a new [condition, { section; non-string conditions are
+    # parsed Expression objects and are rendered via their string() form.
+    if self.section: return
+    self.OpenGlobal()
+    if type(condition) != str:
+      condition = "'%s'" % condition.string(True)
+    print >> self.out, "%s[%s, {" % (self.indent, condition)
+    self.indent += " " * 2
+    self.section = condition
+
+  def CloseSection(self):
+    # End the current section, echoing its condition as a comment.
+    if not self.section: return
+    self.indent = self.indent[:-2]
+    print >> self.out, "%s}], # %s" % (self.indent, self.section)
+    self.section = False
+
+  def WriteComment(self):
+    # Flush buffered comment/blank lines at the current indentation.
+    # The trailing comma on the print suppresses the extra newline,
+    # since buffered lines keep their own '\n'.
+    if not self.comment: return
+    for c in self.comment:
+      if len(c.strip()) == 0:
+        print >> self.out, ""
+      else:
+        print >> self.out, "%s%s" % (self.indent, c),
+    self.comment = []
+
+  def GetOutput(self):
+    """Reads the old-format file and returns the converted text."""
+    with open(self.path) as f:
+      for line in f:
+        # Comments and blank lines are buffered and re-emitted next to
+        # the following section or rule.
+        if line[0] == '#':
+          self.comment += [line]
+          continue
+        if len(line.strip()) == 0:
+          self.comment += [line]
+          continue
+        header_match = HEADER_PATTERN.match(line)
+        if header_match:
+          condition = ParseCondition(header_match.group(1).strip())
+          self.CloseSection()
+          self.WriteComment()
+          self.OpenSection(condition)
+          continue
+        rule_match = RULE_PATTERN.match(line)
+        if rule_match:
+          self.OpenSection()
+          self.WriteComment()
+          path = rule_match.group(1).strip()
+          value_str = rule_match.group(2).strip()
+          comment = ""
+          # Preserve a trailing "# ..." comment on the rule line.
+          if '#' in value_str:
+            pos = value_str.find('#')
+            comment = " %s" % value_str[pos:].strip()
+            value_str = value_str[:pos].strip()
+          value = ParseCondition(value_str)
+          print >> self.out, ("%s'%s': [%s],%s" %
+                              (self.indent, path, value, comment))
+          continue
+        def_match = DEF_PATTERN.match(line)
+        if def_match:
+          # Custom definitions are deprecated.
+          continue
+        prefix_match = PREFIX_PATTERN.match(line)
+        if prefix_match:
+          continue
+        print "Malformed line: '%s'." % line
+    self.CloseSection()
+    self.CloseGlobal()
+    result = self.out.getvalue()
+    self.out.close()
+    return result
diff --git a/src/3rdparty/v8/tools/testrunner/local/progress.py b/src/3rdparty/v8/tools/testrunner/local/progress.py
new file mode 100644
index 0000000..9075a95
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/progress.py
@@ -0,0 +1,238 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import sys
+import time
+
+def EscapeCommand(command):
+  """Joins a command argument list into a single printable shell-like
+  string, double-quoting arguments that contain spaces."""
+  parts = []
+  for part in command:
+    if ' ' in part:
+      # Escape spaces. We may need to escape more characters for this
+      # to work properly.
+      parts.append('"%s"' % part)
+    else:
+      parts.append(part)
+  return " ".join(parts)
+
+
+class ProgressIndicator(object):
+  """Base class for progress reporting; subclasses override the
+  Starting/Done/AboutToRun/HasRun hooks. |runner| is assigned
+  externally before use."""
+
+  def __init__(self):
+    self.runner = None
+
+  def Starting(self):
+    pass
+
+  def Done(self):
+    pass
+
+  def AboutToRun(self, test):
+    pass
+
+  def HasRun(self, test):
+    pass
+
+  def PrintFailureHeader(self, test):
+    # Prints "=== label [negative] ===" style header for a failed test.
+    if test.suite.IsNegativeTest(test):
+      negative_marker = '[negative] '
+    else:
+      negative_marker = ''
+    print "=== %(label)s %(negative)s===" % {
+      'label': test.GetLabel(),
+      'negative': negative_marker
+    }
+
+
+class SimpleProgressIndicator(ProgressIndicator):
+  """Abstract base class for {Verbose,Dots}ProgressIndicator"""
+
+  def Starting(self):
+    print 'Running %i tests' % self.runner.total
+
+  def Done(self):
+    # Print per-failure details (stderr/stdout, the command line, and
+    # crash/timeout markers), then an overall summary banner.
+    print
+    for failed in self.runner.failed:
+      self.PrintFailureHeader(failed)
+      if failed.output.stderr:
+        print "--- stderr ---"
+        print failed.output.stderr.strip()
+      if failed.output.stdout:
+        print "--- stdout ---"
+        print failed.output.stdout.strip()
+      print "Command: %s" % EscapeCommand(self.runner.GetCommand(failed))
+      if failed.output.HasCrashed():
+        print "--- CRASHED ---"
+      if failed.output.HasTimedOut():
+        print "--- TIMEOUT ---"
+    if len(self.runner.failed) == 0:
+      print "==="
+      print "=== All tests succeeded"
+      print "==="
+    else:
+      print
+      print "==="
+      print "=== %i tests failed" % len(self.runner.failed)
+      if self.runner.crashed > 0:
+        print "=== %i tests CRASHED" % self.runner.crashed
+      print "==="
+
+
+class VerboseProgressIndicator(SimpleProgressIndicator):
+  """Prints one line per test before and after it runs."""
+
+  def AboutToRun(self, test):
+    print 'Starting %s...' % test.GetLabel()
+    sys.stdout.flush()
+
+  def HasRun(self, test):
+    # Report CRASH/FAIL for unexpected output, otherwise 'pass'.
+    if test.suite.HasUnexpectedOutput(test):
+      if test.output.HasCrashed():
+        outcome = 'CRASH'
+      else:
+        outcome = 'FAIL'
+    else:
+      outcome = 'pass'
+    print 'Done running %s: %s' % (test.GetLabel(), outcome)
+
+
+class DotsProgressIndicator(SimpleProgressIndicator):
+  """Prints one character per test: '.' pass, 'F' fail, 'C' crash,
+  'T' timeout, wrapping the line every 50 tests."""
+
+  def HasRun(self, test):
+    total = self.runner.succeeded + len(self.runner.failed)
+    if (total > 1) and (total % 50 == 1):
+      sys.stdout.write('\n')
+    if test.suite.HasUnexpectedOutput(test):
+      if test.output.HasCrashed():
+        sys.stdout.write('C')
+        sys.stdout.flush()
+      elif test.output.HasTimedOut():
+        sys.stdout.write('T')
+        sys.stdout.flush()
+      else:
+        sys.stdout.write('F')
+        sys.stdout.flush()
+    else:
+      sys.stdout.write('.')
+      sys.stdout.flush()
+
+
+class CompactProgressIndicator(ProgressIndicator):
+  """Abstract base class for {Color,Monochrome}ProgressIndicator"""
+
+  def __init__(self, templates):
+    # templates: dict with 'status_line', 'stdout' and 'stderr' format
+    # strings supplied by the concrete subclass.
+    super(CompactProgressIndicator, self).__init__()
+    self.templates = templates
+    self.last_status_length = 0
+    self.start_time = time.time()
+
+  def Done(self):
+    self.PrintProgress('Done')
+    print "" # Line break.
+
+  def AboutToRun(self, test):
+    self.PrintProgress(test.GetLabel())
+
+  def HasRun(self, test):
+    # On unexpected output, clear the status line and dump full details
+    # for the failed test.
+    if test.suite.HasUnexpectedOutput(test):
+      self.ClearLine(self.last_status_length)
+      self.PrintFailureHeader(test)
+      stdout = test.output.stdout.strip()
+      if len(stdout):
+        print self.templates['stdout'] % stdout
+      stderr = test.output.stderr.strip()
+      if len(stderr):
+        print self.templates['stderr'] % stderr
+      print "Command: %s" % EscapeCommand(self.runner.GetCommand(test))
+      if test.output.HasCrashed():
+        print "exit code: %d" % test.output.exit_code
+        print "--- CRASHED ---"
+      if test.output.HasTimedOut():
+        print "--- TIMEOUT ---"
+
+  def Truncate(self, string, length):
+    # Shortens |string| to at most |length| chars with a "..." suffix;
+    # a falsy |length| disables truncation.
+    if length and (len(string) > (length - 3)):
+      return string[:(length - 3)] + "..."
+    else:
+      return string
+
+  def PrintProgress(self, name):
+    # Rewrites the single status line in place (ClearLine is provided by
+    # the subclass). NOTE(review): the 'remaining' key actually carries
+    # the percentage already completed: (total - remaining) * 100 / total.
+    self.ClearLine(self.last_status_length)
+    elapsed = time.time() - self.start_time
+    status = self.templates['status_line'] % {
+      'passed': self.runner.succeeded,
+      'remaining': (((self.runner.total - self.runner.remaining) * 100) //
+                    self.runner.total),
+      'failed': len(self.runner.failed),
+      'test': name,
+      'mins': int(elapsed) / 60,
+      'secs': int(elapsed) % 60
+    }
+    status = self.Truncate(status, 78)
+    self.last_status_length = len(status)
+    print status,
+    sys.stdout.flush()
+
+
+class ColorProgressIndicator(CompactProgressIndicator):
+  """Compact indicator using ANSI SGR color codes (blue percentage,
+  green passed count, red failed count)."""
+
+  def __init__(self):
+    templates = {
+      'status_line': ("[%(mins)02i:%(secs)02i|"
+                      "\033[34m%%%(remaining) 4d\033[0m|"
+                      "\033[32m+%(passed) 4d\033[0m|"
+                      "\033[31m-%(failed) 4d\033[0m]: %(test)s"),
+      'stdout': "\033[1m%s\033[0m",
+      'stderr': "\033[31m%s\033[0m",
+    }
+    super(ColorProgressIndicator, self).__init__(templates)
+
+  def ClearLine(self, last_line_length):
+    # ESC[1K erases the line up to the cursor; '\r' returns to column 0.
+    print "\033[1K\r",
+
+
+class MonochromeProgressIndicator(CompactProgressIndicator):
+  """Compact indicator without color codes, for dumb terminals."""
+
+  def __init__(self):
+    templates = {
+      'status_line': ("[%(mins)02i:%(secs)02i|%%%(remaining) 4d|"
+                      "+%(passed) 4d|-%(failed) 4d]: %(test)s"),
+      'stdout': '%s',
+      'stderr': '%s',
+    }
+    super(MonochromeProgressIndicator, self).__init__(templates)
+
+  def ClearLine(self, last_line_length):
+    # Overwrite the previous status line with spaces, then return to
+    # column 0.
+    print ("\r" + (" " * last_line_length) + "\r"),
+
+
+# Registry of the available progress indicator implementations, keyed by
+# the name used to select them.
+PROGRESS_INDICATORS = {
+  'verbose': VerboseProgressIndicator,
+  'dots': DotsProgressIndicator,
+  'color': ColorProgressIndicator,
+  'mono': MonochromeProgressIndicator
+}
diff --git a/src/3rdparty/v8/tools/testrunner/local/statusfile.py b/src/3rdparty/v8/tools/testrunner/local/statusfile.py
new file mode 100644
index 0000000..bf1de45
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/statusfile.py
@@ -0,0 +1,145 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# These imports are required for the on-demand conversion from
+# old to new status file format.
+from os.path import exists
+from os.path import getmtime
+
+from . import old_statusfile
+
+
+# These outcomes can occur in a TestCase's outcomes list:
+SKIP = "SKIP"
+FAIL = "FAIL"
+PASS = "PASS"
+OKAY = "OKAY"
+TIMEOUT = "TIMEOUT"
+CRASH = "CRASH"
+SLOW = "SLOW"
+# These are just for the status files and are mapped below in DEFS:
+FAIL_OK = "FAIL_OK"
+PASS_OR_FAIL = "PASS_OR_FAIL"
+
+ALWAYS = "ALWAYS"
+
+# Identity mapping handed to eval() so the keywords above resolve to
+# themselves when a status file is evaluated as a Python literal.
+KEYWORDS = {}
+for key in [SKIP, FAIL, PASS, OKAY, TIMEOUT, CRASH, SLOW, FAIL_OK,
+            PASS_OR_FAIL, ALWAYS]:
+  KEYWORDS[key] = key
+
+# Composite outcomes expanded by _AddOutcome below.
+DEFS = {FAIL_OK: [FAIL, OKAY],
+        PASS_OR_FAIL: [PASS, FAIL]}
+
+# Support arches, modes to be written as keywords instead of strings.
+VARIABLES = {ALWAYS: True}
+for var in ["debug", "release", "android_arm", "android_ia32", "arm", "ia32",
+            "mipsel", "x64"]:
+  VARIABLES[var] = var
+
+
+def DoSkip(outcomes):
+  """Returns True if the test should not be run (SKIP or SLOW present)."""
+  return SKIP in outcomes or SLOW in outcomes
+
+
+def IsFlaky(outcomes):
+  """Returns True if the test may both pass and fail, without crashing
+  and without being marked FAIL_OK (OKAY)."""
+  return ((PASS in outcomes) and (FAIL in outcomes) and
+          (not CRASH in outcomes) and (not OKAY in outcomes))
+
+
+def IsFailOk(outcomes):
+  """Returns True if a failure is acceptable (FAIL and OKAY both set)."""
+  return (FAIL in outcomes) and (OKAY in outcomes)
+
+
+def _AddOutcome(result, new):
+  """Adds outcome |new| to set |result|, recursively expanding composite
+  outcomes (e.g. FAIL_OK) through the DEFS table."""
+  global DEFS
+  if new in DEFS:
+    mapped = DEFS[new]
+    if type(mapped) == list:
+      for m in mapped:
+        _AddOutcome(result, m)
+    elif type(mapped) == str:
+      _AddOutcome(result, mapped)
+  else:
+    result.add(new)
+
+
+def _ParseOutcomeList(rule, outcomes, target_dict, variables):
+  """Expands the outcome spec for |rule| and merges it into |target_dict|.
+
+  |outcomes| may be a single string, a list of strings, or a nested list
+  whose first element is a condition string eval'd against |variables|
+  (the remaining elements apply only when the condition holds).
+  """
+  result = set([])
+  if type(outcomes) == str:
+    outcomes = [outcomes]
+  for item in outcomes:
+    if type(item) == str:
+      _AddOutcome(result, item)
+    elif type(item) == list:
+      if not eval(item[0], variables): continue
+      for outcome in item[1:]:
+        assert type(outcome) == str
+        _AddOutcome(result, outcome)
+    else:
+      assert False
+  if len(result) == 0: return
+  # Merge with any outcomes a previous section already recorded for
+  # this rule.
+  if rule in target_dict:
+    target_dict[rule] |= result
+  else:
+    target_dict[rule] = result
+
+
+def ReadStatusFile(path, variables):
+  """Reads a status file and returns (rules, wildcards) dictionaries
+  mapping test paths (or '*'-suffixed path prefixes) to outcome sets.
+
+  Old-format ".status" files are converted on the fly to a cached
+  ".status2" sibling. NOTE(review): the file content is eval()'d, so
+  status files are trusted input.
+  """
+  # As long as the old-format .status files are authoritative, just
+  # create the converted version on demand and cache it to speed up
+  # subsequent runs.
+  if path.endswith(".status"):
+    newpath = path + "2"
+    if not exists(newpath) or getmtime(newpath) < getmtime(path):
+      print "Converting status file."
+      converted = old_statusfile.ConvertNotation(path).GetOutput()
+      with open(newpath, 'w') as f:
+        f.write(converted)
+    path = newpath
+
+  with open(path) as f:
+    global KEYWORDS
+    contents = eval(f.read(), KEYWORDS)
+
+  rules = {}
+  wildcards = {}
+  variables.update(VARIABLES)
+  for section in contents:
+    # Each section is [condition, {rule: outcomes}]; sections whose
+    # condition evaluates false under |variables| are dropped.
+    assert type(section) == list
+    assert len(section) == 2
+    if not eval(section[0], variables): continue
+    section = section[1]
+    assert type(section) == dict
+    for rule in section:
+      assert type(rule) == str
+      if rule[-1] == '*':
+        _ParseOutcomeList(rule, section[rule], wildcards, variables)
+      else:
+        _ParseOutcomeList(rule, section[rule], rules, variables)
+  return rules, wildcards
diff --git a/src/3rdparty/v8/tools/testrunner/local/testsuite.py b/src/3rdparty/v8/tools/testrunner/local/testsuite.py
new file mode 100644
index 0000000..de5cddd
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/testsuite.py
@@ -0,0 +1,184 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import imp
+import os
+
+from . import statusfile
+
+class TestSuite(object):
+  """Base class for a directory of tests; concrete suites live in each
+  test directory's testcfg.py and override the hooks below."""
+
+  @staticmethod
+  def LoadTestSuite(root):
+    """Imports |root|/testcfg.py and returns the suite object its
+    GetSuite(name, root) factory produces."""
+    name = root.split(os.path.sep)[-1]
+    f = None
+    try:
+      (f, pathname, description) = imp.find_module("testcfg", [root])
+      module = imp.load_module("testcfg", f, pathname, description)
+      suite = module.GetSuite(name, root)
+    finally:
+      # imp.find_module leaves the file open; always close it.
+      if f:
+        f.close()
+    return suite
+
+  def __init__(self, name, root):
+    self.name = name  # string
+    self.root = root  # string containing path
+    self.tests = None  # list of TestCase objects
+    self.rules = None  # dictionary mapping test path to list of outcomes
+    self.wildcards = None  # dictionary mapping test paths to list of outcomes
+    self.total_duration = None  # float, assigned on demand
+
+  def shell(self):
+    # Name of the executable used to run this suite's tests.
+    return "d8"
+
+  def suffix(self):
+    # File extension of this suite's test files.
+    return ".js"
+
+  def status_file(self):
+    return "%s/%s.status" % (self.root, self.name)
+
+  # Used in the status file and for stdout printing.
+  def CommonTestName(self, testcase):
+    return testcase.path
+
+  def ListTests(self, context):
+    # Subclasses must return the list of TestCase objects.
+    raise NotImplementedError
+
+  def VariantFlags(self):
+    # Optional list of flag sets to run each test under; None means
+    # no variants.
+    return None
+
+  def DownloadData(self):
+    # Hook for suites whose test data must be fetched first.
+    pass
+
+  def ReadStatusFile(self, variables):
+    """Populates self.rules / self.wildcards from the status file."""
+    (self.rules, self.wildcards) = \
+        statusfile.ReadStatusFile(self.status_file(), variables)
+
+  def ReadTestCases(self, context):
+    self.tests = self.ListTests(context)
+
+  def FilterTestCasesByStatus(self, warn_unused_rules):
+    """Drops tests the status file marks as skipped and attaches expected
+    outcomes to the remaining TestCase objects; optionally reports rules
+    that matched no test."""
+    filtered = []
+    used_rules = set()
+    for t in self.tests:
+      testname = self.CommonTestName(t)
+      if testname in self.rules:
+        used_rules.add(testname)
+        outcomes = self.rules[testname]
+        t.outcomes = outcomes  # Even for skipped tests, as the TestCase
+                               # object stays around and PrintReport() uses it.
+        if statusfile.DoSkip(outcomes):
+          continue  # Don't add skipped tests to |filtered|.
+      if len(self.wildcards) != 0:
+        skip = False
+        for rule in self.wildcards:
+          assert rule[-1] == '*'
+          if testname.startswith(rule[:-1]):
+            used_rules.add(rule)
+            outcomes = self.wildcards[rule]
+            t.outcomes = outcomes
+            if statusfile.DoSkip(outcomes):
+              skip = True
+              break  # "for rule in self.wildcards"
+        if skip: continue  # "for t in self.tests"
+      filtered.append(t)
+    self.tests = filtered
+
+    if not warn_unused_rules:
+      return
+
+    for rule in self.rules:
+      if rule not in used_rules:
+        print("Unused rule: %s -> %s" % (rule, self.rules[rule]))
+    for rule in self.wildcards:
+      if rule not in used_rules:
+        print("Unused rule: %s -> %s" % (rule, self.wildcards[rule]))
+
+  def FilterTestCasesByArgs(self, args):
+    """Restricts self.tests to those matching the command-line |args|
+    that name this suite; 'suite' or 'suite/*' alone keeps everything."""
+    filtered = []
+    filtered_args = []
+    for a in args:
+      argpath = a.split(os.path.sep)
+      if argpath[0] != self.name:
+        continue
+      if len(argpath) == 1 or (len(argpath) == 2 and argpath[1] == '*'):
+        return  # Don't filter, run all tests in this suite.
+      path = os.path.sep.join(argpath[1:])
+      if path[-1] == '*':
+        path = path[:-1]
+      filtered_args.append(path)
+    for t in self.tests:
+      for a in filtered_args:
+        if t.path.startswith(a):
+          filtered.append(t)
+          break
+    self.tests = filtered
+
+  def GetFlagsForTestCase(self, testcase, context):
+    # Subclasses must return the command-line flags for |testcase|.
+    raise NotImplementedError
+
+  def GetSourceForTest(self, testcase):
+    return "(no source available)"
+
+  def IsFailureOutput(self, output, testpath):
+    return output.exit_code != 0
+
+  def IsNegativeTest(self, testcase):
+    # Negative tests are expected to fail; default is no.
+    return False
+
+  def HasFailed(self, testcase):
+    """Returns True if the test's result counts as a failure, inverting
+    the raw result for negative tests."""
+    execution_failed = self.IsFailureOutput(testcase.output, testcase.path)
+    if self.IsNegativeTest(testcase):
+      return not execution_failed
+    else:
+      return execution_failed
+
+  def HasUnexpectedOutput(self, testcase):
+    """Returns True if the observed outcome is not among the expected
+    outcomes (plain PASS expected when none were recorded)."""
+    if testcase.output.HasCrashed():
+      outcome = statusfile.CRASH
+    elif testcase.output.HasTimedOut():
+      outcome = statusfile.TIMEOUT
+    elif self.HasFailed(testcase):
+      outcome = statusfile.FAIL
+    else:
+      outcome = statusfile.PASS
+    if not testcase.outcomes:
+      return outcome != statusfile.PASS
+    return not outcome in testcase.outcomes
+
+  def StripOutputForTransmit(self, testcase):
+    # Drop bulky stdout/stderr for expected results before sending the
+    # testcase over the network.
+    if not self.HasUnexpectedOutput(testcase):
+      testcase.output.stdout = ""
+      testcase.output.stderr = ""
+
+  def CalculateTotalDuration(self):
+    """Sums and caches the duration of all tests in this suite."""
+    self.total_duration = 0.0
+    for t in self.tests:
+      self.total_duration += t.duration
+    return self.total_duration
diff --git a/src/3rdparty/v8/tools/testrunner/local/utils.py b/src/3rdparty/v8/tools/testrunner/local/utils.py
new file mode 100644
index 0000000..b7caa12
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/utils.py
@@ -0,0 +1,108 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+from os.path import exists
+from os.path import isdir
+from os.path import join
+import platform
+import re
+
+
+def GetSuitePaths(test_root):
+  """Returns the names of subdirectories of |test_root| that look like
+  test suites (directories containing a testcfg.py)."""
+  def IsSuite(path):
+    return isdir(path) and exists(join(path, 'testcfg.py'))
+  return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ]
+
+
+# Reads a file into an array of strings
+def ReadLinesFrom(name):
+  """Returns the non-empty lines of file |name|, with '#' comments and
+  surrounding whitespace stripped."""
+  lines = []
+  with open(name) as f:
+    for line in f:
+      if line.startswith('#'): continue
+      if '#' in line:
+        # Strip a trailing inline comment.
+        line = line[:line.find('#')]
+      line = line.strip()
+      if not line: continue
+      lines.append(line)
+  return lines
+
+
+def GuessOS():
+  """Maps platform.system() to the OS name used by the test harness;
+  returns None for unrecognized systems."""
+  system = platform.system()
+  if system == 'Linux':
+    return 'linux'
+  elif system == 'Darwin':
+    return 'macos'
+  elif system.find('CYGWIN') >= 0:
+    return 'cygwin'
+  elif system == 'Windows' or system == 'Microsoft':
+    # On Windows Vista platform.system() can return 'Microsoft' with some
+    # versions of Python, see http://bugs.python.org/issue1082
+    return 'win32'
+  elif system == 'FreeBSD':
+    return 'freebsd'
+  elif system == 'OpenBSD':
+    return 'openbsd'
+  elif system == 'SunOS':
+    return 'solaris'
+  elif system == 'NetBSD':
+    return 'netbsd'
+  else:
+    return None
+
+
+# This will default to building the 32 bit VM even on machines that are
+# capable of running the 64 bit VM.
+def DefaultArch():
+  """Guesses the default V8 target architecture from platform.machine().
+
+  By design (see comment above) 64-bit hosts (x86_64/amd64) still map to
+  'ia32'. Returns None for unrecognized machines.
+  """
+  machine = platform.machine()
+  machine = machine.lower()  # Windows 7 capitalizes 'AMD64'.
+  if machine.startswith('arm'):
+    return 'arm'
+  elif (not machine) or (not re.match('(x|i[3-6])86$', machine) is None):
+    # An empty machine string also defaults to ia32.
+    return 'ia32'
+  elif machine == 'i86pc':
+    return 'ia32'
+  elif machine == 'x86_64':
+    return 'ia32'
+  elif machine == 'amd64':
+    return 'ia32'
+  else:
+    return None
+
+
+def GuessWordsize():
+  """Returns '64' if the machine name contains '64', else '32'."""
+  if '64' in platform.machine():
+    return '64'
+  else:
+    return '32'
+
+
+def IsWindows():
+  """Returns True when running on Windows (including 'Microsoft')."""
+  return GuessOS() == 'win32'
diff --git a/src/3rdparty/v8/tools/testrunner/local/verbose.py b/src/3rdparty/v8/tools/testrunner/local/verbose.py
new file mode 100644
index 0000000..f693467
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/local/verbose.py
@@ -0,0 +1,99 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import sys
+import time
+
+from . import statusfile
+
+
+# Summary template filled in by PrintReport() below.
+REPORT_TEMPLATE = (
+"""Total: %(total)i tests
+ * %(skipped)4d tests will be skipped
+ * %(timeout)4d tests are expected to timeout sometimes
+ * %(nocrash)4d tests are expected to be flaky but not crash
+ * %(pass)4d tests are expected to pass
+ * %(fail_ok)4d tests are expected to fail that we won't fix
+ * %(fail)4d tests are expected to fail that we should fix""")
+
+
+def PrintReport(tests):
+  """Prints a summary of how many tests fall into each expectation
+  category, per REPORT_TEMPLATE.
+
+  NOTE(review): the non-skip categories below are counted with
+  independent 'if's, so a single test can contribute to several
+  counters (e.g. both timeout and flaky).
+  """
+  total = len(tests)
+  skipped = timeout = nocrash = passes = fail_ok = fail = 0
+  for t in tests:
+    # Tests without recorded outcomes are assumed to pass.
+    if "outcomes" not in dir(t) or not t.outcomes:
+      passes += 1
+      continue
+    o = t.outcomes
+    if statusfile.DoSkip(o):
+      skipped += 1
+      continue
+    if statusfile.TIMEOUT in o: timeout += 1
+    if statusfile.IsFlaky(o): nocrash += 1
+    if list(o) == [statusfile.PASS]: passes += 1
+    if statusfile.IsFailOk(o): fail_ok += 1
+    if list(o) == [statusfile.FAIL]: fail += 1
+  print REPORT_TEMPLATE % {
+    "total": total,
+    "skipped": skipped,
+    "timeout": timeout,
+    "nocrash": nocrash,
+    "pass": passes,
+    "fail_ok": fail_ok,
+    "fail": fail
+  }
+
+
+def PrintTestSource(tests):
+  """Prints each test's source between begin/end markers; tests whose
+  suite provides no source are silently skipped."""
+  for test in tests:
+    suite = test.suite
+    source = suite.GetSourceForTest(test).strip()
+    if len(source) > 0:
+      print "--- begin source: %s/%s ---" % (suite.name, test.path)
+      print source
+      print "--- end source: %s/%s ---" % (suite.name, test.path)
+
+
+def FormatTime(d):
+  """Formats a duration in seconds as 'MM:SS.mmm'."""
+  millis = round(d * 1000) % 1000
+  return time.strftime("%M:%S.", time.gmtime(d)) + ("%03i" % millis)
+
+
+def PrintTestDurations(suites, overall_time):
+  """Writes the overall run time and the 20 slowest tests to stderr."""
+  # Write the times to stderr to make it easy to separate from the
+  # test output.
+  print
+  sys.stderr.write("--- Total time: %s ---\n" % FormatTime(overall_time))
+  timed_tests = [ t for s in suites for t in s.tests
+                  if t.duration is not None ]
+  # Python 2 cmp-style sort: longest duration first.
+  timed_tests.sort(lambda a, b: cmp(b.duration, a.duration))
+  index = 1
+  for entry in timed_tests[:20]:
+    t = FormatTime(entry.duration)
+    sys.stderr.write("%4i (%s) %s\n" % (index, t, entry.GetLabel()))
+    index += 1
diff --git a/src/3rdparty/v8/tools/testrunner/network/__init__.py b/src/3rdparty/v8/tools/testrunner/network/__init__.py
new file mode 100644
index 0000000..202a262
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/network/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/tools/testrunner/network/distro.py b/src/3rdparty/v8/tools/testrunner/network/distro.py
new file mode 100644
index 0000000..9d5a471
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/network/distro.py
@@ -0,0 +1,90 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+class Shell(object):  # Aggregates tests (possibly from several suites) that run with the same shell binary.
+ def __init__(self, shell):
+ self.shell = shell  # Identifier of the shell binary these tests share.
+ self.tests = []
+ self.total_duration = 0.0  # Sum of the absorbed suites' total_duration values.
+
+ def AddSuite(self, suite):  # Absorbs |suite|'s tests and adds its duration to the running total.
+ self.tests += suite.tests
+ self.total_duration += suite.total_duration
+
+ def SortTests(self):  # Ascending by duration, so pop() yields the longest remaining test first.
+ self.tests.sort(cmp=lambda x, y: cmp(x.duration, y.duration))
+
+
+def Assign(suites, peers):  # Distributes the suites' tests across |peers| proportionally to each peer's capacity.
+ total_work = 0.0
+ for s in suites:
+ total_work += s.CalculateTotalDuration()
+
+ total_power = 0.0  # Combined capacity: jobs * relative_performance summed over all peers.
+ for p in peers:
+ p.assigned_work = 0.0
+ total_power += p.jobs * p.relative_performance
+ for p in peers:
+ p.needed_work = total_work * p.jobs * p.relative_performance / total_power  # Fair share of the total work.
+
+ shells = {}  # Maps shell identifier -> Shell aggregate of all suites using it.
+ for s in suites:
+ shell = s.shell()
+ if not shell in shells:
+ shells[shell] = Shell(shell)
+ shells[shell].AddSuite(s)
+ # Convert |shells| to list and sort it, shortest total_duration first.
+ shells = [ shells[s] for s in shells ]
+ shells.sort(cmp=lambda x, y: cmp(x.total_duration, y.total_duration))
+ # Sort tests within each shell, longest duration last (so it's
+ # pop()'ed first).
+ for s in shells: s.SortTests()
+ # Sort peers, least needed_work first.
+ peers.sort(cmp=lambda x, y: cmp(x.needed_work, y.needed_work))
+ index = 0  # Cursor into |peers|; only advances, never resets.
+ for shell in shells:
+ while len(shell.tests) > 0:
+ while peers[index].needed_work <= 0:  # Skip peers that already have their share.
+ index += 1
+ if index == len(peers):  # NOTE(review): after this fallback pops one test, the loop re-reads peers[index] with index == len(peers) — looks like an IndexError waiting to happen; verify upstream.
+ print("BIG FAT WARNING: Assigning tests to peers failed. "
+ "Remaining tests: %d. Going to slow mode." % len(shell.tests))
+ # Pick the least-busy peer. Sorting the list for each test
+ # is terribly slow, but this is just an emergency fallback anyway.
+ peers.sort(cmp=lambda x, y: cmp(x.needed_work, y.needed_work))
+ peers[0].ForceAddOneTest(shell.tests.pop(), shell)
+ # If the peer already has a shell assigned and would need this one
+ # and then yet another, try to avoid it.
+ peer = peers[index]
+ if (shell.total_duration < peer.needed_work and
+ len(peer.shells) > 0 and
+ index < len(peers) - 1 and
+ shell.total_duration <= peers[index + 1].needed_work):  # Next peer can absorb this whole shell instead.
+ peers[index + 1].AddTests(shell)
+ else:
+ peer.AddTests(shell)
diff --git a/src/3rdparty/v8/tools/testrunner/network/endpoint.py b/src/3rdparty/v8/tools/testrunner/network/endpoint.py
new file mode 100644
index 0000000..5dc2b9f
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/network/endpoint.py
@@ -0,0 +1,124 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import multiprocessing
+import os
+import Queue
+import threading
+import time
+
+from ..local import execution
+from ..local import progress
+from ..local import testsuite
+from ..local import utils
+from ..server import compression
+
+
+class EndpointProgress(progress.ProgressIndicator):  # Streams finished-test results back over |sock| from a background sender thread.
+ def __init__(self, sock, server, ctx):
+ super(EndpointProgress, self).__init__()
+ self.sock = sock  # Socket to the requesting peer.
+ self.server = server
+ self.context = ctx
+ self.results_queue = [] # Accessors must synchronize themselves.
+ self.sender_lock = threading.Lock()  # Held while the sender thread runs; released when sending is done.
+ self.senderthread = threading.Thread(target=self._SenderThread)
+ self.senderthread.start()
+
+ def HasRun(self, test):  # Queues a finished test; a None entry is the shutdown sentinel (see Execute()).
+ # The runners that call this have a lock anyway, so this is safe.
+ self.results_queue.append(test)
+
+ def _SenderThread(self):  # Polls the queue, packs results, and sends them until the None sentinel arrives.
+ keep_running = True
+ tests = []
+ self.sender_lock.acquire()
+ while keep_running:
+ time.sleep(0.1)  # Poll interval; batches whatever accumulated since the last send.
+ # This should be "atomic enough" without locking :-)
+ # (We don't care which list any new elements get appended to, as long
+ # as we don't lose any and the last one comes last.)
+ current = self.results_queue
+ self.results_queue = []
+ for c in current:
+ if c is None:  # Sentinel: drain remaining tests, then stop.
+ keep_running = False
+ else:
+ tests.append(c)
+ if keep_running and len(tests) < 1:
+ continue # Wait for more results.
+ if len(tests) < 1: break # We're done here.
+ result = []
+ for t in tests:
+ result.append(t.PackResult())
+ try:
+ compression.Send(result, self.sock)
+ except:
+ self.runner.terminate = True  # NOTE(review): self.runner is not set anywhere in this class — presumably assigned externally by the Runner; verify.
+ for t in tests:
+ self.server.CompareOwnPerf(t, self.context.arch, self.context.mode)
+ tests = []
+ self.sender_lock.release()  # Signals Execute() that all results have been sent.
+
+
+def Execute(workspace, ctx, tests, sock, server):  # Runs |tests| locally in |workspace| and streams results back over |sock|.
+ suite_paths = utils.GetSuitePaths(os.path.join(workspace, "test"))
+ suites = []
+ for root in suite_paths:
+ suite = testsuite.TestSuite.LoadTestSuite(
+ os.path.join(workspace, "test", root))
+ if suite:
+ suites.append(suite)
+
+ suites_dict = {}  # Maps suite name -> suite, to rebind incoming tests to their suites.
+ for s in suites:
+ suites_dict[s.name] = s
+ s.tests = []  # Discard locally-discovered tests; only the received ones run.
+ for t in tests:
+ suite = suites_dict[t.suite]
+ t.suite = suite  # Replace the suite name with the actual suite object.
+ suite.tests.append(t)
+
+ suites = [ s for s in suites if len(s.tests) > 0 ]
+ for s in suites:
+ s.DownloadData()
+
+ progress_indicator = EndpointProgress(sock, server, ctx)
+ runner = execution.Runner(suites, progress_indicator, ctx)
+ try:
+ runner.Run(server.jobs)
+ except IOError, e:
+ if e.errno == 2:  # ENOENT: typically a test file missing from the transmitted patch.
+ message = ("File not found: %s, maybe you forgot to 'git add' it?" %
+ e.filename)
+ else:
+ message = "%s" % e
+ compression.Send([[-1, message]], sock)  # Negative test id signals an error to the requester.
+ progress_indicator.HasRun(None) # Sentinel to signal the end.
+ progress_indicator.sender_lock.acquire() # Released when sending is done.
+ progress_indicator.sender_lock.release()
diff --git a/src/3rdparty/v8/tools/testrunner/network/network_execution.py b/src/3rdparty/v8/tools/testrunner/network/network_execution.py
new file mode 100644
index 0000000..ddb59e6
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/network/network_execution.py
@@ -0,0 +1,253 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import socket
+import subprocess
+import threading
+import time
+
+from . import distro
+from . import perfdata
+from ..local import execution
+from ..objects import peer
+from ..objects import workpacket
+from ..server import compression
+from ..server import constants
+from ..server import local_handler
+from ..server import signatures
+
+
+def GetPeers():  # Queries the local server daemon for known peers; returns [] when unreachable or empty.
+ data = local_handler.LocalQuery([constants.REQUEST_PEERS])
+ if not data: return []
+ return [ peer.Peer.Unpack(p) for p in data ]
+
+
+class NetworkedRunner(execution.Runner):  # Distributes test execution across network peers, falling back to local runs.
+ def __init__(self, suites, progress_indicator, context, peers, workspace):
+ self.suites = suites
+ num_tests = 0
+ datapath = os.path.join("out", "testrunner_data")  # Persisted per-test duration database lives here.
+ self.perf_data_manager = perfdata.PerfDataManager(datapath)
+ self.perfdata = self.perf_data_manager.GetStore(context.arch, context.mode)
+ for s in suites:
+ for t in s.tests:
+ t.duration = self.perfdata.FetchPerfData(t) or 1.0  # Default 1s estimate for tests with no history.
+ num_tests += len(s.tests)
+ self._CommonInit(num_tests, progress_indicator, context)
+ self.tests = [] # Only used if we need to fall back to local execution.
+ self.tests_lock = threading.Lock()
+ self.peers = peers
+ self.pubkey_fingerprint = None # Fetched later.
+ self.base_rev = subprocess.check_output(
+ "cd %s; git log -1 --format=%%H --grep=git-svn-id" % workspace,
+ shell=True).strip()
+ self.base_svn_rev = subprocess.check_output(
+ "cd %s; git log -1 %s" # Get commit description.
+ " | grep -e '^\s*git-svn-id:'" # Extract "git-svn-id" line.
+ " | awk '{print $2}'" # Extract "repository@revision" part.
+ " | sed -e 's/.*@//'" % # Strip away "repository@".
+ (workspace, self.base_rev), shell=True).strip()
+ self.patch = subprocess.check_output(
+ "cd %s; git diff %s" % (workspace, self.base_rev), shell=True)  # Local changes relative to base; shipped to peers.
+ self.binaries = {}  # Maps shell name -> [contents, signature, needs_libv8] (see Run()).
+ self.initialization_lock = threading.Lock()
+ self.initialization_lock.acquire() # Released when init is done.
+ self._OpenLocalConnection()
+ self.local_receiver_thread = threading.Thread(
+ target=self._ListenLocalConnection)
+ self.local_receiver_thread.daemon = True
+ self.local_receiver_thread.start()
+ self.initialization_lock.acquire()  # Blocks until _ListenLocalConnection has the pubkey fingerprint.
+ self.initialization_lock.release()
+
+ def _OpenLocalConnection(self):  # Connects to the local server daemon and requests our pubkey fingerprint.
+ self.local_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ code = self.local_socket.connect_ex(("localhost", constants.CLIENT_PORT))
+ if code != 0:
+ raise RuntimeError("Failed to connect to local server")
+ compression.Send([constants.REQUEST_PUBKEY_FINGERPRINT], self.local_socket)
+
+ def _ListenLocalConnection(self):  # Daemon thread: consumes messages from the local server.
+ release_lock_countdown = 1 # Pubkey.
+ self.local_receiver = compression.Receiver(self.local_socket)
+ while not self.local_receiver.IsDone():
+ data = self.local_receiver.Current()
+ if data[0] == constants.REQUEST_PUBKEY_FINGERPRINT:
+ pubkey = data[1]
+ if not pubkey: raise RuntimeError("Received empty public key")
+ self.pubkey_fingerprint = pubkey
+ release_lock_countdown -= 1
+ if release_lock_countdown == 0:  # All startup messages received: unblock __init__.
+ self.initialization_lock.release()
+ release_lock_countdown -= 1 # Prevent repeated triggering.
+ self.local_receiver.Advance()
+
+ def Run(self, jobs):  # Signs binaries, assigns work to peers, runs one thread per peer, then local leftovers.
+ self.indicator.Starting()
+ need_libv8 = False
+ for s in self.suites:
+ shell = s.shell()
+ if shell not in self.binaries:
+ path = os.path.join(self.context.shell_dir, shell)
+ # Check if this is a shared library build.
+ try:
+ ldd = subprocess.check_output("ldd %s | grep libv8\\.so" % (path),
+ shell=True)
+ ldd = ldd.strip().split(" ")
+ assert ldd[0] == "libv8.so"
+ assert ldd[1] == "=>"
+ need_libv8 = True
+ binary_needs_libv8 = True
+ libv8 = signatures.ReadFileAndSignature(ldd[2])  # NOTE(review): |libv8| is only bound on this path; need_libv8 guards its use below — verify all builds hit this consistently.
+ except:
+ binary_needs_libv8 = False  # Static build (or ldd failed): ship the shell alone.
+ binary = signatures.ReadFileAndSignature(path)
+ if binary[0] is None:
+ print("Error: Failed to create signature.")
+ assert binary[1] != 0
+ return binary[1]  # Propagate the signature tool's error code.
+ binary.append(binary_needs_libv8)
+ self.binaries[shell] = binary
+ if need_libv8:
+ self.binaries["libv8.so"] = libv8
+ distro.Assign(self.suites, self.peers)
+ # Spawn one thread for each peer.
+ threads = []
+ for p in self.peers:
+ thread = threading.Thread(target=self._TalkToPeer, args=[p])
+ threads.append(thread)
+ thread.start()
+ try:
+ for thread in threads:
+ # Use a timeout so that signals (Ctrl+C) will be processed.
+ thread.join(timeout=10000000)
+ self._AnalyzePeerRuntimes()
+ except KeyboardInterrupt:
+ self.terminate = True  # Tell peer threads to stop consuming results.
+ raise
+ except Exception, _e:
+ # If there's an exception we schedule an interruption for any
+ # remaining threads...
+ self.terminate = True
+ # ...and then reraise the exception to bail out.
+ raise
+ compression.Send(constants.END_OF_STREAM, self.local_socket)
+ self.local_socket.close()
+ if self.tests:  # Tests peers never reported on: run them here.
+ self._RunInternal(jobs)
+ self.indicator.Done()
+ return not self.failed
+
+ def _TalkToPeer(self, peer):  # Thread body: ships a work packet to |peer| and consumes its results.
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(self.context.timeout + 10)  # Slack beyond per-test timeout for network latency.
+ code = sock.connect_ex((peer.address, constants.PEER_PORT))
+ if code == 0:
+ try:
+ peer.runtime = None
+ start_time = time.time()
+ packet = workpacket.WorkPacket(peer=peer, context=self.context,
+ base_revision=self.base_svn_rev,
+ patch=self.patch,
+ pubkey=self.pubkey_fingerprint)
+ data, test_map = packet.Pack(self.binaries)  # test_map: id -> test, used to match results.
+ compression.Send(data, sock)
+ compression.Send(constants.END_OF_STREAM, sock)
+ rec = compression.Receiver(sock)
+ while not rec.IsDone() and not self.terminate:
+ data_list = rec.Current()
+ for data in data_list:
+ test_id = data[0]
+ if test_id < 0:
+ # The peer is reporting an error.
+ with self.lock:
+ print("\nPeer %s reports error: %s" % (peer.address, data[1]))
+ continue
+ test = test_map.pop(test_id)  # Remaining entries are re-run locally (see below).
+ test.MergeResult(data)
+ try:
+ self.perfdata.UpdatePerfData(test)
+ except Exception, e:
+ print("UpdatePerfData exception: %s" % e)
+ pass # Just keep working.
+ with self.lock:
+ perf_key = self.perfdata.GetKey(test)
+ compression.Send(
+ [constants.INFORM_DURATION, perf_key, test.duration,
+ self.context.arch, self.context.mode],
+ self.local_socket)
+ self.indicator.AboutToRun(test)
+ if test.suite.HasUnexpectedOutput(test):
+ self.failed.append(test)
+ if test.output.HasCrashed():
+ self.crashed += 1
+ else:
+ self.succeeded += 1
+ self.remaining -= 1
+ self.indicator.HasRun(test)
+ rec.Advance()
+ peer.runtime = time.time() - start_time
+ except KeyboardInterrupt:
+ sock.close()
+ raise
+ except Exception, e:
+ print("Got exception: %s" % e)
+ pass # Fall back to local execution.
+ else:
+ compression.Send([constants.UNRESPONSIVE_PEER, peer.address],
+ self.local_socket)
+ sock.close()
+ if len(test_map) > 0:  # NOTE(review): if connect_ex failed, test_map was never assigned — this line would raise NameError; verify.
+ # Some tests have not received any results. Run them locally.
+ print("\nNo results for %d tests, running them locally." % len(test_map))
+ self._EnqueueLocally(test_map)
+
+ def _EnqueueLocally(self, test_map):  # Moves unreported tests into self.tests under the lock for a local re-run.
+ with self.tests_lock:
+ for test in test_map:
+ self.tests.append(test_map[test])
+
+ def _AnalyzePeerRuntimes(self):  # Adjusts each peer's relative_performance from observed work/runtime ratios.
+ total_runtime = 0.0
+ total_work = 0.0
+ for p in self.peers:
+ if p.runtime is None:  # A peer failed to report; skip recalibration entirely.
+ return
+ total_runtime += p.runtime
+ total_work += p.assigned_work
+ for p in self.peers:
+ p.assigned_work /= total_work  # Normalize to fractions of the whole run.
+ p.runtime /= total_runtime
+ perf_correction = p.assigned_work / p.runtime
+ old_perf = p.relative_performance
+ p.relative_performance = (old_perf + perf_correction) / 2.0  # Average with previous estimate to damp swings.
+ compression.Send([constants.UPDATE_PERF, p.address,
+ p.relative_performance],
+ self.local_socket)
diff --git a/src/3rdparty/v8/tools/testrunner/network/perfdata.py b/src/3rdparty/v8/tools/testrunner/network/perfdata.py
new file mode 100644
index 0000000..2979dc4
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/network/perfdata.py
@@ -0,0 +1,120 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import shelve
+import threading
+
+
+class PerfDataEntry(object):  # Exponentially-damped running average of one test's durations.
+ def __init__(self):
+ self.avg = 0.0  # Current average duration in seconds.
+ self.count = 0  # Number of samples, capped at kLearnRateLimiter + 1.
+
+ def AddResult(self, result):  # Folds one duration sample into the running average.
+ kLearnRateLimiter = 99 # Greater value means slower learning.
+ # We use an approximation of the average of the last 100 results here:
+ # The existing average is weighted with kLearnRateLimiter (or less
+ # if there are fewer data points).
+ effective_count = min(self.count, kLearnRateLimiter)
+ self.avg = self.avg * effective_count + result
+ self.count = effective_count + 1
+ self.avg /= self.count
+
+
+class PerfDataStore(object):  # shelve-backed map from test key to PerfDataEntry for one (arch, mode) pair.
+ def __init__(self, datadir, arch, mode):
+ filename = os.path.join(datadir, "%s.%s.perfdata" % (arch, mode))
+ self.database = shelve.open(filename, protocol=2)  # Pickle protocol 2 keeps files Python-2 compatible.
+ self.closed = False
+ self.lock = threading.Lock()  # Serializes read-modify-write cycles in RawUpdatePerfData.
+
+ def __del__(self):  # Best-effort close; callers should still close() explicitly.
+ self.close()
+
+ def close(self):  # Idempotent: safe to call more than once.
+ if self.closed: return
+ self.database.close()
+ self.closed = True
+
+ def GetKey(self, test):
+ """Computes the key used to access data for the given testcase."""
+ flags = "".join(test.flags)
+ return str("%s.%s.%s" % (test.suitename(), test.path, flags))  # str() since shelve requires string keys.
+
+ def FetchPerfData(self, test):
+ """Returns the observed duration for |test| as read from the store."""
+ key = self.GetKey(test)
+ if key in self.database:
+ return self.database[key].avg
+ return None  # No history for this test yet.
+
+ def UpdatePerfData(self, test):
+ """Updates the persisted value in the store with test.duration."""
+ testkey = self.GetKey(test)
+ self.RawUpdatePerfData(testkey, test.duration)
+
+ def RawUpdatePerfData(self, testkey, duration):  # Thread-safe read-modify-write of one entry.
+ with self.lock:
+ if testkey in self.database:
+ entry = self.database[testkey]
+ else:
+ entry = PerfDataEntry()
+ entry.AddResult(duration)
+ self.database[testkey] = entry  # Reassign so shelve persists the mutation.
+
+
+class PerfDataManager(object):  # Lazily creates and caches one PerfDataStore per (arch, mode).
+ def __init__(self, datadir):
+ self.datadir = os.path.abspath(datadir)
+ if not os.path.exists(self.datadir):
+ os.makedirs(self.datadir)
+ self.stores = {} # Keyed by arch, then mode.
+ self.closed = False
+ self.lock = threading.Lock()  # Guards lazy store creation in GetStore.
+
+ def __del__(self):  # Best-effort close of all cached stores.
+ self.close()
+
+ def close(self):  # Idempotent: closes every store exactly once.
+ if self.closed: return
+ for arch in self.stores:
+ modes = self.stores[arch]
+ for mode in modes:
+ store = modes[mode]
+ store.close()
+ self.closed = True
+
+ def GetStore(self, arch, mode):  # Returns the (arch, mode) store, creating it on first use.
+ with self.lock:
+ if not arch in self.stores:
+ self.stores[arch] = {}
+ modes = self.stores[arch]
+ if not mode in modes:
+ modes[mode] = PerfDataStore(self.datadir, arch, mode)
+ return modes[mode]
diff --git a/src/3rdparty/v8/tools/testrunner/objects/__init__.py b/src/3rdparty/v8/tools/testrunner/objects/__init__.py
new file mode 100644
index 0000000..202a262
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/tools/testrunner/objects/context.py b/src/3rdparty/v8/tools/testrunner/objects/context.py
new file mode 100644
index 0000000..b72284b
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/context.py
@@ -0,0 +1,50 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+class Context():  # Bundles run configuration; Pack/Unpack serialize the subset a remote peer needs.
+ def __init__(self, arch, mode, shell_dir, mode_flags, verbose, timeout,
+ isolates, command_prefix, extra_flags):
+ self.arch = arch
+ self.mode = mode
+ self.shell_dir = shell_dir  # Local-only: not included in Pack().
+ self.mode_flags = mode_flags
+ self.verbose = verbose  # Local-only: not included in Pack().
+ self.timeout = timeout
+ self.isolates = isolates
+ self.command_prefix = command_prefix  # Local-only: not included in Pack().
+ self.extra_flags = extra_flags
+
+ def Pack(self):  # Serializes the peer-relevant fields; local-only fields are omitted.
+ return [self.arch, self.mode, self.mode_flags, self.timeout, self.isolates,
+ self.extra_flags]
+
+ @staticmethod
+ def Unpack(packed):  # Rebuilds a Context from Pack() output; omitted fields get neutral defaults.
+ # For the order of the fields, refer to Pack() above.
+ return Context(packed[0], packed[1], None, packed[2], False,
+ packed[3], packed[4], "", packed[5])
diff --git a/src/3rdparty/v8/tools/testrunner/objects/output.py b/src/3rdparty/v8/tools/testrunner/objects/output.py
new file mode 100644
index 0000000..87b4c84
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/output.py
@@ -0,0 +1,60 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import signal
+
+from ..local import utils
+
+class Output(object):  # Captured result of one test process run; Pack/Unpack serialize it for the network.
+
+ def __init__(self, exit_code, timed_out, stdout, stderr):
+ self.exit_code = exit_code
+ self.timed_out = timed_out
+ self.stdout = stdout
+ self.stderr = stderr
+
+ def HasCrashed(self):  # True if the process died abnormally (timeouts and SIGABRT excluded).
+ if utils.IsWindows():
+ return 0x80000000 & self.exit_code and not (0x3FFFFF00 & self.exit_code)  # High bit set without status bits: NTSTATUS-style failure code.
+ else:
+ # Timed out tests will have exit_code -signal.SIGTERM.
+ if self.timed_out:
+ return False
+ return (self.exit_code < 0 and
+ self.exit_code != -signal.SIGABRT)  # Negative exit code = killed by signal; SIGABRT counts as a failed assert, not a crash.
+
+ def HasTimedOut(self):
+ return self.timed_out
+
+ def Pack(self):  # Serializes to a plain list for transmission.
+ return [self.exit_code, self.timed_out, self.stdout, self.stderr]
+
+ @staticmethod
+ def Unpack(packed):
+ # For the order of the fields, refer to Pack() above.
+ return Output(packed[0], packed[1], packed[2], packed[3])
diff --git a/src/3rdparty/v8/tools/testrunner/objects/peer.py b/src/3rdparty/v8/tools/testrunner/objects/peer.py
new file mode 100644
index 0000000..18a6bec
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/peer.py
@@ -0,0 +1,80 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+class Peer(object):
+  """Represents another test-distribution node on the network and the
+  work currently assigned to it."""
+
+  def __init__(self, address, jobs, rel_perf, pubkey):
+    self.address = address  # string: IP address
+    self.jobs = jobs  # integer: number of CPUs
+    self.relative_performance = rel_perf
+    self.pubkey = pubkey  # string: pubkey's fingerprint
+    self.shells = set()  # set of strings
+    self.needed_work = 0  # remaining duration budget for this peer
+    self.assigned_work = 0  # total duration already assigned
+    self.tests = []  # list of TestCase objects
+    self.trusting_me = False  # This peer trusts my public key.
+    self.trusted = False  # I trust this peer's public key.
+
+  def __str__(self):
+    return ("Peer at %s, jobs: %d, performance: %.2f, trust I/O: %s/%s" %
+            (self.address, self.jobs, self.relative_performance,
+             self.trusting_me, self.trusted))
+
+  def AddTests(self, shell):
+    """Adds tests from |shell| to this peer.
+
+    Stops when self.needed_work reaches zero, or when all of shell's tests
+    are assigned."""
+    assert self.needed_work > 0
+    if shell.shell not in self.shells:
+      self.shells.add(shell.shell)
+    # Pops tests off |shell| and moves their duration from the shell's
+    # total into this peer's bookkeeping.
+    while len(shell.tests) > 0 and self.needed_work > 0:
+      t = shell.tests.pop()
+      self.needed_work -= t.duration
+      self.assigned_work += t.duration
+      shell.total_duration -= t.duration
+      self.tests.append(t)
+
+  def ForceAddOneTest(self, test, shell):
+    """Forcibly adds another test to this peer, disregarding needed_work."""
+    if shell.shell not in self.shells:
+      self.shells.add(shell.shell)
+    # needed_work may go (further) negative here; that is intentional.
+    self.needed_work -= test.duration
+    self.assigned_work += test.duration
+    shell.total_duration -= test.duration
+    self.tests.append(test)
+
+
+  def Pack(self):
+    """Creates a JSON serializable representation of this Peer."""
+    # Only address/jobs/performance travel over the wire; see Unpack().
+    return [self.address, self.jobs, self.relative_performance]
+
+  @staticmethod
+  def Unpack(packed):
+    """Creates a Peer object built from a packed representation."""
+    pubkey_dummy = ""  # Callers of this don't care (only the server does).
+    return Peer(packed[0], packed[1], packed[2], pubkey_dummy)
diff --git a/src/3rdparty/v8/tools/testrunner/objects/testcase.py b/src/3rdparty/v8/tools/testrunner/objects/testcase.py
new file mode 100644
index 0000000..cfc522e
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/testcase.py
@@ -0,0 +1,83 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+from . import output
+
+class TestCase(object):
+  """One runnable test, serializable for distribution to peers."""
+
+  # NOTE(review): the mutable default |flags=[]| is shared across calls that
+  # omit the argument; safe only as long as no caller mutates it in place.
+  def __init__(self, suite, path, flags=[], dependency=None):
+    self.suite = suite  # TestSuite object
+    self.path = path  # string, e.g. 'div-mod', 'test-api/foo'
+    self.flags = flags  # list of strings, flags specific to this test case
+    self.dependency = dependency  # |path| for testcase that must be run first
+    self.outcomes = None
+    self.output = None
+    self.id = None  # int, used to map result back to TestCase instance
+    self.duration = None  # assigned during execution
+
+  def CopyAddingFlags(self, flags):
+    # Shallow copy sharing suite/path/dependency; outcomes are carried over.
+    copy = TestCase(self.suite, self.path, self.flags + flags, self.dependency)
+    copy.outcomes = self.outcomes
+    return copy
+
+  def PackTask(self):
+    """
+    Extracts those parts of this object that are required to run the test
+    and returns them as a JSON serializable object.
+    """
+    assert self.id is not None
+    return [self.suitename(), self.path, self.flags,
+            self.dependency, list(self.outcomes or []), self.id]
+
+  @staticmethod
+  def UnpackTask(task):
+    """Creates a new TestCase object based on packed task data."""
+    # For the order of the fields, refer to PackTask() above.
+    # |suite| is the suite *name* here until SetSuiteObject() resolves it.
+    test = TestCase(str(task[0]), task[1], task[2], task[3])
+    test.outcomes = set(task[4])
+    test.id = task[5]
+    return test
+
+  def SetSuiteObject(self, suites):
+    # Replaces the suite-name placeholder (see UnpackTask) with the object.
+    self.suite = suites[self.suite]
+
+  def PackResult(self):
+    """Serializes the output of the TestCase after it has run."""
+    self.suite.StripOutputForTransmit(self)
+    return [self.id, self.output.Pack(), self.duration]
+
+  def MergeResult(self, result):
+    """Applies the contents of a Result to this object."""
+    assert result[0] == self.id
+    self.output = output.Output.Unpack(result[1])
+    self.duration = result[2]
+
+  def suitename(self):
+    return self.suite.name
+
+  def GetLabel(self):
+    # Human-readable identifier, e.g. "mjsunit/div-mod".
+    return self.suitename() + "/" + self.suite.CommonTestName(self)
diff --git a/src/3rdparty/v8/tools/testrunner/objects/workpacket.py b/src/3rdparty/v8/tools/testrunner/objects/workpacket.py
new file mode 100644
index 0000000..d07efe7
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/objects/workpacket.py
@@ -0,0 +1,90 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+from . import context
+from . import testcase
+
+class WorkPacket(object):
+  """Bundle of binaries, context and tests shipped to a peer for execution.
+
+  Pack() requires |self.peer| to be set (server side); Unpack() produces a
+  packet without a peer (worker side)."""
+
+  def __init__(self, peer=None, context=None, tests=None, binaries=None,
+               base_revision=None, patch=None, pubkey=None):
+    self.peer = peer
+    self.context = context
+    self.tests = tests
+    self.binaries = binaries
+    self.base_revision = base_revision
+    self.patch = patch
+    self.pubkey_fingerprint = pubkey
+
+  def Pack(self, binaries_dict):
+    """
+    Creates a JSON serializable object containing the data of this
+    work packet.
+    """
+    need_libv8 = False
+    binaries = []
+    # Each entry of binaries_dict maps shell name -> (blob, signature,
+    # needs_libv8) as consumed below.
+    for shell in self.peer.shells:
+      prefetched_binary = binaries_dict[shell]
+      binaries.append({"name": shell,
+                       "blob": prefetched_binary[0],
+                       "sign": prefetched_binary[1]})
+      if prefetched_binary[2]:
+        need_libv8 = True
+    if need_libv8:
+      libv8 = binaries_dict["libv8.so"]
+      binaries.append({"name": "libv8.so",
+                       "blob": libv8[0],
+                       "sign": libv8[1]})
+    tests = []
+    # test_map lets the caller match returned results back to TestCases.
+    test_map = {}
+    for t in self.peer.tests:
+      test_map[t.id] = t
+      tests.append(t.PackTask())
+    result = {
+      "binaries": binaries,
+      "pubkey": self.pubkey_fingerprint,
+      "context": self.context.Pack(),
+      "base_revision": self.base_revision,
+      "patch": self.patch,
+      "tests": tests
+    }
+    return result, test_map
+
+  @staticmethod
+  def Unpack(packed):
+    """
+    Creates a WorkPacket object from the given packed representation.
+    """
+    binaries = packed["binaries"]
+    pubkey_fingerprint = packed["pubkey"]
+    ctx = context.Context.Unpack(packed["context"])
+    base_revision = packed["base_revision"]
+    patch = packed["patch"]
+    tests = [ testcase.TestCase.UnpackTask(t) for t in packed["tests"] ]
+    return WorkPacket(context=ctx, tests=tests, binaries=binaries,
+                      base_revision=base_revision, patch=patch,
+                      pubkey=pubkey_fingerprint)
diff --git a/src/3rdparty/v8/tools/testrunner/server/__init__.py b/src/3rdparty/v8/tools/testrunner/server/__init__.py
new file mode 100644
index 0000000..202a262
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/3rdparty/v8/tools/testrunner/server/compression.py b/src/3rdparty/v8/tools/testrunner/server/compression.py
new file mode 100644
index 0000000..ce90c4f
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/compression.py
@@ -0,0 +1,112 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import cStringIO as StringIO
+try:
+ import ujson as json
+except ImportError:
+ print("You should install UltraJSON, it is much faster!")
+ import json
+import os
+import struct
+import zlib
+
+from . import constants
+
+def Send(obj, sock):
+  """
+  Sends a JSON encodable object over the specified socket (zlib-compressed).
+
+  Wire format: 4-byte big-endian signed length header, followed by the
+  zlib-compressed JSON payload. Receiver._GetNext() is the counterpart.
+  """
+  obj = json.dumps(obj)
+  compression_level = 2  # 1 = fastest, 9 = best compression
+  compressed = zlib.compress(obj, compression_level)
+  payload = struct.pack('>i', len(compressed)) + compressed
+  sock.sendall(payload)
+
+
+class Receiver(object):
+  """Iterator-style reader for the length-prefixed, zlib-compressed JSON
+  messages produced by Send(). Usage: while not IsDone(): Current(); Advance().
+  """
+
+  def __init__(self, sock):
+    self.sock = sock
+    self.data = StringIO.StringIO()  # buffer of bytes received but not consumed
+    self.datalength = 0  # number of buffered bytes
+    self._next = self._GetNext()  # eagerly read the first message
+
+  def IsDone(self):
+    # None signals end of stream (socket closed or END_OF_STREAM marker).
+    return self._next == None
+
+  def Current(self):
+    return self._next
+
+  # NOTE(review): the try/except:raise wrappers here and in _GetNext are
+  # no-ops; they neither handle nor annotate the exception.
+  def Advance(self):
+    try:
+      self._next = self._GetNext()
+    except:
+      raise
+
+  def _GetNext(self):
+    """Reads one complete message; returns the decoded object or None."""
+    try:
+      # First accumulate the SIZE_T-byte length header ...
+      while self.datalength < constants.SIZE_T:
+        try:
+          chunk = self.sock.recv(8192)
+        except:
+          raise
+        if not chunk: return None
+        self._AppendData(chunk)
+      size = self._PopData(constants.SIZE_T)
+      size = struct.unpack(">i", size)[0]
+      # ... then the payload itself.
+      while self.datalength < size:
+        try:
+          chunk = self.sock.recv(8192)
+        except:
+          raise
+        if not chunk: return None
+        self._AppendData(chunk)
+      result = self._PopData(size)
+      result = zlib.decompress(result)
+      result = json.loads(result)
+      if result == constants.END_OF_STREAM:
+        return None
+      return result
+    except:
+      raise
+
+  def _AppendData(self, new):
+    self.data.seek(0, os.SEEK_END)
+    self.data.write(new)
+    self.datalength += len(new)
+
+  def _PopData(self, length):
+    """Removes and returns the first |length| buffered bytes, rebuilding the
+    buffer with whatever remains."""
+    self.data.seek(0)
+    chunk = self.data.read(length)
+    remaining = self.data.read()
+    self.data.close()
+    self.data = StringIO.StringIO()
+    self.data.write(remaining)
+    assert self.datalength - length == len(remaining)
+    self.datalength = len(remaining)
+    return chunk
diff --git a/src/3rdparty/v8/tools/testrunner/server/constants.py b/src/3rdparty/v8/tools/testrunner/server/constants.py
new file mode 100644
index 0000000..5aefcba
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/constants.py
@@ -0,0 +1,51 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+CLIENT_PORT = 9991 # Port for the local client to connect to.
+PEER_PORT = 9992 # Port for peers on the network to connect to.
+PRESENCE_PORT = 9993 # Port for presence daemon.
+STATUS_PORT = 9994 # Port for network requests not related to workpackets.
+
+END_OF_STREAM = "end of dtest stream" # Marker for end of network requests.
+SIZE_T = 4 # Number of bytes used for network request size header.
+
+# Messages understood by the local request handler.
+ADD_TRUSTED = "add trusted"
+INFORM_DURATION = "inform about duration"
+REQUEST_PEERS = "get peers"
+UNRESPONSIVE_PEER = "unresponsive peer"
+REQUEST_PUBKEY_FINGERPRINT = "get pubkey fingerprint"
+REQUEST_STATUS = "get status"
+UPDATE_PERF = "update performance"
+
+# Messages understood by the status request handler.
+LIST_TRUSTED_PUBKEYS = "list trusted pubkeys"
+GET_SIGNED_PUBKEY = "pass on signed pubkey"
+NOTIFY_NEW_TRUSTED = "new trusted peer"
+TRUST_YOU_NOW = "trust you now"
+DO_YOU_TRUST = "do you trust"
diff --git a/src/3rdparty/v8/tools/testrunner/server/daemon.py b/src/3rdparty/v8/tools/testrunner/server/daemon.py
new file mode 100644
index 0000000..baa66fb
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/daemon.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+
+# This code has been written by Sander Marechal and published at:
+# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/
+# where the author has placed it in the public domain (see comment #6 at
+# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/#c6
+# ).
+# Some minor modifications have been made by the V8 authors. The work remains
+# in the public domain.
+
+import atexit
+import os
+from signal import SIGTERM
+from signal import SIGINT
+import sys
+import time
+
+
+class Daemon(object):
+  """
+  A generic daemon class.
+
+  Usage: subclass the Daemon class and override the run() method
+  """
+  def __init__(self, pidfile, stdin='/dev/null',
+               stdout='/dev/null', stderr='/dev/null'):
+    self.stdin = stdin
+    self.stdout = stdout
+    self.stderr = stderr
+    self.pidfile = pidfile  # path where the daemonized process records its pid
+
+  def daemonize(self):
+    """
+    do the UNIX double-fork magic, see Stevens' "Advanced
+    Programming in the UNIX Environment" for details (ISBN 0201563177)
+    http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
+    """
+    try:
+      pid = os.fork()
+      if pid > 0:
+        # exit first parent
+        sys.exit(0)
+    except OSError, e:
+      sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
+      sys.exit(1)
+
+    # decouple from parent environment
+    os.chdir("/")
+    os.setsid()
+    os.umask(0)
+
+    # do second fork
+    try:
+      pid = os.fork()
+      if pid > 0:
+        # exit from second parent
+        sys.exit(0)
+    except OSError, e:
+      sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
+      sys.exit(1)
+
+    # redirect standard file descriptors
+    sys.stdout.flush()
+    sys.stderr.flush()
+    si = file(self.stdin, 'r')
+    so = file(self.stdout, 'a+')
+    se = file(self.stderr, 'a+', 0)
+    # TODO: (debug) re-enable this!
+    # NOTE(review): with the dup2 calls disabled, the daemon keeps the
+    # parent's stdio instead of the configured stdin/stdout/stderr files.
+    #os.dup2(si.fileno(), sys.stdin.fileno())
+    #os.dup2(so.fileno(), sys.stdout.fileno())
+    #os.dup2(se.fileno(), sys.stderr.fileno())
+
+    # write pidfile
+    atexit.register(self.delpid)
+    pid = str(os.getpid())
+    file(self.pidfile, 'w+').write("%s\n" % pid)
+
+  def delpid(self):
+    # atexit hook; removes the pidfile when the daemon exits.
+    os.remove(self.pidfile)
+
+  def start(self):
+    """
+    Start the daemon
+    """
+    # Check for a pidfile to see if the daemon already runs
+    try:
+      pf = file(self.pidfile, 'r')
+      pid = int(pf.read().strip())
+      pf.close()
+    except IOError:
+      pid = None
+
+    if pid:
+      message = "pidfile %s already exist. Daemon already running?\n"
+      sys.stderr.write(message % self.pidfile)
+      sys.exit(1)
+
+    # Start the daemon
+    self.daemonize()
+    self.run()
+
+  def stop(self):
+    """
+    Stop the daemon
+    """
+    # Get the pid from the pidfile
+    try:
+      pf = file(self.pidfile, 'r')
+      pid = int(pf.read().strip())
+      pf.close()
+    except IOError:
+      pid = None
+
+    if not pid:
+      message = "pidfile %s does not exist. Daemon not running?\n"
+      sys.stderr.write(message % self.pidfile)
+      return # not an error in a restart
+
+    # Try killing the daemon process
+    try:
+      # Give the process a one-second chance to exit gracefully.
+      os.kill(pid, SIGINT)
+      time.sleep(1)
+      # Then send SIGTERM repeatedly until the process is gone; the loop
+      # terminates via the OSError("No such process") path below.
+      while 1:
+        os.kill(pid, SIGTERM)
+        time.sleep(0.1)
+    except OSError, err:
+      err = str(err)
+      if err.find("No such process") > 0:
+        if os.path.exists(self.pidfile):
+          os.remove(self.pidfile)
+      else:
+        print str(err)
+        sys.exit(1)
+
+  def restart(self):
+    """
+    Restart the daemon
+    """
+    self.stop()
+    self.start()
+
+  def run(self):
+    """
+    You should override this method when you subclass Daemon. It will be
+    called after the process has been daemonized by start() or restart().
+    """
diff --git a/src/3rdparty/v8/tools/testrunner/server/local_handler.py b/src/3rdparty/v8/tools/testrunner/server/local_handler.py
new file mode 100644
index 0000000..3b3ac49
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/local_handler.py
@@ -0,0 +1,119 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import socket
+import SocketServer
+import StringIO
+
+from . import compression
+from . import constants
+
+
+def LocalQuery(query):
+  """Sends |query| to the local daemon on CLIENT_PORT and returns the last
+  response payload, or None if the daemon is not reachable.
+
+  |query| is a list whose first element is the action constant; responses
+  echo that action back in data[0]."""
+  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+  code = sock.connect_ex(("localhost", constants.CLIENT_PORT))
+  if code != 0: return None  # daemon not running
+  compression.Send(query, sock)
+  compression.Send(constants.END_OF_STREAM, sock)
+  rec = compression.Receiver(sock)
+  data = None
+  while not rec.IsDone():
+    data = rec.Current()
+    assert data[0] == query[0]
+    data = data[1]
+    rec.Advance()
+  sock.close()
+  return data
+
+
+class LocalHandler(SocketServer.BaseRequestHandler):
+  """Dispatches requests from the local client (see LocalQuery) to the
+  owning daemon. Each message is [action, args...]."""
+
+  def handle(self):
+    rec = compression.Receiver(self.request)
+    while not rec.IsDone():
+      data = rec.Current()
+      action = data[0]
+
+      if action == constants.REQUEST_PEERS:
+        # Only peers that trust us are usable for distributing work.
+        with self.server.daemon.peer_list_lock:
+          response = [ p.Pack() for p in self.server.daemon.peers
+                       if p.trusting_me ]
+        compression.Send([action, response], self.request)
+
+      elif action == constants.UNRESPONSIVE_PEER:
+        self.server.daemon.DeletePeer(data[1])
+
+      elif action == constants.REQUEST_PUBKEY_FINGERPRINT:
+        compression.Send([action, self.server.daemon.pubkey_fingerprint],
+                         self.request)
+
+      elif action == constants.REQUEST_STATUS:
+        compression.Send([action, self._GetStatusMessage()], self.request)
+
+      elif action == constants.ADD_TRUSTED:
+        fingerprint = self.server.daemon.CopyToTrusted(data[1])
+        compression.Send([action, fingerprint], self.request)
+
+      elif action == constants.INFORM_DURATION:
+        test_key = data[1]
+        test_duration = data[2]
+        arch = data[3]
+        mode = data[4]
+        self.server.daemon.AddPerfData(test_key, test_duration, arch, mode)
+
+      elif action == constants.UPDATE_PERF:
+        # NOTE(review): |address| and |perf| are assigned but unused; the
+        # call below reads data[1]/data[2] directly.
+        address = data[1]
+        perf = data[2]
+        self.server.daemon.UpdatePeerPerformance(data[1], data[2])
+
+      rec.Advance()
+    compression.Send(constants.END_OF_STREAM, self.request)
+
+  def _GetStatusMessage(self):
+    # Builds a human-readable multi-line status report.
+    sio = StringIO.StringIO()
+    sio.write("Peers:\n")
+    with self.server.daemon.peer_list_lock:
+      for p in self.server.daemon.peers:
+        sio.write("%s\n" % p)
+    sio.write("My own jobs: %d, relative performance: %.2f\n" %
+              (self.server.daemon.jobs, self.server.daemon.relative_perf))
+    # Low-priority TODO: Return more information. Ideas:
+    # - currently running anything,
+    # - time since last job,
+    # - time since last repository fetch
+    # - number of workpackets/testcases handled since startup
+    # - slowest test(s)
+    result = sio.getvalue()
+    sio.close()
+    return result
+
+
+class LocalSocketServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
+  """Threaded TCP server for local-client requests; binds to localhost only
+  so it is not reachable from the network."""
+
+  def __init__(self, daemon):
+    SocketServer.TCPServer.__init__(self, ("localhost", constants.CLIENT_PORT),
+                                    LocalHandler)
+    self.daemon = daemon  # back-reference used by LocalHandler
diff --git a/src/3rdparty/v8/tools/testrunner/server/main.py b/src/3rdparty/v8/tools/testrunner/server/main.py
new file mode 100644
index 0000000..1000713
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/main.py
@@ -0,0 +1,245 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import multiprocessing
+import os
+import shutil
+import subprocess
+import threading
+import time
+
+from . import daemon
+from . import local_handler
+from . import presence_handler
+from . import signatures
+from . import status_handler
+from . import work_handler
+from ..network import perfdata
+
+
+class Server(daemon.Daemon):
+
+  def __init__(self, pidfile, root, stdin="/dev/null",
+               stdout="/dev/null", stderr="/dev/null"):
+    """Initializes server state; |root| is the data/config directory.
+    Reads the pubkey fingerprint (required) and the relative performance
+    value (optional, defaults to 1.0) from |root|/data."""
+    super(Server, self).__init__(pidfile, stdin, stdout, stderr)
+    self.root = root
+    self.local_handler = None
+    self.local_handler_thread = None
+    self.work_handler = None
+    self.work_handler_thread = None
+    self.status_handler = None
+    self.status_handler_thread = None
+    self.presence_daemon = None
+    self.presence_daemon_thread = None
+    self.peers = []
+    self.jobs = multiprocessing.cpu_count()
+    self.peer_list_lock = threading.Lock()
+    self.perf_data_lock = None  # created in run()
+    self.presence_daemon_lock = None
+    self.datadir = os.path.join(self.root, "data")
+    pubkey_fingerprint_filename = os.path.join(self.datadir, "mypubkey")
+    with open(pubkey_fingerprint_filename) as f:
+      self.pubkey_fingerprint = f.read().strip()
+    self.relative_perf_filename = os.path.join(self.datadir, "myperf")
+    if os.path.exists(self.relative_perf_filename):
+      with open(self.relative_perf_filename) as f:
+        try:
+          self.relative_perf = float(f.read())
+        except:
+          # Corrupt/empty perf file: fall back to the neutral default.
+          self.relative_perf = 1.0
+    else:
+      self.relative_perf = 1.0
+
+  def run(self):
+    """Daemon main loop: starts all handler servers on their own threads,
+    discovers peers, then runs PeriodicTasks() once a minute until an
+    exception or Ctrl-C triggers Shutdown()."""
+    os.nice(20)  # run at lowest priority; test execution comes first
+    self.ip = presence_handler.GetOwnIP()
+    self.perf_data_manager = perfdata.PerfDataManager(self.datadir)
+    self.perf_data_lock = threading.Lock()
+
+    self.local_handler = local_handler.LocalSocketServer(self)
+    self.local_handler_thread = threading.Thread(
+        target=self.local_handler.serve_forever)
+    self.local_handler_thread.start()
+
+    self.work_handler = work_handler.WorkSocketServer(self)
+    self.work_handler_thread = threading.Thread(
+        target=self.work_handler.serve_forever)
+    self.work_handler_thread.start()
+
+    self.status_handler = status_handler.StatusSocketServer(self)
+    self.status_handler_thread = threading.Thread(
+        target=self.status_handler.serve_forever)
+    self.status_handler_thread.start()
+
+    self.presence_daemon = presence_handler.PresenceDaemon(self)
+    self.presence_daemon_thread = threading.Thread(
+        target=self.presence_daemon.serve_forever)
+    self.presence_daemon_thread.start()
+
+    self.presence_daemon.FindPeers()
+    time.sleep(0.5)  # Give those peers some time to reply.
+
+    with self.peer_list_lock:
+      for p in self.peers:
+        if p.address == self.ip: continue
+        status_handler.RequestTrustedPubkeys(p, self)
+
+    while True:
+      try:
+        self.PeriodicTasks()
+        time.sleep(60)
+      except Exception, e:
+        print("MAIN LOOP EXCEPTION: %s" % e)
+        self.Shutdown()
+        break
+      except KeyboardInterrupt:
+        self.Shutdown()
+        break
+
+  def Shutdown(self):
+    """Persists the current relative performance value and stops all four
+    socket servers started in run()."""
+    with open(self.relative_perf_filename, "w") as f:
+      f.write("%s" % self.relative_perf)
+    self.presence_daemon.shutdown()
+    self.presence_daemon.server_close()
+    self.local_handler.shutdown()
+    self.local_handler.server_close()
+    self.work_handler.shutdown()
+    self.work_handler.server_close()
+    self.status_handler.shutdown()
+    self.status_handler.server_close()
+
+  def PeriodicTasks(self):
+    # If we know peers we don't trust, see if someone else trusts them.
+    with self.peer_list_lock:
+      for p in self.peers:
+        if p.trusted: continue
+        if self.IsTrusted(p.pubkey):
+          # The key showed up in our trusted dir in the meantime.
+          p.trusted = True
+          status_handler.ITrustYouNow(p)
+          continue
+        # Ask each trusted peer whether it can vouch for this key.
+        for p2 in self.peers:
+          if not p2.trusted: continue
+          status_handler.TryTransitiveTrust(p2, p.pubkey, self)
+    # TODO: Ping for more peers waiting to be discovered.
+    # TODO: Update the checkout (if currently idle).
+
+  def AddPeer(self, peer):
+    # Register |peer| unless one with the same address is already known;
+    # if the newcomer is trusted, tell it so right away.
+    with self.peer_list_lock:
+      for p in self.peers:
+        if p.address == peer.address:
+          return
+      self.peers.append(peer)
+      if peer.trusted:
+        status_handler.ITrustYouNow(peer)
+
+  def DeletePeer(self, peer_address):
+    # Forget the peer at |peer_address|; no-op if it is not known.
+    with self.peer_list_lock:
+      for i in xrange(len(self.peers)):
+        if self.peers[i].address == peer_address:
+          del self.peers[i]
+          return
+
+  def MarkPeerAsTrusting(self, peer_address):
+    # Record that the peer at |peer_address| has declared it trusts us.
+    with self.peer_list_lock:
+      for p in self.peers:
+        if p.address == peer_address:
+          p.trusting_me = True
+          break
+
+  def UpdatePeerPerformance(self, peer_address, performance):
+    # Store the relative-performance rating a peer reported for itself.
+    with self.peer_list_lock:
+      for p in self.peers:
+        if p.address == peer_address:
+          p.relative_performance = performance
+
+  def CopyToTrusted(self, pubkey_filename):
+    # Import a public key file into the trusted dir. The key's
+    # fingerprint is expected on the file's last line; it is returned.
+    with open(pubkey_filename, "r") as f:
+      lines = f.readlines()
+      fingerprint = lines[-1].strip()
+    target_filename = self._PubkeyFilename(fingerprint)
+    shutil.copy(pubkey_filename, target_filename)
+    with self.peer_list_lock:
+      for peer in self.peers:
+        if peer.address == self.ip: continue
+        if peer.pubkey == fingerprint:
+          # Tell the owner of the key that we now trust it ...
+          status_handler.ITrustYouNow(peer)
+        else:
+          # ... and offer everyone else a signed copy of the key.
+          result = self.SignTrusted(fingerprint)
+          status_handler.NotifyNewTrusted(peer, result)
+    return fingerprint
+
+  def _PubkeyFilename(self, pubkey_fingerprint):
+    # Path at which a trusted peer's public key is stored.
+    return os.path.join(self.root, "trusted", "%s.pem" % pubkey_fingerprint)
+
+  def IsTrusted(self, pubkey_fingerprint):
+    # A key is trusted iff a .pem file for its fingerprint exists.
+    return os.path.exists(self._PubkeyFilename(pubkey_fingerprint))
+
+  def ListTrusted(self):
+    # Returns the fingerprints (filenames minus ".pem") of all trusted keys.
+    path = os.path.join(self.root, "trusted")
+    if not os.path.exists(path): return []
+    return [ f[:-4] for f in os.listdir(path) if f.endswith(".pem") ]
+
+ def SignTrusted(self, pubkey_fingerprint):
+ if not self.IsTrusted(pubkey_fingerprint):
+ return []
+ filename = self._PubkeyFilename(pubkey_fingerprint)
+ result = signatures.ReadFileAndSignature(filename) # Format: [key, sig].
+ return [pubkey_fingerprint, result[0], result[1], self.pubkey_fingerprint]
+
+  def AcceptNewTrusted(self, data):
+    # The format of |data| matches the return value of |SignTrusted()|:
+    # [fingerprint, base64 key, base64 signature, signer's fingerprint].
+    # The key is accepted only if we already trust the signer and the
+    # signature verifies against the signer's public key.
+    if not data: return
+    fingerprint = data[0]
+    pubkey = data[1]
+    signature = data[2]
+    signer = data[3]
+    if not self.IsTrusted(signer):
+      return
+    if self.IsTrusted(fingerprint):
+      return  # Already trust this guy.
+    filename = self._PubkeyFilename(fingerprint)
+    signer_pubkeyfile = self._PubkeyFilename(signer)
+    # VerifySignature writes the key to |filename| and deletes it again
+    # if the signature does not match.
+    if not signatures.VerifySignature(filename, pubkey, signature,
+                                      signer_pubkeyfile):
+      return
+    return  # Nothing more to do.
+
+  def AddPerfData(self, test_key, duration, arch, mode):
+    # Record an observed test duration in the (arch, mode) perf store.
+    data_store = self.perf_data_manager.GetStore(arch, mode)
+    data_store.RawUpdatePerfData(str(test_key), duration)
+
+  def CompareOwnPerf(self, test, arch, mode):
+    # Fold one finished test into our own relative-performance rating,
+    # using an exponential moving average (1/10000 learn rate) so that
+    # a single outlier barely moves the estimate.
+    data_store = self.perf_data_manager.GetStore(arch, mode)
+    observed = data_store.FetchPerfData(test)
+    if not observed: return
+    own_perf_estimate = observed / test.duration
+    with self.perf_data_lock:
+      kLearnRateLimiter = 9999
+      self.relative_perf *= kLearnRateLimiter
+      self.relative_perf += own_perf_estimate
+      self.relative_perf /= (kLearnRateLimiter + 1)
diff --git a/src/3rdparty/v8/tools/testrunner/server/presence_handler.py b/src/3rdparty/v8/tools/testrunner/server/presence_handler.py
new file mode 100644
index 0000000..1dc2ef1
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/presence_handler.py
@@ -0,0 +1,120 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import socket
+import SocketServer
+import threading
+try:
+ import ujson as json
+except:
+ import json
+
+from . import constants
+from ..objects import peer
+
+
+STARTUP_REQUEST = "V8 test peer starting up"
+STARTUP_RESPONSE = "Let's rock some tests!"
+EXIT_REQUEST = "V8 testing peer going down"
+
+
+def GetOwnIP():
+  # Determine the primary local IP address by "connecting" a UDP socket
+  # to a public address; no packet is actually sent.
+  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+  s.connect(("8.8.8.8", 80))
+  ip = s.getsockname()[0]
+  s.close()
+  return ip
+
+
+class PresenceHandler(SocketServer.BaseRequestHandler):
+  """Handles UDP presence datagrams: discovery handshakes, exit notices."""
+
+  def handle(self):
+    data = json.loads(self.request[0].strip())
+
+    if data[0] == STARTUP_REQUEST:
+      # A peer is announcing itself; payload: [jobs, relative_perf,
+      # pubkey_fingerprint]. Answer with our own data and register it.
+      jobs = data[1]
+      relative_perf = data[2]
+      pubkey_fingerprint = data[3]
+      trusted = self.server.daemon.IsTrusted(pubkey_fingerprint)
+      response = [STARTUP_RESPONSE, self.server.daemon.jobs,
+                  self.server.daemon.relative_perf,
+                  self.server.daemon.pubkey_fingerprint, trusted]
+      response = json.dumps(response)
+      self.server.SendTo(self.client_address[0], response)
+      p = peer.Peer(self.client_address[0], jobs, relative_perf,
+                    pubkey_fingerprint)
+      p.trusted = trusted
+      self.server.daemon.AddPeer(p)
+
+    elif data[0] == STARTUP_RESPONSE:
+      # A peer answered our announcement; data[4] additionally carries
+      # whether that peer trusts us.
+      jobs = data[1]
+      perf = data[2]
+      pubkey_fingerprint = data[3]
+      p = peer.Peer(self.client_address[0], jobs, perf, pubkey_fingerprint)
+      p.trusted = self.server.daemon.IsTrusted(pubkey_fingerprint)
+      p.trusting_me = data[4]
+      self.server.daemon.AddPeer(p)
+
+    elif data[0] == EXIT_REQUEST:
+      # A peer (possibly ourselves) is going down; forget it. Receiving
+      # our own exit datagram unblocks PresenceDaemon.shutdown().
+      self.server.daemon.DeletePeer(self.client_address[0])
+      if self.client_address[0] == self.server.daemon.ip:
+        self.server.shutdown_lock.release()
+
+
+class PresenceDaemon(SocketServer.ThreadingMixIn, SocketServer.UDPServer):
+ def __init__(self, daemon):
+ self.daemon = daemon
+ address = (daemon.ip, constants.PRESENCE_PORT)
+ SocketServer.UDPServer.__init__(self, address, PresenceHandler)
+ self.shutdown_lock = threading.Lock()
+
+ def shutdown(self):
+ self.shutdown_lock.acquire()
+ self.SendToAll(json.dumps([EXIT_REQUEST]))
+ self.shutdown_lock.acquire()
+ self.shutdown_lock.release()
+ SocketServer.UDPServer.shutdown(self)
+
+ def SendTo(self, target, message):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock.sendto(message, (target, constants.PRESENCE_PORT))
+ sock.close()
+
+ def SendToAll(self, message):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ ip = self.daemon.ip.split(".")
+ for i in range(1, 254):
+ ip[-1] = str(i)
+ sock.sendto(message, (".".join(ip), constants.PRESENCE_PORT))
+ sock.close()
+
+ def FindPeers(self):
+ request = [STARTUP_REQUEST, self.daemon.jobs, self.daemon.relative_perf,
+ self.daemon.pubkey_fingerprint]
+ request = json.dumps(request)
+ self.SendToAll(request)
diff --git a/src/3rdparty/v8/tools/testrunner/server/signatures.py b/src/3rdparty/v8/tools/testrunner/server/signatures.py
new file mode 100644
index 0000000..9957a18
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/signatures.py
@@ -0,0 +1,63 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import base64
+import os
+import subprocess
+
+
+def ReadFileAndSignature(filename):
+  """Reads |filename| and returns [base64 contents, base64 signature].
+
+  The signature is made with the local private key and cached in
+  "<filename>.signature"; it is regenerated when the file is newer than
+  the cache. Returns [None, <openssl exit code>] if signing fails.
+  """
+  with open(filename, "rb") as f:
+    file_contents = base64.b64encode(f.read())
+  signature_file = filename + ".signature"
+  if (not os.path.exists(signature_file) or
+      os.path.getmtime(signature_file) < os.path.getmtime(filename)):
+    private_key = "~/.ssh/v8_dtest"  # "~" expanded by the shell (shell=True).
+    code = subprocess.call("openssl dgst -out %s -sign %s %s" %
+                           (signature_file, private_key, filename),
+                           shell=True)
+    if code != 0: return [None, code]
+  with open(signature_file) as f:
+    signature = base64.b64encode(f.read())
+  return [file_contents, signature]
+
+
+def VerifySignature(filename, file_contents, signature, pubkeyfile):
+  """Writes base64 |file_contents| to |filename| and verifies it against
+  base64 |signature| using the public key in |pubkeyfile|.
+
+  On mismatch the written file and the temporary signature file are
+  deleted again. Returns True iff the signature is valid.
+  """
+  with open(filename, "wb") as f:
+    f.write(base64.b64decode(file_contents))
+  signature_file = filename + ".foreign_signature"
+  with open(signature_file, "wb") as f:
+    f.write(base64.b64decode(signature))
+  code = subprocess.call("openssl dgst -verify %s -signature %s %s" %
+                         (pubkeyfile, signature_file, filename),
+                         shell=True)
+  matched = (code == 0)
+  if not matched:
+    os.remove(signature_file)
+    os.remove(filename)
+  # NOTE(review): on success the .foreign_signature file is left behind;
+  # presumably harmless litter, but could be removed unconditionally.
+  return matched
diff --git a/src/3rdparty/v8/tools/testrunner/server/status_handler.py b/src/3rdparty/v8/tools/testrunner/server/status_handler.py
new file mode 100644
index 0000000..3f2271d
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/status_handler.py
@@ -0,0 +1,112 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import socket
+import SocketServer
+
+from . import compression
+from . import constants
+
+
+def _StatusQuery(peer, query):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ code = sock.connect_ex((peer.address, constants.STATUS_PORT))
+ if code != 0:
+ # TODO(jkummerow): disconnect (after 3 failures?)
+ return
+ compression.Send(query, sock)
+ compression.Send(constants.END_OF_STREAM, sock)
+ rec = compression.Receiver(sock)
+ data = None
+ while not rec.IsDone():
+ data = rec.Current()
+ assert data[0] == query[0]
+ data = data[1]
+ rec.Advance()
+ sock.close()
+ return data
+
+
+def RequestTrustedPubkeys(peer, server):
+ pubkey_list = _StatusQuery(peer, [constants.LIST_TRUSTED_PUBKEYS])
+ for pubkey in pubkey_list:
+ if server.IsTrusted(pubkey): continue
+ result = _StatusQuery(peer, [constants.GET_SIGNED_PUBKEY, pubkey])
+ server.AcceptNewTrusted(result)
+
+
+def NotifyNewTrusted(peer, data):
+  # Push a freshly signed pubkey (format of SignTrusted()) to |peer|.
+  _StatusQuery(peer, [constants.NOTIFY_NEW_TRUSTED] + data)
+
+
+def ITrustYouNow(peer):
+  # Tell |peer| that we consider it trusted from now on.
+  _StatusQuery(peer, [constants.TRUST_YOU_NOW])
+
+
+def TryTransitiveTrust(peer, pubkey, server):
+  # If trusted |peer| vouches for |pubkey|, fetch a signed copy of that
+  # key from it and try to import it.
+  if _StatusQuery(peer, [constants.DO_YOU_TRUST, pubkey]):
+    result = _StatusQuery(peer, [constants.GET_SIGNED_PUBKEY, pubkey])
+    server.AcceptNewTrusted(result)
+
+
+class StatusHandler(SocketServer.BaseRequestHandler):
+  # Answers trust/status queries from peers; one action per message,
+  # END_OF_STREAM terminates the exchange.
+  def handle(self):
+    rec = compression.Receiver(self.request)
+    while not rec.IsDone():
+      data = rec.Current()
+      action = data[0]
+
+      if action == constants.LIST_TRUSTED_PUBKEYS:
+        response = self.server.daemon.ListTrusted()
+        compression.Send([action, response], self.request)
+
+      elif action == constants.GET_SIGNED_PUBKEY:
+        response = self.server.daemon.SignTrusted(data[1])
+        compression.Send([action, response], self.request)
+
+      elif action == constants.NOTIFY_NEW_TRUSTED:
+        self.server.daemon.AcceptNewTrusted(data[1:])
+        pass  # No response.
+
+      elif action == constants.TRUST_YOU_NOW:
+        self.server.daemon.MarkPeerAsTrusting(self.client_address[0])
+        pass  # No response.
+
+      elif action == constants.DO_YOU_TRUST:
+        response = self.server.daemon.IsTrusted(data[1])
+        compression.Send([action, response], self.request)
+
+      rec.Advance()
+    compression.Send(constants.END_OF_STREAM, self.request)
+
+
+class StatusSocketServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
+  """TCP server answering status/trust queries (see StatusHandler)."""
+  def __init__(self, daemon):
+    address = (daemon.ip, constants.STATUS_PORT)
+    SocketServer.TCPServer.__init__(self, address, StatusHandler)
+    self.daemon = daemon
diff --git a/src/3rdparty/v8/tools/testrunner/server/work_handler.py b/src/3rdparty/v8/tools/testrunner/server/work_handler.py
new file mode 100644
index 0000000..6bf7d43
--- /dev/null
+++ b/src/3rdparty/v8/tools/testrunner/server/work_handler.py
@@ -0,0 +1,150 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import SocketServer
+import stat
+import subprocess
+import threading
+
+from . import compression
+from . import constants
+from . import signatures
+from ..network import endpoint
+from ..objects import workpacket
+
+
+class WorkHandler(SocketServer.BaseRequestHandler):
+
+ def handle(self):
+ rec = compression.Receiver(self.request)
+ while not rec.IsDone():
+ data = rec.Current()
+ with self.server.job_lock:
+ self._WorkOnWorkPacket(data)
+ rec.Advance()
+
+ def _WorkOnWorkPacket(self, data):
+ server_root = self.server.daemon.root
+ v8_root = os.path.join(server_root, "v8")
+ os.chdir(v8_root)
+ packet = workpacket.WorkPacket.Unpack(data)
+ self.ctx = packet.context
+ self.ctx.shell_dir = os.path.join("out",
+ "%s.%s" % (self.ctx.arch, self.ctx.mode))
+ if not os.path.isdir(self.ctx.shell_dir):
+ os.makedirs(self.ctx.shell_dir)
+ for binary in packet.binaries:
+ if not self._UnpackBinary(binary, packet.pubkey_fingerprint):
+ return
+
+ if not self._CheckoutRevision(packet.base_revision):
+ return
+
+ if not self._ApplyPatch(packet.patch):
+ return
+
+ tests = packet.tests
+ endpoint.Execute(v8_root, self.ctx, tests, self.request, self.server.daemon)
+ self._SendResponse()
+
+ def _SendResponse(self, error_message=None):
+ try:
+ if error_message:
+ compression.Send([[-1, error_message]], self.request)
+ compression.Send(constants.END_OF_STREAM, self.request)
+ return
+ except Exception, e:
+ pass # Peer is gone. There's nothing we can do.
+ # Clean up.
+ self._Call("git checkout -f")
+ self._Call("git clean -f -d")
+ self._Call("rm -rf %s" % self.ctx.shell_dir)
+
+ def _UnpackBinary(self, binary, pubkey_fingerprint):
+ binary_name = binary["name"]
+ if binary_name == "libv8.so":
+ libdir = os.path.join(self.ctx.shell_dir, "lib.target")
+ if not os.path.exists(libdir): os.makedirs(libdir)
+ target = os.path.join(libdir, binary_name)
+ else:
+ target = os.path.join(self.ctx.shell_dir, binary_name)
+ pubkeyfile = "../trusted/%s.pem" % pubkey_fingerprint
+ if not signatures.VerifySignature(target, binary["blob"],
+ binary["sign"], pubkeyfile):
+ self._SendResponse("Signature verification failed")
+ return False
+ os.chmod(target, stat.S_IRWXU)
+ return True
+
+ def _CheckoutRevision(self, base_svn_revision):
+ get_hash_cmd = (
+ "git log -1 --format=%%H --remotes --grep='^git-svn-id:.*@%s'" %
+ base_svn_revision)
+ try:
+ base_revision = subprocess.check_output(get_hash_cmd, shell=True)
+ if not base_revision: raise ValueError
+ except:
+ self._Call("git fetch")
+ try:
+ base_revision = subprocess.check_output(get_hash_cmd, shell=True)
+ if not base_revision: raise ValueError
+ except:
+ self._SendResponse("Base revision not found.")
+ return False
+ code = self._Call("git checkout -f %s" % base_revision)
+ if code != 0:
+ self._SendResponse("Error trying to check out base revision.")
+ return False
+ code = self._Call("git clean -f -d")
+ if code != 0:
+ self._SendResponse("Failed to reset checkout")
+ return False
+ return True
+
+ def _ApplyPatch(self, patch):
+ if not patch: return True # Just skip if the patch is empty.
+ patchfilename = "_dtest_incoming_patch.patch"
+ with open(patchfilename, "w") as f:
+ f.write(patch)
+ code = self._Call("git apply %s" % patchfilename)
+ if code != 0:
+ self._SendResponse("Error applying patch.")
+ return False
+ return True
+
+ def _Call(self, cmd):
+ return subprocess.call(cmd, shell=True)
+
+
+class WorkSocketServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
+  """TCP server accepting work packets; jobs are serialized by job_lock."""
+  def __init__(self, daemon):
+    address = (daemon.ip, constants.PEER_PORT)
+    SocketServer.TCPServer.__init__(self, address, WorkHandler)
+    self.job_lock = threading.Lock()
+    self.daemon = daemon
diff --git a/src/3rdparty/v8/tools/tickprocessor-driver.js b/src/3rdparty/v8/tools/tickprocessor-driver.js
index 9af5ab6..313c6d4 100644
--- a/src/3rdparty/v8/tools/tickprocessor-driver.js
+++ b/src/3rdparty/v8/tools/tickprocessor-driver.js
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -50,7 +50,7 @@ if (params.snapshotLogFileName) {
snapshotLogProcessor.processLogFile(params.snapshotLogFileName);
}
var tickProcessor = new TickProcessor(
- new (entriesProviders[params.platform])(params.nm),
+ new (entriesProviders[params.platform])(params.nm, params.targetRootFS),
params.separateIc,
params.callGraphSize,
params.ignoreUnknown,
diff --git a/src/3rdparty/v8/tools/tickprocessor.js b/src/3rdparty/v8/tools/tickprocessor.js
index 05a3369..f6e315d 100644
--- a/src/3rdparty/v8/tools/tickprocessor.js
+++ b/src/3rdparty/v8/tools/tickprocessor.js
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -231,8 +231,9 @@ TickProcessor.VmStates = {
JS: 0,
GC: 1,
COMPILER: 2,
- OTHER: 3,
- EXTERNAL: 4
+ PARALLEL_COMPILER_PROLOGUE: 3,
+ OTHER: 4,
+ EXTERNAL: 5
};
@@ -608,10 +609,11 @@ CppEntriesProvider.prototype.parseNextLine = function() {
};
-function UnixCppEntriesProvider(nmExec) {
+function UnixCppEntriesProvider(nmExec, targetRootFS) {
this.symbols = [];
this.parsePos = 0;
this.nmExec = nmExec;
+ this.targetRootFS = targetRootFS;
this.FUNC_RE = /^([0-9a-fA-F]{8,16}) ([0-9a-fA-F]{8,16} )?[tTwW] (.*)$/;
};
inherits(UnixCppEntriesProvider, CppEntriesProvider);
@@ -619,6 +621,7 @@ inherits(UnixCppEntriesProvider, CppEntriesProvider);
UnixCppEntriesProvider.prototype.loadSymbols = function(libName) {
this.parsePos = 0;
+ libName = this.targetRootFS + libName;
try {
this.symbols = [
os.system(this.nmExec, ['-C', '-n', '-S', libName], -1, -1),
@@ -656,8 +659,8 @@ UnixCppEntriesProvider.prototype.parseNextLine = function() {
};
-function MacCppEntriesProvider(nmExec) {
- UnixCppEntriesProvider.call(this, nmExec);
+function MacCppEntriesProvider(nmExec, targetRootFS) {
+ UnixCppEntriesProvider.call(this, nmExec, targetRootFS);
// Note an empty group. It is required, as UnixCppEntriesProvider expects 3 groups.
this.FUNC_RE = /^([0-9a-fA-F]{8,16}) ()[iItT] (.*)$/;
};
@@ -666,6 +669,7 @@ inherits(MacCppEntriesProvider, UnixCppEntriesProvider);
MacCppEntriesProvider.prototype.loadSymbols = function(libName) {
this.parsePos = 0;
+ libName = this.targetRootFS + libName;
try {
this.symbols = [os.system(this.nmExec, ['-n', '-f', libName], -1, -1), ''];
} catch (e) {
@@ -675,7 +679,8 @@ MacCppEntriesProvider.prototype.loadSymbols = function(libName) {
};
-function WindowsCppEntriesProvider() {
+function WindowsCppEntriesProvider(_ignored_nmExec, targetRootFS) {
+ this.targetRootFS = targetRootFS;
this.symbols = '';
this.parsePos = 0;
};
@@ -698,6 +703,7 @@ WindowsCppEntriesProvider.EXE_IMAGE_BASE = 0x00400000;
WindowsCppEntriesProvider.prototype.loadSymbols = function(libName) {
+ libName = this.targetRootFS + libName;
var fileNameFields = libName.match(WindowsCppEntriesProvider.FILENAME_RE);
if (!fileNameFields) return;
var mapFileName = fileNameFields[1] + '.map';
@@ -785,6 +791,8 @@ function ArgumentsProcessor(args) {
'Specify that we are running on Mac OS X platform'],
'--nm': ['nm', 'nm',
'Specify the \'nm\' executable to use (e.g. --nm=/my_dir/nm)'],
+ '--target': ['targetRootFS', '',
+ 'Specify the target root directory for cross environment'],
'--snapshot-log': ['snapshotLogFileName', 'snapshot.log',
'Specify snapshot log file to use (e.g. --snapshot-log=snapshot.log)']
};
@@ -804,6 +812,7 @@ ArgumentsProcessor.DEFAULTS = {
callGraphSize: 5,
ignoreUnknown: false,
separateIc: false,
+ targetRootFS: '',
nm: 'nm'
};
diff --git a/src/v8/v8.pri b/src/v8/v8.pri
index 20370fb..3a563ff 100644
--- a/src/v8/v8.pri
+++ b/src/v8/v8.pri
@@ -33,6 +33,12 @@ DEFINES += ENABLE_DEBUGGER_SUPPORT
# this is needed by crankshaft ( http://code.google.com/p/v8/issues/detail?id=1271 )
DEFINES += ENABLE_VMSTATE_TRACKING ENABLE_LOGGING_AND_PROFILING
+# Set NOMINMAX, so that the minmax macros are not getting set for the msvc
+win*:DEFINES += NOMINMAX
+
+# Windows CE does not set the WIN32 macro, which is needed for compiling
+wince:DEFINES += WIN32
+
CONFIG(debug, debug|release) {
DEFINES += DEBUG V8_ENABLE_CHECKS OBJECT_PRINT ENABLE_DISASSEMBLER
} else {
@@ -76,6 +82,7 @@ SOURCES += \
$$V8SRC/diy-fp.cc \
$$V8SRC/dtoa.cc \
$$V8SRC/elements.cc \
+ $$V8SRC/elements-kind.cc \
$$V8SRC/execution.cc \
$$V8SRC/factory.cc \
$$V8SRC/flags.cc \
@@ -110,6 +117,7 @@ SOURCES += \
$$V8SRC/objects-printer.cc \
$$V8SRC/objects-visiting.cc \
$$V8SRC/once.cc \
+ $$V8SRC/optimizing-compiler-thread.cc \
$$V8SRC/parser.cc \
$$V8SRC/preparser.cc \
$$V8SRC/preparse-data.cc \
@@ -134,6 +142,7 @@ SOURCES += \
$$V8SRC/strtod.cc \
$$V8SRC/stub-cache.cc \
$$V8SRC/token.cc \
+ $$V8SRC/transitions.cc \
$$V8SRC/type-info.cc \
$$V8SRC/unicode.cc \
$$V8SRC/utils.cc \
@@ -146,8 +155,9 @@ SOURCES += \
$$V8SRC/version.cc \
$$V8SRC/store-buffer.cc \
$$V8SRC/zone.cc \
+ $$V8SRC/extensions/externalize-string-extension.cc \
$$V8SRC/extensions/gc-extension.cc \
- $$V8SRC/extensions/externalize-string-extension.cc
+ $$V8SRC/extensions/statistics-extension.cc
equals(V8_TARGET_ARCH, arm) {
DEFINES += V8_TARGET_ARCH_ARM
@@ -263,7 +273,8 @@ win32 {
SOURCES += \
$$V8SRC/platform-win32.cc \
$$V8SRC/win32-math.cc
-LIBS += -lws2_32 -lwinmm
+wince*:LIBS += -lws2 -lmmtimer
+else:LIBS += -lws2_32 -lwinmm
win32-msvc*: QMAKE_CXXFLAGS += -wd4100 -wd 4291 -wd4351 -wd4355 -wd4800
win32-msvc*:arch_i386: DEFINES += _USE_32BIT_TIME_T
}
diff --git a/tests/auto/v8/tst_v8.cpp b/tests/auto/v8/tst_v8.cpp
index fe1da30..7461ce3 100644
--- a/tests/auto/v8/tst_v8.cpp
+++ b/tests/auto/v8/tst_v8.cpp
@@ -68,6 +68,7 @@ private slots:
void fallbackpropertyhandler_nonempty();
void completehash();
void stringhashcomparison();
+ void qmlmodevariables();
};
void tst_v8::eval()
@@ -140,6 +141,11 @@ void tst_v8::stringhashcomparison()
QVERIFY(v8test_stringhashcomparison());
}
+// Runs the QML-mode variable scoping test from v8test.cpp.
+void tst_v8::qmlmodevariables()
+{
+    QVERIFY(v8test_qmlmodevariables());
+}
+
int main(int argc, char *argv[])
{
V8::SetFlagsFromCommandLine(&argc, argv, true);
diff --git a/tests/auto/v8/v8main.cpp b/tests/auto/v8/v8main.cpp
index cd826ee..5ec41ad 100644
--- a/tests/auto/v8/v8main.cpp
+++ b/tests/auto/v8/v8main.cpp
@@ -74,6 +74,7 @@ int main(int argc, char *argv[])
RUN_TEST(fallbackpropertyhandler_in_prototype);
RUN_TEST(fallbackpropertyhandler_nonempty);
RUN_TEST(completehash);
+ RUN_TEST(qmlmodevariables);
return exit_status;
}
diff --git a/tests/auto/v8/v8test.cpp b/tests/auto/v8/v8test.cpp
index 77a7146..6621846 100644
--- a/tests/auto/v8/v8test.cpp
+++ b/tests/auto/v8/v8test.cpp
@@ -1147,3 +1147,66 @@ cleanup:
ENDTEST();
}
#endif
+
+// Test whether the variables are declared in the appropriate scope
+// when script is compiled in QML compilation mode.
+bool v8test_qmlmodevariables()
+{
+    BEGINTEST();
+
+    HandleScope handle_scope;
+    Persistent<Context> context = Context::New();
+    Context::Scope context_scope(context);
+
+    Local<Object> global = context->Global();
+    Local<Object> qmlglobal = Object::New();
+
+    // Pre-populate both scopes so redeclaration behavior can be checked.
+    qmlglobal->Set(String::New("eval"), Integer::New(1922));
+    qmlglobal->Set(String::New("b"), Integer::New(28));
+    global->Set(String::New("x"), Integer::New(32));
+    global->Set(String::New("y"), Integer::New(40));
+
+    // Different declarations regarding to the binding kind.
+    Local<String> source = String::New(
+        "function f() { return 28; }" // function is bound at parse-time
+        "var a = 42;" // bound variable declared in qmlglobal scope
+        "eval(\"b\");" // unbound variable declared in qmlglobal scope
+        "const c = 28;" // constant is bound at parse-time
+        "var x = 2;" // bound variable declared in global scope
+        "eval(\"y\");" // unbound variable declared in global scope
+        );
+    Local<Script> script = Script::Compile(source, NULL, NULL, Handle<String>(), Script::QmlMode);
+
+    TryCatch tc;
+    script->Run(qmlglobal);
+    VERIFY(!tc.HasCaught());
+
+    // Check redeclaration of a global JS function.
+    VERIFY(global->HasOwnProperty(String::New("eval")));
+    VERIFY(qmlglobal->HasOwnProperty(String::New("eval")));
+
+    // The following variables should be declared in the qmlglobal scope.
+    VERIFY(!global->HasOwnProperty(String::New("f")));
+    VERIFY(qmlglobal->HasOwnProperty(String::New("f")));
+
+    VERIFY(!global->HasOwnProperty(String::New("a")));
+    VERIFY(qmlglobal->HasOwnProperty(String::New("a")));
+
+    VERIFY(!global->HasOwnProperty(String::New("b")));
+    VERIFY(qmlglobal->HasOwnProperty(String::New("b")));
+
+    VERIFY(!global->HasOwnProperty(String::New("c")));
+    VERIFY(qmlglobal->HasOwnProperty(String::New("c")));
+
+    // The following variables should be declared in the global scope.
+    VERIFY(global->HasOwnProperty(String::New("x")));
+    VERIFY(!qmlglobal->HasOwnProperty(String::New("x")));
+
+    VERIFY(global->HasOwnProperty(String::New("y")));
+    VERIFY(!qmlglobal->HasOwnProperty(String::New("y")));
+
+cleanup:
+    context.Dispose();
+
+    ENDTEST();
+}
diff --git a/tests/auto/v8/v8test.h b/tests/auto/v8/v8test.h
index dac44ba..2db655c 100644
--- a/tests/auto/v8/v8test.h
+++ b/tests/auto/v8/v8test.h
@@ -62,6 +62,7 @@ bool v8test_fallbackpropertyhandler_in_prototype();
bool v8test_fallbackpropertyhandler_nonempty();
bool v8test_completehash();
bool v8test_stringhashcomparison();
+bool v8test_qmlmodevariables();
#endif // V8TEST_H